# -*- coding: utf-8 -*-

import re
import sys
import random
from typing import List, Tuple

import requests
from requests.models import Response


def find_links_in_text(text: str) -> List[str]:
    """Find links in a text and return a list of URLs."""

    link_pattern = re.compile(r'((?:https?://|www\d{0,3}[.]|[a-z0-9.\-]+[.][a-z]{2,4}/)(?:[^\s()<>]+|\(([^\s()<>]+|(\([^\s()<>]+\)))*\))+(?:\(([^\s()<>]+|(\([^\s()<>]+\)))*\)|[^\s`!()\[\]{};:\'\".,<>?«»“”‘’]))')

    raw_links = re.findall(link_pattern, text)

    links = [
        str(raw_link[0]) for raw_link in raw_links
    ]

    return links
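
# Usage sketch for find_links_in_text (illustrative only, kept as comments so the
# script's behaviour is unchanged; the sample text and URLs are made up).
# Trailing punctuation after a URL is not included in the match:
#
#   >>> find_links_in_text('Docs at https://api.publicapis.org and http://example.com/docs.')
#   ['https://api.publicapis.org', 'http://example.com/docs']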


def find_links_in_file(filename: str) -> List[str]:
    """Find links in a text file and return them as a list of URLs."""

    with open(filename, mode='r', encoding='utf-8') as file:
        readme = file.read()
        # Only check links after the '## Index' heading, if it is present.
        index_section = readme.find('## Index')
        if index_section == -1:
            index_section = 0
        content = readme[index_section:]

    links = find_links_in_text(content)

    return links
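
# Usage sketch for find_links_in_file (illustrative only; 'README.md' is an
# assumed file name). Only text from the '## Index' heading onwards is scanned,
# or the whole file if that heading is missing:
#
#   >>> links = find_links_in_file('README.md')
#   >>> len(links)   # number of URLs found after '## Index'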


def check_duplicate_links(links: List[str]) -> Tuple[bool, List]:
    """Check for duplicated links.

    Returns a tuple (has_duplicate, duplicates), where duplicates is the
    list of links that appear more than once.
    """

    seen = {}
    duplicates = []
    has_duplicate = False

    for link in links:
        link = link.rstrip('/')
        if link not in seen:
            seen[link] = 1
        else:
            if seen[link] == 1:
                duplicates.append(link)
            # Count repeats so a link seen three or more times is reported only once.
            seen[link] += 1

    if duplicates:
        has_duplicate = True

    return (has_duplicate, duplicates)
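
# Usage sketch for check_duplicate_links (illustrative only; the example URLs are
# made up). A trailing '/' is stripped before comparison, so these two count as
# the same link:
#
#   >>> check_duplicate_links(['https://example.com/api', 'https://example.com/api/'])
#   (True, ['https://example.com/api'])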


def fake_user_agent() -> str:
    """Fake the user agent, as some hosting services block non-whitelisted UAs."""

    user_agents = [
        'Mozilla/5.0 (Windows NT 6.2) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/28.0.1467.0 Safari/537.36',
        'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_13_6) AppleWebKit/605.1.15 (KHTML, like Gecko)',
        'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/80.0.3987.132 Safari/537.36',
        'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/97.0.4692.71 Safari/537.36',
    ]

    return random.choice(user_agents)


def get_host_from_link(link: str) -> str:
    """Extract the bare host name from a URL."""

    host = link.split('://', 1)[1] if '://' in link else link

    # Remove routes, arguments and anchors
    if '/' in host:
        host = host.split('/', 1)[0]
    elif '?' in host:
        host = host.split('?', 1)[0]
    elif '#' in host:
        host = host.split('#', 1)[0]

    return host
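
# Usage sketch for get_host_from_link (illustrative only; the URL is made up).
# The scheme, path, query string and fragment are all stripped:
#
#   >>> get_host_from_link('https://api.example.com/v1/items?page=2#top')
#   'api.example.com'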


def has_cloudflare_protection(resp: Response) -> bool:
    """Check whether there is any Cloudflare protection in the response.

    Cloudflare applies several network protections to a given link; this
    function tries to detect whether any of them appear in the response to a
    request. Common protections answer with the following HTTP status codes:

    - 403: when the Host header is missing or incorrect (and more)
    - 503: when DDoS protection is active

    See more about it at:

    - https://support.cloudflare.com/hc/en-us/articles/115003014512-4xx-Client-Error
    - https://support.cloudflare.com/hc/en-us/articles/115003011431-Troubleshooting-Cloudflare-5XX-errors
    - https://www.cloudflare.com/ddos/
    - https://superuser.com/a/888526

    Discussions in issues and pull requests:

    - https://github.com/public-apis/public-apis/pull/2409
    - https://github.com/public-apis/public-apis/issues/2960
    """

    code = resp.status_code
    server = resp.headers.get('Server') or resp.headers.get('server')
    cloudflare_flags = [
        '403 Forbidden',
        'cloudflare',
        'Cloudflare',
        'Security check',
        'Please Wait... | Cloudflare',
        'We are checking your browser...',
        'Please stand by, while we are checking your browser...',
        'Checking your browser before accessing',
        'This process is automatic.',
        'Your browser will redirect to your requested content shortly.',
        'Please allow up to 5 seconds',
        'DDoS protection by',
        'Ray ID:',
        'Cloudflare Ray ID:',
        '_cf_chl',
        '_cf_chl_opt',
        '__cf_chl_rt_tk',
        'cf-spinner-please-wait',
        'cf-spinner-redirecting'
    ]

    if code in [403, 503] and server == 'cloudflare':
        html = resp.text
        flags_found = [flag in html for flag in cloudflare_flags]
        any_flag_found = any(flags_found)

        if any_flag_found:
            return True

    return False
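
# Decision sketch for has_cloudflare_protection (illustrative comments only): a
# response counts as "Cloudflare protected" only when all three conditions hold,
# for example:
#
#   resp.status_code in (403, 503)
#   resp.headers.get('Server') == 'cloudflare'
#   'Checking your browser before accessing' in resp.text   # any flag from the list above
#
# check_if_link_is_working() below uses this so that challenge pages are not
# reported as broken links.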


def check_if_link_is_working(link: str) -> Tuple[bool, str]:
    """Checks if a link is working.

    If an error is identified when the request for the link occurs,
    the return will be a tuple with the first value True and the second
    value a string containing the error message.

    If no errors are identified, the return will be a tuple with the
    first value False and the second an empty string.
    """

    has_error = False
    error_message = ''

    try:
        resp = requests.get(link, timeout=25, headers={
            'User-Agent': fake_user_agent(),
            'host': get_host_from_link(link)
        })

        code = resp.status_code
        if code >= 400 and not has_cloudflare_protection(resp):
            has_error = True
            error_message = f'ERR:CLT: {code} : {link}'

    except requests.exceptions.SSLError as error:
        has_error = True
        error_message = f'ERR:SSL: {error} : {link}'
    except requests.exceptions.ConnectionError as error:
        has_error = True
        error_message = f'ERR:CNT: {error} : {link}'
    except (TimeoutError, requests.exceptions.ConnectTimeout):
        has_error = True
        error_message = f'ERR:TMO: {link}'
    except requests.exceptions.TooManyRedirects as error:
        has_error = True
        error_message = f'ERR:TMR: {error} : {link}'
    except Exception as error:
        # Catch-all for anything not handled above (includes RequestException).
        has_error = True
        error_message = f'ERR:UKN: {error} : {link}'

    return (has_error, error_message)
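
# Usage sketch for check_if_link_is_working (illustrative only; the URLs are made
# up and the results depend on the network at run time):
#
#   >>> check_if_link_is_working('https://www.example.com')
#   (False, '')
#   >>> check_if_link_is_working('https://nonexistent.invalid')
#   (True, 'ERR:CNT: ... : https://nonexistent.invalid')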


def check_if_list_of_links_are_working(list_of_links: List[str]) -> List[str]:
    """Check every link in the list and collect the error messages."""

    error_messages = []
    for link in list_of_links:
        has_error, error_message = check_if_link_is_working(link)

        if has_error:
            error_messages.append(error_message)

    return error_messages


def start_duplicate_links_checker(links: List[str]) -> None:
    """Run the duplicate-links check and exit with status 1 if any are found."""

    print('Checking for duplicate links...')
    has_duplicate_link, duplicates_links = check_duplicate_links(links)

    if has_duplicate_link:
        print('Found duplicate links:')

        for duplicate_link in duplicates_links:
            print(duplicate_link)

        sys.exit(1)
    else:
        print('No duplicate links.')


def start_links_working_checker(links: List[str]) -> None:
    """Request every link and exit with status 1 if any of them fail."""

    print(f'Checking if {len(links)} links are working...')

    errors = check_if_list_of_links_are_working(links)
    if errors:
        num_errors = len(errors)
        print(f'Apparently {num_errors} links are not working properly. See below:')

        for error_message in errors:
            print(error_message)

        sys.exit(1)


def main(filename: str, only_duplicate_links_checker: bool) -> None:
    """Find the links in the given file and run the requested checks."""

    links = find_links_in_file(filename)
    start_duplicate_links_checker(links)

    if not only_duplicate_links_checker:
        start_links_working_checker(links)


if __name__ == '__main__':
    num_args = len(sys.argv)
    only_duplicate_links_checker = False

    if num_args < 2:
        print('No .md file passed')
        sys.exit(1)
    elif num_args == 3:
        third_arg = sys.argv[2].lower()

        if third_arg == '-odlc' or third_arg == '--only_duplicate_links_checker':
            only_duplicate_links_checker = True
        else:
            print(f'Invalid third argument. Usage: python {__file__} [-odlc | --only_duplicate_links_checker]')
            sys.exit(1)

    filename = sys.argv[1]

    main(filename, only_duplicate_links_checker)
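
# Command-line usage sketch (illustrative only; the script and README file names
# are assumptions about how this file is invoked in the repository):
#
#   python links.py README.md          # check duplicates, then request every link
#   python links.py README.md -odlc    # duplicates only, skip the HTTP requests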