1111from github_job_summary import JobSummary
1212from subdomains import Subdomains
1313from curl_wrapper import EXIT_CODES as CURL_EXIT_CODES
14- from curl_wrapper import CurlWrapper
1514from url_checker import UrlChecker
1615
1716"""
2120Check them with CURL
2221"""
2322
24- JOIN_TIMEOUT_SEC = 120
23+ JOIN_TIMEOUT_SEC : int = 120
2524
26- CURL_EXIT_CODES_AND_HTTP_CODES = {
25+ CURL_EXIT_CODES_AND_HTTP_CODES : dict [ str , tuple [ int , int | None ]] = {
2726 "https://api.aspose.cloud/connect/token" : (CURL_EXIT_CODES .HTTP_RETURNED_ERROR , 400 ),
2827 "https://api.aspose.cloud/v3.0" : (CURL_EXIT_CODES .HTTP_RETURNED_ERROR , 404 ),
2928 "https://api.aspose.cloud/v4.0" : (CURL_EXIT_CODES .HTTP_RETURNED_ERROR , 404 ),
4544 ]
4645)
4746
48- IGNORE_DOMAINS = Subdomains (
47+ IGNORE_DOMAINS : Subdomains = Subdomains (
4948 [
5049 ".android.com" ,
5150 ".apache.org" ,
8281 ]
8382)
8483
85- URL_END_CHARS = r",#\)\"'<>\*\s\\"
86- URL_RE_PATTERN = r"(https*://[^{0}]+)[{0}]?" .format (URL_END_CHARS )
84+ URL_END_CHARS : str = r",#\)\"'<>\*\s\\"
85+ URL_RE_PATTERN : str = r"(https*://[^{0}]+)[{0}]?" .format (URL_END_CHARS )
8786# print(URL_RE_PATTERN)
88- EXTRACT_URL_REGEX = re .compile (URL_RE_PATTERN , re .MULTILINE )
87+ EXTRACT_URL_REGEX : re . Pattern [ str ] = re .compile (URL_RE_PATTERN , re .MULTILINE )
8988
9089# URL : [Files]
9190EXTRACTED_URLS_WITH_FILES : dict [str , list [str ]] = {k : [] for k in URLS_TO_IGNORE }
@@ -129,7 +128,7 @@ def url_extractor(text: str, filename: str) -> typing.Generator[str, None, None]
129128 EXTRACTED_URLS_WITH_FILES [url ].append (filename )
130129
131130
132- FILES_TO_IGNORE = frozenset (
131+ FILES_TO_IGNORE : frozenset [ str ] = frozenset (
133132 [
134133 ".jar" ,
135134 ".jar" ,
@@ -154,38 +153,13 @@ def text_extractor(files: list[str]) -> typing.Generator[tuple[str, str], None,
154153 raise
155154
156155
def process_finished_task(task) -> None:
    """Record the outcome of one finished curl task in the job summary.

    A task is counted as a success when:
      * its curl exit code is 0, or matches the exit code expected for
        this URL in CURL_EXIT_CODES_AND_HTTP_CODES, or
      * curl exited with HTTP_RETURNED_ERROR and the HTTP status parsed
        from its stderr matches the expected HTTP code for this URL.

    Any other outcome is printed to stderr and reported as a broken URL,
    together with the files that referenced it.

    :param task: finished check task; read attributes: url, ret_code,
        stderr, age (seconds).  Exact type defined by the checker —
        presumably a CurlWrapper task; confirm against the caller.
    :raises ValueError: if curl's stderr cannot be parsed for an HTTP code.
    """
    expected_ret_code, expected_http_code = CURL_EXIT_CODES_AND_HTTP_CODES.get(task.url, (0, None))
    if task.ret_code in (0, expected_ret_code):
        print("OK:", "'%s' %.2fs" % (task.url, task.age))
        JOB_SUMMARY.add_success(task.url)
        return

    if task.ret_code == CURL_EXIT_CODES.HTTP_RETURNED_ERROR and expected_http_code:
        # curl reports the HTTP status on stderr; parse it to compare
        # against the expected code for this URL.
        match = CurlWrapper.CURL_STDERR_HTTP_RE.match(task.stderr)
        if match is None:
            # Explicit raise instead of `assert`: asserts are stripped
            # under `python -O`, which would silently skip validation of
            # external subprocess output.
            raise ValueError("Unexpected output: %s" % task.stderr)
        http_code = int(match.groupdict()["http_code"])
        if http_code == expected_http_code:
            print("OK HTTP:", "'%s' %.2fs" % (task.url, task.age))
            JOB_SUMMARY.add_success(task.url)
            return

    print(
        "Expected %d got %d for '%s': %s" % (expected_ret_code, task.ret_code, task.url, task.stderr),
        file=sys.stderr,
    )
    JOB_SUMMARY.add_error(f"Broken URL '{task.url}': {task.stderr} Files: {EXTRACTED_URLS_WITH_FILES[task.url]}")
181-
182- JOB_SUMMARY = JobSummary (os .environ .get ("GITHUB_STEP_SUMMARY" , "step_summary.md" ))
156+ JOB_SUMMARY : JobSummary = JobSummary (os .environ .get ("GITHUB_STEP_SUMMARY" , "step_summary.md" ))
183157JOB_SUMMARY .add_header ("Test all URLs" )
184158
185159
186160def main (files : list [str ]) -> int :
187161 url_checker = UrlChecker (
188- on_finish = process_finished_task ,
162+ expectations = CURL_EXIT_CODES_AND_HTTP_CODES ,
189163 )
190164
191165 # Setup signal handlers for graceful shutdown
@@ -212,6 +186,14 @@ def _handle_signal(_sig: int, _frame: typing.Any) -> None:
212186 flush = True ,
213187 )
214188
189+ # Collect results and write summary
190+ for res in url_checker .results :
191+ if res .ok :
192+ JOB_SUMMARY .add_success (res .url )
193+ else :
194+ files = EXTRACTED_URLS_WITH_FILES .get (res .url , [])
195+ JOB_SUMMARY .add_error (f"Broken URL '{ res .url } ': { res .stderr } Files: { files } " )
196+
215197 JOB_SUMMARY .finalize ("Checked {total} failed **{failed}**\n Good={success}" )
216198 if JOB_SUMMARY .has_errors :
217199 print (JOB_SUMMARY , file = sys .stderr , flush = True )
0 commit comments