def get_tests(
    workflow_run_id: int, workflow_run_attempt: int
) -> Tuple[List[Dict[str, Any]], Dict[Any, Any]]:
    """Download and parse all test reports for one workflow run attempt.

    Fetches the "test-report" artifacts from both S3 and GitHub Actions,
    unzips them into a scratch directory, parses every ``*.xml`` JUnit
    report found, and gathers pytest parallel-timing info.

    Args:
        workflow_run_id: GitHub Actions workflow run to fetch reports for.
        workflow_run_attempt: Attempt number within that run.

    Returns:
        A tuple ``(test_cases, pytest_parallel_times)`` where ``test_cases``
        is the flat list of parsed testcase dicts and
        ``pytest_parallel_times`` is whatever ``get_pytest_parallel_times``
        reports (schema defined by that helper — see its definition).
    """
    original_cwd = os.getcwd()
    with TemporaryDirectory() as temp_dir:
        print("Using temporary directory:", temp_dir)
        os.chdir(temp_dir)
        try:
            # Download and extract all the reports (both GHA and S3).
            s3_paths = download_s3_artifacts(
                "test-report", workflow_run_id, workflow_run_attempt
            )
            for path in s3_paths:
                unzip(path)

            artifact_paths = download_gha_artifacts(
                "test-report", workflow_run_id, workflow_run_attempt
            )
            for path in artifact_paths:
                unzip(path)

            # Parse the reports and transform them to JSON.
            test_cases = []
            for xml_report in Path(".").glob("**/*.xml"):
                test_cases.extend(
                    parse_xml_report(
                        "testcase",
                        xml_report,
                        workflow_run_id,
                        workflow_run_attempt,
                    )
                )

            pytest_parallel_times = get_pytest_parallel_times()

            return test_cases, pytest_parallel_times
        finally:
            # Restore the caller's cwd BEFORE TemporaryDirectory cleanup:
            # otherwise the process is left sitting in a deleted directory
            # (and on Windows, deleting the current directory fails).
            os.chdir(original_cwd)
def get_sccache_stats(
    workflow_run_id: int, workflow_run_attempt: int
) -> List[Dict[str, Any]]:
    """Download and parse all sccache stats for one workflow run attempt.

    Fetches the "sccache-stats" artifacts from both S3 and GitHub Actions
    into a scratch directory (only the GHA artifacts are zipped and need
    extraction), then loads every ``*.json`` file found.

    Args:
        workflow_run_id: GitHub Actions workflow run to fetch stats for.
        workflow_run_attempt: Attempt number within that run.

    Returns:
        A list of the parsed JSON stats objects, one per file.
    """
    original_cwd = os.getcwd()
    with TemporaryDirectory() as temp_dir:
        print("Using temporary directory:", temp_dir)
        os.chdir(temp_dir)
        try:
            # Download and extract all the reports (both GHA and S3).
            # S3 sccache-stats artifacts are plain JSON, so no unzip needed.
            download_s3_artifacts(
                "sccache-stats", workflow_run_id, workflow_run_attempt
            )
            artifact_paths = download_gha_artifacts(
                "sccache-stats", workflow_run_id, workflow_run_attempt
            )
            for path in artifact_paths:
                unzip(path)

            stats_jsons = []
            for json_file in Path(".").glob("**/*.json"):
                with open(json_file) as f:
                    stats_jsons.append(json.load(f))
            return stats_jsons
        finally:
            # Restore the caller's cwd BEFORE TemporaryDirectory cleanup:
            # otherwise the process is left sitting in a deleted directory
            # (and on Windows, deleting the current directory fails).
            os.chdir(original_cwd)