def disable_test_get_platform():
    """Disabled test: check that taskcluster.get_platform maps a full task
    name to its platform identifier."""
    cases = {
        'test-linux64-ccov/opt-mochitest-1': 'linux',
        'test-windows10-64-ccov/debug-mochitest-1': 'windows',
    }
    for task_name, expected_platform in cases.items():
        assert taskcluster.get_platform(task_name) == expected_platform
def download_all(self):
    """Download the coverage artifacts of the relevant test tasks.

    Collects every coverage-enabled test task from the task groups of the
    build tasks in ``self.task_ids``, selects the best task for each
    (chunk, platform) pair (e.g. a 'completed' run beats a 'failed' one,
    per STATUS_VALUE), and downloads those tasks' artifacts in parallel
    into ``self.parent_dir``.
    """
    os.makedirs(self.parent_dir, exist_ok=True)

    # The test tasks for the Linux and Windows builds are in the same group,
    # but the following code is generic and supports build tasks split in
    # separate groups.
    groups = {
        taskcluster.get_task_details(build_task_id)['taskGroupId']
        for build_task_id in self.task_ids.values()
    }
    test_tasks = [
        task
        for group in groups
        for task in taskcluster.get_tasks_in_group(group)
        if taskcluster.is_coverage_task(task)
    ]

    # Choose best tasks to download (e.g. 'completed' is better than 'failed')
    download_tasks = {}
    for test_task in test_tasks:
        status = test_task['status']['state']
        assert status in ALL_STATUSES

        task_name = test_task['task']['metadata']['name']
        chunk_name = taskcluster.get_chunk(task_name)
        platform_name = taskcluster.get_platform(task_name)

        # Ignore awsy and talos as they aren't actually suites of tests.
        if any(to_ignore in chunk_name for to_ignore in self.suites_to_ignore):
            continue

        key = (chunk_name, platform_name)
        if key not in download_tasks:
            # If the chunk hasn't been downloaded before, this is obviously the best task
            # to download it from.
            download_tasks[key] = test_task
        else:
            # Otherwise, compare the status of this task with the previously selected task.
            prev_task = download_tasks[key]
            if STATUS_VALUE[status] > STATUS_VALUE[prev_task['status']['state']]:
                download_tasks[key] = test_task

    with ThreadPoolExecutorResult() as executor:
        # BUG FIX: only download the selected best task for each
        # (chunk, platform) pair. The previous code iterated over *all*
        # test_tasks here, which made the selection above dead code and
        # re-downloaded every duplicate/failed chunk.
        for test_task in download_tasks.values():
            executor.submit(self.download, test_task)

    logger.info('Code coverage artifacts downloaded')
def generate_chunk_mapping(self):
    """Build and archive a SQLite database mapping files to test chunks.

    Creates 'chunk_mapping.sqlite' with two tables:
      - file_to_chunk: source file path -> (platform, chunk), computed by
        running grcov over each chunk's coverage artifacts in parallel;
      - chunk_to_test: (platform, chunk) -> test path, retrieved from the
        ActiveData service for the current revision (best-effort).
    The database is finally packed into 'chunk_mapping.tar.xz'.
    """
    with ThreadPoolExecutor(max_workers=4) as executor:
        futures = {}
        for platform in ['linux', 'windows']:
            for chunk in self.artifactsHandler.get_chunks():
                future = executor.submit(
                    grcov.files_list,
                    self.artifactsHandler.get(platform=platform, chunk=chunk),
                    source_dir=self.repo_dir
                )
                futures[future] = (platform, chunk)

        # NOTE: the sqlite3 context manager commits/rolls back the
        # transaction; it does not close the connection.
        with sqlite3.connect('chunk_mapping.sqlite') as conn:
            c = conn.cursor()
            c.execute('CREATE TABLE file_to_chunk (path text, platform text, chunk text)')
            c.execute('CREATE TABLE chunk_to_test (platform text, chunk text, path text)')

            for future in concurrent.futures.as_completed(futures):
                (platform, chunk) = futures[future]
                files = future.result()
                c.executemany('INSERT INTO file_to_chunk VALUES (?,?,?)', ((f, platform, chunk) for f in files))

            try:
                # Retrieve chunk -> tests mapping from ActiveData.
                r = requests.post('https://activedata.allizom.org/query', data=json.dumps({
                    'from': 'unittest',
                    'where': {'and': [
                        {'eq': {'repo.branch.name': 'mozilla-central'}},
                        {'eq': {'repo.changeset.id12': self.revision[:12]}},
                        {'or': [
                            {'prefix': {'run.key': 'test-linux64-ccov'}},
                            {'prefix': {'run.key': 'test-windows10-64-ccov'}}
                        ]}
                    ]},
                    'limit': 50000,
                    'select': ['result.test', 'run.key']
                }))

                tests_data = r.json()['data']

                task_names = tests_data['run.key']
                chunk_test_iter = (
                    (taskcluster.get_platform(task_names[i]), taskcluster.get_chunk(task_names[i]), test)
                    for i, test in enumerate(tests_data['result.test'])
                )
                c.executemany('INSERT INTO chunk_to_test VALUES (?,?,?)', chunk_test_iter)
            except (KeyError, ValueError, requests.RequestException):
                # ActiveData is failing too often, so we need to ignore the error here.
                # Previously only KeyError was caught, but a network failure
                # (RequestException) or a non-JSON body (ValueError from
                # r.json()) would crash the whole mapping generation.
                logger.error('Failed to retrieve chunk to tests mapping from ActiveData.')

    with tarfile.open('chunk_mapping.tar.xz', 'w:xz') as tar:
        tar.add('chunk_mapping.sqlite')
def download(self, test_task):
    """Download the grcov/jsvm coverage archives of a single test task."""
    task_name = test_task['task']['metadata']['name']
    chunk_name = taskcluster.get_chunk(task_name)
    platform_name = taskcluster.get_platform(task_name)
    test_task_id = test_task['status']['taskId']

    # Only the coverage archives are of interest; skip every other artifact.
    wanted = ('code-coverage-grcov.zip', 'code-coverage-jsvm.zip')
    for artifact in taskcluster.get_task_artifacts(test_task_id):
        if not any(fragment in artifact['name'] for fragment in wanted):
            continue

        artifact_path = self.generate_path(platform_name, chunk_name, artifact)
        taskcluster.download_artifact(artifact_path, test_task_id, artifact['name'])
        logger.info('%s artifact downloaded' % artifact_path)