def test_get_suite():
    tests = [
        ('mochitest-1', 'mochitest'),
        ('mochitest-7', 'mochitest'),
        ('cppunit', 'cppunit'),
        ('firefox-ui-functional-remote', 'firefox-ui-functional-remote'),
    ]

    for (chunk, suite) in tests:
        assert taskcluster.get_suite(chunk) == suite
def test_get_suite(): tests = [ ("mochitest-1", "mochitest"), ("mochitest-7", "mochitest"), ("cppunit", "cppunit"), ("firefox-ui-functional-remote", "firefox-ui-functional-remote"), ("build", "build"), ] for (chunk, suite) in tests: assert taskcluster.get_suite(chunk) == suite
def test_get_suite():
    tests = [
        ('mochitest-1', 'mochitest'),
        ('mochitest-7', 'mochitest'),
        ('cppunit', 'cppunit'),
        ('firefox-ui-functional-remote', 'firefox-ui-functional-remote'),
        ('build', 'build'),
    ]

    for (chunk, suite) in tests:
        assert taskcluster.get_suite(chunk) == suite
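# The implementation of get_suite is not shown in these snippets. A minimal sketch
# consistent with the test cases above would strip a trailing numeric chunk index
# from the chunk/task name; get_suite_sketch below is hypothetical, for illustration only.
def get_suite_sketch(chunk_name):
    # 'mochitest-7' -> 'mochitest'; names without a numeric suffix
    # ('cppunit', 'firefox-ui-functional-remote', 'build') are returned unchanged.
    parts = chunk_name.split('-')
    if parts[-1].isdigit():
        return '-'.join(parts[:-1])
    return chunk_name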
def download(self, test_task):
    suite = taskcluster.get_suite(test_task["task"])
    chunk_name = taskcluster.get_chunk(test_task["task"])
    platform_name = taskcluster.get_platform(test_task["task"])
    test_task_id = test_task["status"]["taskId"]

    for artifact in taskcluster.get_task_artifacts(test_task_id):
        # Only the grcov and jsvm coverage archives are of interest.
        if not any(n in artifact["name"] for n in ["code-coverage-grcov.zip", "code-coverage-jsvm.zip"]):
            continue

        artifact_path = self.generate_path(platform_name, chunk_name, artifact)
        taskcluster.download_artifact(artifact_path, test_task_id, artifact["name"])
        logger.info("%s artifact downloaded" % artifact_path)

        self.artifacts.append(
            Artifact(artifact_path, test_task_id, platform_name, suite, chunk_name))
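# The Artifact container used by download() is not defined in these snippets. A minimal
# sketch, assuming it is a plain record with the five fields in the order they are passed
# above (the field names are hypothetical, for illustration only):
from collections import namedtuple

Artifact = namedtuple("Artifact", ["path", "task_id", "platform", "suite", "chunk"])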
def generate(repo_dir, revision, artifactsHandler, out_dir='.'):
    logger.info('Generating chunk mapping...')
    sqlite_file = os.path.join(out_dir, 'chunk_mapping.sqlite')
    tarxz_file = os.path.join(out_dir, 'chunk_mapping.tar.xz')

    with sqlite3.connect(sqlite_file) as conn:
        logger.info('Creating tables.')
        c = conn.cursor()
        c.execute('CREATE TABLE file_to_chunk (path text, platform text, chunk text)')
        c.execute('CREATE TABLE chunk_to_test (platform text, chunk text, path text)')
        c.execute('CREATE TABLE file_to_test (source text, test text)')

        # Map each source file to the tests that touched it.
        logger.info('Populating file_to_test table.')
        test_coverage_suites = get_test_coverage_suites()
        logger.info('Found {} test suites.'.format(len(test_coverage_suites)))
        for suites in group_by_20k(test_coverage_suites):
            test_coverage_tests = get_test_coverage_tests(suites)
            for tests in group_by_20k(test_coverage_tests):
                tests_files_data = get_test_coverage_files(tests)
                source_names = tests_files_data['source.file.name']
                test_iter = enumerate(tests_files_data['test.name'])
                source_test_iter = ((source_names[i], test) for i, test in test_iter)
                c.executemany('INSERT INTO file_to_test VALUES (?,?)', source_test_iter)

        with ThreadPoolExecutor(max_workers=4) as executor:
            # Parse the coverage artifacts of every chunk in parallel to find
            # which source files each (platform, chunk) pair covers.
            futures = {}
            for platform in PLATFORMS:
                logger.info('Reading chunk coverage artifacts for {}.'.format(platform))
                for chunk in artifactsHandler.get_chunks(platform):
                    suite = taskcluster.get_suite(chunk)
                    if not is_chunk_only_suite(suite):
                        continue

                    assert chunk.strip() != '', 'chunk can not be an empty string'
                    artifacts = artifactsHandler.get(platform=platform, chunk=chunk)
                    assert len(artifacts) > 0, 'There should be at least one artifact'
                    future = executor.submit(grcov.files_list, artifacts, source_dir=repo_dir)
                    futures[future] = (platform, chunk)

                # Map each chunk to the tests it runs on this platform.
                logger.info('Populating chunk_to_test table for {}.'.format(platform))
                for suite in get_suites(revision):
                    if not is_chunk_only_suite(suite):
                        continue

                    tests_data = get_tests_chunks(revision, platform, suite)
                    if len(tests_data) == 0:
                        logger.warn('No tests found for platform {} and suite {}.'.format(platform, suite))
                        continue

                    logger.info('Adding tests for platform {} and suite {}'.format(platform, suite))
                    task_names = tests_data['run.key']
                    test_iter = enumerate(tests_data['result.test'])
                    chunk_test_iter = ((platform, taskcluster.get_chunk(task_names[i]), test) for i, test in test_iter)
                    c.executemany('INSERT INTO chunk_to_test VALUES (?,?,?)', chunk_test_iter)

        logger.info('Populating file_to_chunk table.')
        for future in concurrent.futures.as_completed(futures):
            (platform, chunk) = futures[future]
            files = future.result()
            c.executemany('INSERT INTO file_to_chunk VALUES (?,?,?)', ((f, platform, chunk) for f in files))

    logger.info('Writing the chunk mapping archive at {}.'.format(tarxz_file))
    with tarfile.open(tarxz_file, 'w:xz') as tar:
        tar.add(sqlite_file, os.path.basename(sqlite_file))
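# A hedged usage example: once generate() has produced chunk_mapping.sqlite, the
# file_to_chunk table can be queried to find which chunks cover a given source file.
# chunks_covering_file is illustrative only and not part of the project; the path and
# platform arguments are placeholders.
import sqlite3

def chunks_covering_file(sqlite_file, path, platform):
    with sqlite3.connect(sqlite_file) as conn:
        c = conn.cursor()
        c.execute('SELECT chunk FROM file_to_chunk WHERE path = ? AND platform = ?',
                  (path, platform))
        return [row[0] for row in c.fetchall()]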
def generate(repo_dir, revision, artifactsHandler, out_dir='.'):
    logger.info('Generating chunk mapping...')
    sqlite_file = os.path.join(out_dir, 'chunk_mapping.sqlite')
    tarxz_file = os.path.join(out_dir, 'chunk_mapping.tar.xz')

    with sqlite3.connect(sqlite_file) as conn:
        logger.info('Creating tables.')
        c = conn.cursor()
        c.execute('CREATE TABLE file_to_chunk (path text, platform text, chunk text)')
        c.execute('CREATE TABLE chunk_to_test (platform text, chunk text, path text)')
        c.execute('CREATE TABLE file_to_test (source text, test text)')

        # Map each source file to the tests that touched it.
        logger.info('Populating file_to_test table.')
        test_coverage_suites = get_test_coverage_suites()
        logger.info('Found {} test suites.'.format(len(test_coverage_suites)))
        for suites in group_by_20k(test_coverage_suites):
            test_coverage_tests = get_test_coverage_tests(suites)
            for tests in group_by_20k(test_coverage_tests):
                tests_files_data = get_test_coverage_files(tests)
                source_names = tests_files_data['source.file.name']
                test_iter = enumerate(tests_files_data['test.name'])
                source_test_iter = ((source_names[i], test) for i, test in test_iter)
                c.executemany('INSERT INTO file_to_test VALUES (?,?)', source_test_iter)

        with ThreadPoolExecutor(max_workers=4) as executor:
            # Parse the coverage artifacts of every chunk in parallel to find
            # which source files each (platform, chunk) pair covers.
            futures = {}
            for platform in PLATFORMS:
                logger.info('Reading chunk coverage artifacts for {}.'.format(platform))
                for chunk in artifactsHandler.get_chunks():
                    suite = taskcluster.get_suite(chunk)
                    if not is_chunk_only_suite(suite):
                        continue

                    future = executor.submit(grcov.files_list, artifactsHandler.get(platform=platform, chunk=chunk), source_dir=repo_dir)
                    futures[future] = (platform, chunk)

                # Map each chunk to the tests it runs on this platform.
                logger.info('Populating chunk_to_test table for {}.'.format(platform))
                for suite in get_suites(revision):
                    if not is_chunk_only_suite(suite):
                        continue

                    tests_data = get_tests_chunks(revision, platform, suite)
                    if len(tests_data) == 0:
                        logger.warn('No tests found for platform {} and suite {}.'.format(platform, suite))
                        continue

                    logger.info('Adding tests for platform {} and suite {}'.format(platform, suite))
                    task_names = tests_data['run.key']
                    test_iter = enumerate(tests_data['result.test'])
                    chunk_test_iter = ((platform, taskcluster.get_chunk(task_names[i]), test) for i, test in test_iter)
                    c.executemany('INSERT INTO chunk_to_test VALUES (?,?,?)', chunk_test_iter)

        logger.info('Populating file_to_chunk table.')
        for future in concurrent.futures.as_completed(futures):
            (platform, chunk) = futures[future]
            files = future.result()
            c.executemany('INSERT INTO file_to_chunk VALUES (?,?,?)', ((f, platform, chunk) for f in files))

    logger.info('Writing the chunk mapping archive at {}.'.format(tarxz_file))
    with tarfile.open(tarxz_file, 'w:xz') as tar:
        tar.add(sqlite_file, os.path.basename(sqlite_file))
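# group_by_20k, used by both generate() variants above, is not defined in these snippets.
# Its name suggests it splits a large iterable into batches of at most 20,000 items so
# that each downstream query stays small. A minimal sketch under that assumption
# (group_by_20k_sketch is hypothetical, for illustration only):
def group_by_20k_sketch(iterable, size=20000):
    batch = []
    for item in iterable:
        batch.append(item)
        if len(batch) == size:
            yield batch
            batch = []
    if batch:
        yield batch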
def test_get_suite(task_name, expected):
    task = json.load(open(os.path.join(FIXTURES_DIR, f"{task_name}.json")))
    assert taskcluster.get_suite(task) == expected
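# This variant of test_get_suite takes (task_name, expected) as arguments, which suggests
# it is driven by pytest parametrization over JSON task fixtures under FIXTURES_DIR.
# A hedged sketch of that wiring; the fixture names and expected values below are
# placeholders, not real files, and json/os/FIXTURES_DIR/taskcluster are assumed to come
# from the same test module as above.
import pytest

@pytest.mark.parametrize(
    "task_name, expected",
    [
        ("mochitest-task", "mochitest"),  # placeholder fixture name
        ("build-task", "build"),          # placeholder fixture name
    ],
)
def test_get_suite_example(task_name, expected):
    task = json.load(open(os.path.join(FIXTURES_DIR, f"{task_name}.json")))
    assert taskcluster.get_suite(task) == expected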
def _inner_generate(repo_dir, revision, artifactsHandler, per_test_cursor, per_chunk_cursor, executor):
    per_test_cursor.execute(
        "CREATE TABLE file_to_chunk (path text, platform text, chunk text)")
    per_test_cursor.execute(
        "CREATE TABLE chunk_to_test (platform text, chunk text, path text)")
    per_test_cursor.execute(
        "CREATE TABLE file_to_test (source text, test text)")

    per_chunk_cursor.execute(
        "CREATE TABLE file_to_chunk (path text, platform text, chunk text)")
    per_chunk_cursor.execute(
        "CREATE TABLE chunk_to_test (platform text, chunk text, path text)")

    # Map each source file to the tests that touched it (per-test database only).
    logger.info("Populating file_to_test table.")
    test_coverage_suites = get_test_coverage_suites()
    logger.info("Found {} test suites.".format(len(test_coverage_suites)))
    for suites in group_by_20k(test_coverage_suites):
        test_coverage_tests = get_test_coverage_tests(suites)
        for tests in group_by_20k(test_coverage_tests):
            tests_files_data = get_test_coverage_files(tests)
            source_names = tests_files_data["source.file.name"]
            test_iter = enumerate(tests_files_data["test.name"])
            source_test_iter = ((source_names[i], test) for i, test in test_iter)
            per_test_cursor.executemany(
                "INSERT INTO file_to_test VALUES (?,?)", source_test_iter)

    # Parse the coverage artifacts of every chunk in parallel to find
    # which source files each (platform, chunk) pair covers.
    futures = {}
    for platform in PLATFORMS:
        logger.info(
            "Reading chunk coverage artifacts for {}.".format(platform))
        for chunk in artifactsHandler.get_chunks(platform):
            assert chunk.strip() != "", "chunk can not be an empty string"
            artifacts = artifactsHandler.get(platform=platform, chunk=chunk)
            assert len(artifacts) > 0, "There should be at least one artifact"
            future = executor.submit(grcov.files_list, artifacts, source_dir=repo_dir)
            futures[future] = (platform, chunk)

        # Map each chunk to the tests it runs on this platform.
        logger.info("Populating chunk_to_test table for {}.".format(platform))
        for suite in get_suites(revision):
            tests_data = get_tests_chunks(revision, platform, suite)
            if len(tests_data) == 0:
                logger.warn(
                    "No tests found for platform {} and suite {}.".format(
                        platform, suite))
                continue

            logger.info("Adding tests for platform {} and suite {}".format(
                platform, suite))

            task_names = tests_data["run.key"]

            def chunk_test_iter():
                test_iter = enumerate(tests_data["result.test"])
                return ((platform, taskcluster.get_chunk(task_names[i]), test)
                        for i, test in test_iter)

            # Chunk-granularity suites go in both databases; the rest only in per-chunk.
            if is_chunk_only_suite(suite):
                per_test_cursor.executemany(
                    "INSERT INTO chunk_to_test VALUES (?,?,?)", chunk_test_iter())

            per_chunk_cursor.executemany(
                "INSERT INTO chunk_to_test VALUES (?,?,?)", chunk_test_iter())

    logger.info("Populating file_to_chunk table.")
    for future in concurrent.futures.as_completed(futures):
        (platform, chunk) = futures[future]
        files = future.result()
        suite = taskcluster.get_suite(chunk)

        if is_chunk_only_suite(suite):
            per_test_cursor.executemany(
                "INSERT INTO file_to_chunk VALUES (?,?,?)",
                ((f, platform, chunk) for f in files),
            )

        per_chunk_cursor.executemany(
            "INSERT INTO file_to_chunk VALUES (?,?,?)",
            ((f, platform, chunk) for f in files),
        )
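# _inner_generate expects two open sqlite cursors and an executor. A minimal driver
# sketch modelled on the earlier generate() variants; the file names, archive layout and
# worker count here are assumptions for illustration, not the project's actual wiring,
# and os/sqlite3/tarfile/ThreadPoolExecutor are assumed from the same module imports.
def generate_sketch(repo_dir, revision, artifactsHandler, out_dir="."):
    per_test_file = os.path.join(out_dir, "per_test.sqlite")
    per_chunk_file = os.path.join(out_dir, "per_chunk.sqlite")

    with sqlite3.connect(per_test_file) as per_test_conn, \
         sqlite3.connect(per_chunk_file) as per_chunk_conn:
        with ThreadPoolExecutor(max_workers=4) as executor:
            _inner_generate(
                repo_dir,
                revision,
                artifactsHandler,
                per_test_conn.cursor(),
                per_chunk_conn.cursor(),
                executor,
            )

    # Package each database the same way generate() archives chunk_mapping.sqlite.
    for sqlite_file in (per_test_file, per_chunk_file):
        tarxz_file = sqlite_file.replace(".sqlite", ".tar.xz")
        with tarfile.open(tarxz_file, "w:xz") as tar:
            tar.add(sqlite_file, os.path.basename(sqlite_file))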