def test_download_all_ignore(LINUX_TASK_ID, LINUX_TASK, GROUP_TASKS_1, GROUP_TASKS_2, FAKE_ARTIFACTS_DIR):
    """Tasks matching the filter list (presumably ignored suites) are not downloaded."""
    # Mock the Taskcluster queue endpoints used by download_all().
    responses.add(
        responses.GET,
        'https://queue.taskcluster.net/v1/task/{}'.format(LINUX_TASK_ID),
        json=LINUX_TASK,
        status=200)
    for group_tasks in _group_tasks():
        responses.add(
            responses.GET,
            'https://queue.taskcluster.net/v1/task-group/aPt9FbIdQwmhwDIPDYLuaw/list',
            json=group_tasks,
            status=200)

    handler = ArtifactsHandler({'linux': LINUX_TASK_ID}, ['talos', 'xpcshell'],
                               parent_dir=FAKE_ARTIFACTS_DIR)

    # Record which tasks download_all() would fetch instead of hitting the network.
    seen = set()

    def record_download(task):
        seen.add(task['status']['taskId'])

    handler.download = record_download
    handler.download_all()

    # The xpcshell task must have been filtered out.
    assert seen == {
        'test-linux64-ccov/debug-mochitest-devtools-chrome-e10s-4-completed',
        'test-linux64-ccov/debug-cppunit-completed',
    }
def test_download_all(LINUX_TASK_ID, LINUX_TASK, GROUP_TASKS_1, GROUP_TASKS_2, FAKE_ARTIFACTS_DIR):
    """Every coverage task in the task group is handed to download()."""
    # Stub the Taskcluster queue endpoints.
    responses.add(
        responses.GET,
        f"https://queue.taskcluster.net/v1/task/{LINUX_TASK_ID}",
        json=LINUX_TASK,
        status=200,
    )
    for group_tasks in _group_tasks():
        responses.add(
            responses.GET,
            "https://queue.taskcluster.net/v1/task-group/aPt9FbIdQwmhwDIPDYLuaw/list",
            json=group_tasks,
            status=200,
        )

    handler = ArtifactsHandler({"linux": LINUX_TASK_ID}, parent_dir=FAKE_ARTIFACTS_DIR)

    # Capture the task ids download_all() tries to fetch.
    seen = set()

    def record_download(task):
        seen.add(task["status"]["taskId"])

    handler.download = record_download
    handler.download_all()

    assert seen == {
        "test-linux64-ccov/debug-mochitest-devtools-chrome-e10s-4-completed",
        "test-windows10-64-ccov/debug-xpcshell-4-failed",
        "test-linux64-ccov/debug-cppunit-completed",
    }
def test_get_coverage_artifacts(FAKE_ARTIFACTS_DIR):
    """get() filters the stored artifacts by suite, chunk and platform."""

    def in_dir(files):
        # Expected results are paths inside the fake artifacts directory.
        return {os.path.join(FAKE_ARTIFACTS_DIR, f) for f in files}

    handler = ArtifactsHandler([], parent_dir=FAKE_ARTIFACTS_DIR)

    # No filter: every artifact comes back.
    assert set(handler.get()) == in_dir(FILES)

    assert set(handler.get(suite="mochitest")) == in_dir([
        "windows_mochitest-1_code-coverage-jsvm.info",
        "linux_mochitest-2_code-coverage-grcov.zip",
    ])
    assert set(handler.get(chunk="xpcshell-7")) == in_dir([
        "windows_xpcshell-7_code-coverage-jsvm.info",
        "linux_xpcshell-7_code-coverage-grcov.zip",
    ])
    assert set(handler.get(chunk="cppunit")) == in_dir([
        "windows_cppunit_code-coverage-grcov.zip",
    ])
    assert set(handler.get(platform="windows")) == in_dir([
        "windows_mochitest-1_code-coverage-jsvm.info",
        "windows_xpcshell-7_code-coverage-jsvm.info",
        "windows_cppunit_code-coverage-grcov.zip",
    ])
    assert set(handler.get(platform="linux", chunk="xpcshell-7")) == in_dir([
        "linux_xpcshell-7_code-coverage-grcov.zip",
    ])

    # suite and chunk are mutually exclusive filters.
    with pytest.raises(Exception, match="suite and chunk can't both have a value"):
        handler.get(chunk="xpcshell-7", suite="mochitest")
def test_get_coverage_artifacts(FAKE_ARTIFACTS_DIR):
    """get() filters the stored artifacts by suite, chunk and platform."""
    def add_dir(files):
        # Expected results are paths inside the fake artifacts directory.
        return set([os.path.join(FAKE_ARTIFACTS_DIR, f) for f in files])

    a = ArtifactsHandler([], [], parent_dir=FAKE_ARTIFACTS_DIR)
    assert set(a.get()) == add_dir(FILES)
    assert set(a.get(suite='mochitest')) == add_dir([
        'windows_mochitest-1_code-coverage-jsvm.info',
        'linux_mochitest-2_code-coverage-grcov.zip'
    ])
    assert set(a.get(chunk='xpcshell-7')) == add_dir([
        'windows_xpcshell-7_code-coverage-jsvm.info',
        'linux_xpcshell-7_code-coverage-grcov.zip'
    ])
    assert set(a.get(chunk='cppunit')) == add_dir(
        ['windows_cppunit_code-coverage-grcov.zip'])
    assert set(a.get(platform='windows')) == add_dir([
        'windows_mochitest-1_code-coverage-jsvm.info',
        'windows_xpcshell-7_code-coverage-jsvm.info',
        'windows_cppunit_code-coverage-grcov.zip',
    ])
    assert set(a.get(platform='linux', chunk='xpcshell-7')) == add_dir(
        ['linux_xpcshell-7_code-coverage-grcov.zip'])

    # Fixed: pytest.raises' `message` kwarg never asserted on the exception text
    # (it only customized the failure message) and was removed in pytest 5.0;
    # `match` actually checks the raised exception's message against a regex.
    with pytest.raises(Exception,
                       match='suite and chunk can\'t both have a value'):
        a.get(chunk='xpcshell-7', suite='mochitest')
def __init__(self, repository, revision, task_name_filter, cache_root, client_id, access_token):
    """Set up one coverage ingestion run.

    repository/revision: hg repository URL and changeset to process. When
    revision is None, the latest revision already ingested by codecov is
    looked up instead (cron mode, from_pulse=False).
    task_name_filter: forwarded to ArtifactsHandler to restrict downloads.
    cache_root: existing directory holding the shared repository clones.
    client_id/access_token: Taskcluster credentials.
    """
    # List of test-suite, sorted alphabetically.
    # This way, the index of a suite in the array should be stable enough.
    self.suites = [
        'web-platform-tests',
    ]

    self.cache_root = cache_root

    # Per-run scratch space for downloaded artifacts and generated reports.
    temp_dir = tempfile.mkdtemp()
    self.artifacts_dir = os.path.join(temp_dir, 'ccov-artifacts')
    self.ccov_reports_dir = os.path.join(temp_dir, 'code-coverage-reports')

    self.client_id = client_id
    self.access_token = access_token

    self.index_service = get_service('index', client_id, access_token)

    self.githubUtils = GitHubUtils(cache_root, client_id, access_token)

    if revision is None:
        # Retrieve revision of latest codecov build
        self.github_revision = uploader.get_latest_codecov()
        self.repository = MOZILLA_CENTRAL_REPOSITORY
        self.revision = self.githubUtils.git_to_mercurial(
            self.github_revision)
        self.from_pulse = False
    else:
        self.github_revision = None
        self.repository = repository
        self.revision = revision
        self.from_pulse = True

    # Branch name == repository URL with the HG_BASE prefix stripped.
    self.branch = self.repository[len(HG_BASE):]

    assert os.path.isdir(cache_root), 'Cache root {} is not a dir.'.format(
        cache_root)
    self.repo_dir = os.path.join(cache_root, self.branch)

    logger.info('Mercurial revision', revision=self.revision)

    # Look up the indexed coverage build task per platform.
    task_ids = {}
    for platform in [
            'linux', 'windows', 'android-test', 'android-emulator'
    ]:
        task = taskcluster.get_task(self.branch, self.revision, platform)

        # On try, developers might have requested to run only one platform, and we trust them.
        # On mozilla-central, we want to assert that every platform was run (except for android platforms
        # as they are unstable).
        if task is not None:
            task_ids[platform] = task
        elif self.repository == MOZILLA_CENTRAL_REPOSITORY and not platform.startswith(
                'android'):
            raise Exception(
                'Code coverage build failed and was not indexed.')

    self.artifactsHandler = ArtifactsHandler(task_ids, self.artifacts_dir,
                                             task_name_filter)
def test_get_chunks(FAKE_ARTIFACTS_DIR):
    """get_chunks() returns the chunk names present for a given platform."""
    handler = ArtifactsHandler([], parent_dir=FAKE_ARTIFACTS_DIR)

    expected = {
        'windows': {'mochitest-1', 'xpcshell-7', 'cppunit'},
        'linux': {
            'mochitest-2',
            'xpcshell-3',
            'xpcshell-7',
            'firefox-ui-functional-remote',
        },
    }
    for platform, chunks in expected.items():
        assert handler.get_chunks(platform) == chunks
def test_get_chunks(FAKE_ARTIFACTS_DIR):
    """Chunk discovery is scoped per platform."""
    handler = ArtifactsHandler([], parent_dir=FAKE_ARTIFACTS_DIR)

    windows_chunks = handler.get_chunks("windows")
    assert windows_chunks == {"cppunit", "mochitest-1", "xpcshell-7"}

    linux_chunks = handler.get_chunks("linux")
    assert linux_chunks == {
        "firefox-ui-functional-remote",
        "mochitest-2",
        "xpcshell-3",
        "xpcshell-7",
    }
def test_get_chunks(FAKE_ARTIFACTS_DIR):
    """get_chunks() lists every chunk name across all platforms."""
    handler = ArtifactsHandler([], [], parent_dir=FAKE_ARTIFACTS_DIR)
    assert set(handler.get_chunks()) == {
        'mochitest-1',
        'mochitest-2',
        'xpcshell-3',
        'xpcshell-7',
        'cppunit',
        'firefox-ui-functional-remote',
    }
def test_get_chunks(fake_artifacts):
    """Chunks are reported per platform from the injected artifact list."""
    handler = ArtifactsHandler([])
    # Inject the fixture artifacts directly instead of downloading anything.
    handler.artifacts = fake_artifacts

    assert handler.get_chunks("windows") == {"cppunit", "mochitest-1", "xpcshell-7"}
    assert handler.get_chunks("linux") == {
        "firefox-ui-functional-remote",
        "mochitest-2",
        "xpcshell-3",
        "xpcshell-7",
    }
def test_generate_path(fake_artifacts):
    """generate_path() builds '<platform>_<chunk>_<artifact name>' under parent_dir."""
    handler = ArtifactsHandler([])

    jsvm_artifact = {"name": "code-coverage-jsvm.info"}
    grcov_artifact = {"name": "code-coverage-grcov.zip"}

    expected_jsvm = os.path.join(
        handler.parent_dir, "linux_xpcshell-3_code-coverage-jsvm.info"
    )
    assert handler.generate_path("linux", "xpcshell-3", jsvm_artifact) == expected_jsvm

    expected_grcov = os.path.join(
        handler.parent_dir, "windows_cppunit_code-coverage-grcov.zip"
    )
    assert handler.generate_path("windows", "cppunit", grcov_artifact) == expected_grcov
def __init__(self, repository, revision, task_name_filter, cache_root):
    """Set up one coverage ingestion run.

    repository/revision: hg repository URL and changeset to process. When
    revision is None, the latest revision whose reports were already ingested
    (per uploader.gcp_latest) is used instead (cron mode, from_pulse=False).
    task_name_filter: forwarded to ArtifactsHandler to restrict downloads.
    cache_root: existing directory holding the shared repository clones.
    """
    # List of test-suite, sorted alphabetically.
    # This way, the index of a suite in the array should be stable enough.
    self.suites = ["web-platform-tests"]

    self.cache_root = cache_root

    # Per-run scratch space for downloaded artifacts.
    temp_dir = tempfile.mkdtemp()
    self.artifacts_dir = os.path.join(temp_dir, "ccov-artifacts")

    self.index_service = taskcluster_config.get_service("index")

    if revision is None:
        # Retrieve latest ingested revision
        self.repository = MOZILLA_CENTRAL_REPOSITORY
        try:
            self.revision = uploader.gcp_latest("mozilla-central")[0]["revision"]
        except Exception as e:
            logger.warn(
                "Failed to retrieve the latest reports ingested: {}".format(e)
            )
            raise
        self.from_pulse = False
    else:
        self.repository = repository
        self.revision = revision
        self.from_pulse = True

    # Branch name == repository URL with the HG_BASE prefix stripped.
    self.branch = self.repository[len(HG_BASE):]

    assert os.path.isdir(cache_root), "Cache root {} is not a dir.".format(
        cache_root
    )
    self.repo_dir = os.path.join(cache_root, self.branch)

    logger.info("Mercurial revision", revision=self.revision)

    # Look up the indexed coverage build task per platform.
    task_ids = {}
    for platform in ["linux", "windows", "android-test", "android-emulator"]:
        task = taskcluster.get_task(self.branch, self.revision, platform)

        # On try, developers might have requested to run only one platform, and we trust them.
        # On mozilla-central, we want to assert that every platform was run (except for android platforms
        # as they are unstable).
        if task is not None:
            task_ids[platform] = task
        elif (
            self.repository == MOZILLA_CENTRAL_REPOSITORY
            and not platform.startswith("android")
        ):
            raise Exception("Code coverage build failed and was not indexed.")

    self.artifactsHandler = ArtifactsHandler(
        task_ids, self.artifacts_dir, task_name_filter
    )
def test_get_coverage_artifacts(FAKE_ARTIFACTS_DIR):
    """get() filters the stored artifacts by suite, chunk and platform."""
    def add_dir(files):
        # Expected results are paths inside the fake artifacts directory.
        return set([os.path.join(FAKE_ARTIFACTS_DIR, f) for f in files])

    a = ArtifactsHandler([], parent_dir=FAKE_ARTIFACTS_DIR)
    assert set(a.get()) == add_dir(FILES)
    assert set(a.get(suite='mochitest')) == add_dir([
        'windows_mochitest-1_code-coverage-jsvm.info',
        'linux_mochitest-2_code-coverage-grcov.zip'
    ])
    assert set(a.get(chunk='xpcshell-7')) == add_dir([
        'windows_xpcshell-7_code-coverage-jsvm.info',
        'linux_xpcshell-7_code-coverage-grcov.zip'
    ])
    assert set(a.get(chunk='cppunit')) == add_dir([
        'windows_cppunit_code-coverage-grcov.zip'
    ])
    assert set(a.get(platform='windows')) == add_dir([
        'windows_mochitest-1_code-coverage-jsvm.info',
        'windows_xpcshell-7_code-coverage-jsvm.info',
        'windows_cppunit_code-coverage-grcov.zip',
    ])
    assert set(a.get(platform='linux', chunk='xpcshell-7')) == add_dir([
        'linux_xpcshell-7_code-coverage-grcov.zip'
    ])

    # Fixed: pytest.raises' `message` kwarg never asserted on the exception text
    # (it only customized the failure message) and was removed in pytest 5.0;
    # `match` actually checks the raised exception's message against a regex.
    with pytest.raises(Exception, match='suite and chunk can\'t both have a value'):
        a.get(chunk='xpcshell-7', suite='mochitest')
def __init__(
    self,
    repository,
    revision,
    task_name_filter,
    cache_root,
    working_dir,
    required_platforms=[],  # NOTE(review): mutable default — only iterated here, never mutated
):
    """Set up one coverage ingestion run from a decision task's task group.

    repository/revision: hg repository URL and changeset to process (both
    mandatory here).
    task_name_filter: forwarded to ArtifactsHandler to restrict downloads.
    cache_root: optional directory holding the shared repository clones.
    working_dir: local directory for artifacts and generated reports.
    required_platforms: platforms that must appear in the task group.
    """
    # Local working directories for downloaded artifacts and built reports.
    os.makedirs(working_dir, exist_ok=True)
    self.artifacts_dir = os.path.join(working_dir, "ccov-artifacts")
    self.reports_dir = os.path.join(working_dir, "ccov-reports")
    logger.info(
        "Local storage initialized.",
        artifacts=self.artifacts_dir,
        reports=self.reports_dir,
    )

    self.repository = repository
    self.revision = revision
    assert (self.revision is not None
            and self.repository is not None), "Missing repo/revision"
    logger.info("Mercurial setup",
                repository=self.repository,
                revision=self.revision)

    # repo_dir is only set when a cache is available (self.branch is
    # presumably a property defined on the class — not visible here).
    if cache_root is not None:
        assert os.path.isdir(
            cache_root), f"Cache root {cache_root} is not a dir."
        self.repo_dir = os.path.join(cache_root, self.branch)

    # Load coverage tasks for all platforms
    decision_task_id = taskcluster.get_decision_task(
        self.branch, self.revision)
    assert decision_task_id is not None, "The decision task couldn't be found"
    group = taskcluster.get_task_details(decision_task_id)["taskGroupId"]
    # Keep only the coverage-enabled test tasks of that group.
    test_tasks = [
        task
        for task in taskcluster.get_tasks_in_group(group)
        if taskcluster.is_coverage_task(task["task"])
    ]

    # Check the required platforms are present
    platforms = set(
        taskcluster.get_platform(test_task["task"])
        for test_task in test_tasks)
    for platform in required_platforms:
        assert platform in platforms, f"{platform} missing in the task group."

    self.artifactsHandler = ArtifactsHandler(test_tasks, self.artifacts_dir,
                                             task_name_filter)
def test_generate_path(FAKE_ARTIFACTS_DIR):
    """Artifact paths follow '<platform>_<chunk>_<artifact name>' inside parent_dir."""
    handler = ArtifactsHandler([], parent_dir=FAKE_ARTIFACTS_DIR)

    jsvm_artifact = {'name': 'code-coverage-jsvm.info'}
    grcov_artifact = {'name': 'code-coverage-grcov.zip'}

    expected_jsvm = os.path.join(
        handler.parent_dir, 'linux_xpcshell-3_code-coverage-jsvm.info')
    assert handler.generate_path('linux', 'xpcshell-3', jsvm_artifact) == expected_jsvm

    expected_grcov = os.path.join(
        handler.parent_dir, 'windows_cppunit_code-coverage-grcov.zip')
    assert handler.generate_path('windows', 'cppunit', grcov_artifact) == expected_grcov
def __init__(self, revision, cache_root, client_id, access_token):
    """Set up one mozilla-central coverage ingestion run.

    revision: changeset to process; when None, the latest revision already
    ingested by codecov is looked up instead (cron mode, from_pulse=False).
    cache_root: existing directory holding the mozilla-central clone.
    client_id/access_token: Taskcluster credentials.
    """
    # List of test-suite, sorted alphabetically.
    # This way, the index of a suite in the array should be stable enough.
    self.suites = [
        'web-platform-tests',
    ]

    self.cache_root = cache_root

    assert os.path.isdir(cache_root), 'Cache root {} is not a dir.'.format(
        cache_root)
    self.repo_dir = os.path.join(cache_root, 'mozilla-central')
    # Per-run scratch space for downloaded artifacts and generated reports.
    temp_dir = tempfile.mkdtemp()
    self.artifacts_dir = os.path.join(temp_dir, 'ccov-artifacts')
    self.ccov_reports_dir = os.path.join(temp_dir, 'code-coverage-reports')

    self.client_id = client_id
    self.access_token = access_token

    self.index_service = get_service('index', client_id, access_token)

    self.githubUtils = GitHubUtils(cache_root, client_id, access_token)

    if revision is None:
        # Retrieve revision of latest codecov build
        self.github_revision = uploader.get_latest_codecov()
        self.revision = self.githubUtils.git_to_mercurial(
            self.github_revision)
        self.from_pulse = False
    else:
        self.github_revision = None
        self.revision = revision
        self.from_pulse = True
        # NOTE(review): notifier is only created in the pulse-triggered branch
        # here — confirm callers guard access to self.notifier in cron mode.
        self.notifier = Notifier(self.repo_dir, revision, client_id,
                                 access_token)

    logger.info('Mercurial revision', revision=self.revision)

    # Indexed coverage build task per platform; note the 'windows' entry is
    # indexed under the 'win' suffix.
    task_ids = {
        'linux':
        taskcluster.get_task('mozilla-central', self.revision, 'linux'),
        'windows':
        taskcluster.get_task('mozilla-central', self.revision, 'win'),
        'android-test':
        taskcluster.get_task('mozilla-central', self.revision,
                             'android-test'),
        'android-emulator':
        taskcluster.get_task('mozilla-central', self.revision,
                             'android-emulator'),
    }

    self.artifactsHandler = ArtifactsHandler(task_ids, self.artifacts_dir)
def test_get_chunks(FAKE_ARTIFACTS_DIR):
    """Each platform exposes exactly its own chunk names."""
    handler = ArtifactsHandler([], parent_dir=FAKE_ARTIFACTS_DIR)

    assert handler.get_chunks('windows') == {'cppunit', 'mochitest-1', 'xpcshell-7'}
    assert handler.get_chunks('linux') == {
        'firefox-ui-functional-remote',
        'mochitest-2',
        'xpcshell-3',
        'xpcshell-7',
    }
def __init__(
    self,
    repository,
    revision,
    task_name_filter,
    cache_root,
    working_dir,
    required_platforms=[],  # NOTE(review): mutable default — only iterated here, never mutated
):
    """Set up one coverage ingestion run using indexed build tasks.

    repository/revision: hg repository URL and changeset to process (both
    mandatory here).
    task_name_filter: forwarded to ArtifactsHandler to restrict downloads.
    cache_root: existing directory holding the shared repository clones.
    working_dir: local directory for artifacts and generated reports.
    required_platforms: platforms whose coverage build must be indexed.
    """
    # Local working directories for downloaded artifacts and built reports.
    os.makedirs(working_dir, exist_ok=True)
    self.artifacts_dir = os.path.join(working_dir, "ccov-artifacts")
    self.reports_dir = os.path.join(working_dir, "ccov-reports")
    logger.info(
        "Local storage initialized.",
        artifacts=self.artifacts_dir,
        reports=self.reports_dir,
    )

    self.repository = repository
    self.revision = revision
    assert (self.revision is not None
            and self.repository is not None), "Missing repo/revision"
    logger.info("Mercurial setup",
                repository=self.repository,
                revision=self.revision)

    # self.branch is presumably a property defined on the class (not visible here).
    assert os.path.isdir(
        cache_root), f"Cache root {cache_root} is not a dir."
    self.repo_dir = os.path.join(cache_root, self.branch)

    # Load current coverage task for all platforms
    task_ids = {
        platform: taskcluster.get_task(self.branch, self.revision, platform)
        for platform in PLATFORMS
    }

    # Check the required platforms are present
    for platform in required_platforms:
        if not task_ids[platform]:
            raise Exception(
                f"Code coverage build on {platform} failed and was not indexed."
            )

    self.artifactsHandler = ArtifactsHandler(task_ids, self.artifacts_dir,
                                             task_name_filter)
def test_download(mocked_download_artifact, mocked_get_task_artifact,
                  TEST_TASK_FROM_GROUP, LINUX_TEST_TASK_ARTIFACTS):
    """download() fetches every coverage artifact of a test task exactly once."""
    handler = ArtifactsHandler([])
    mocked_get_task_artifact.return_value = LINUX_TEST_TASK_ARTIFACTS['artifacts']

    handler.download(TEST_TASK_FROM_GROUP)

    # One artifact listing, two actual downloads (grcov then jsvm).
    assert mocked_get_task_artifact.call_count == 1
    assert mocked_download_artifact.call_count == 2

    expected_calls = [
        mock.call(
            'ccov-artifacts/linux_mochitest-devtools-chrome-4_code-coverage-grcov.zip',
            'AN1M9SW0QY6DZT6suL3zlQ',
            'public/test_info/code-coverage-grcov.zip',
        ),
        mock.call(
            'ccov-artifacts/linux_mochitest-devtools-chrome-4_code-coverage-jsvm.zip',
            'AN1M9SW0QY6DZT6suL3zlQ',
            'public/test_info/code-coverage-jsvm.zip',
        ),
    ]
    assert mocked_download_artifact.call_args_list == expected_calls
def test_download(mocked_download_artifact, mocked_get_task_artifact, TEST_TASK_FROM_GROUP, LINUX_TEST_TASK_ARTIFACTS):
    """download() fetches the grcov artifact first, then the jsvm one."""
    handler = ArtifactsHandler([])
    mocked_get_task_artifact.return_value = LINUX_TEST_TASK_ARTIFACTS['artifacts']

    handler.download(TEST_TASK_FROM_GROUP)

    # A single artifact listing, two actual downloads.
    assert mocked_get_task_artifact.call_count == 1
    assert mocked_download_artifact.call_count == 2

    first_call, second_call = mocked_download_artifact.call_args_list
    assert first_call == mock.call(
        'ccov-artifacts/linux_mochitest-devtools-chrome-4_code-coverage-grcov.zip',
        'AN1M9SW0QY6DZT6suL3zlQ',
        'public/test_info/code-coverage-grcov.zip',
    )
    assert second_call == mock.call(
        'ccov-artifacts/linux_mochitest-devtools-chrome-4_code-coverage-jsvm.zip',
        'AN1M9SW0QY6DZT6suL3zlQ',
        'public/test_info/code-coverage-jsvm.zip',
    )
def test_download_all(LINUX_TASK_ID, LINUX_TASK, GROUP_TASKS_1, GROUP_TASKS_2, FAKE_ARTIFACTS_DIR):
    """Without a name filter, every coverage task in the group is downloaded."""
    # Stub the Taskcluster queue endpoints.
    responses.add(responses.GET,
                  'https://queue.taskcluster.net/v1/task/{}'.format(LINUX_TASK_ID),
                  json=LINUX_TASK,
                  status=200)

    for group_tasks in _group_tasks():
        responses.add(responses.GET,
                      'https://queue.taskcluster.net/v1/task-group/aPt9FbIdQwmhwDIPDYLuaw/list',
                      json=group_tasks,
                      status=200)

    handler = ArtifactsHandler({'linux': LINUX_TASK_ID}, parent_dir=FAKE_ARTIFACTS_DIR)

    # Record which tasks download_all() would fetch.
    seen = set()

    def record_download(task):
        seen.add(task['status']['taskId'])

    handler.download = record_download
    handler.download_all()

    assert seen == {
        'test-linux64-ccov/debug-mochitest-devtools-chrome-e10s-4-completed',
        'test-windows10-64-ccov/debug-xpcshell-4-failed',
        'test-linux64-ccov/debug-cppunit-completed',
    }
def __init__(self, repository, revision, cache_root, client_id, access_token):
    """Set up one coverage ingestion run.

    repository/revision: hg repository URL and changeset to process. When
    revision is None, the latest revision already ingested by codecov is
    looked up instead (cron mode, from_pulse=False).
    cache_root: existing directory holding the shared repository clones.
    client_id/access_token: Taskcluster credentials.
    """
    # List of test-suite, sorted alphabetically.
    # This way, the index of a suite in the array should be stable enough.
    self.suites = [
        'web-platform-tests',
    ]

    self.cache_root = cache_root

    # Per-run scratch space for downloaded artifacts and generated reports.
    temp_dir = tempfile.mkdtemp()
    self.artifacts_dir = os.path.join(temp_dir, 'ccov-artifacts')
    self.ccov_reports_dir = os.path.join(temp_dir, 'code-coverage-reports')

    self.client_id = client_id
    self.access_token = access_token

    self.index_service = get_service('index', client_id, access_token)

    self.githubUtils = GitHubUtils(cache_root, client_id, access_token)

    if revision is None:
        # Retrieve revision of latest codecov build
        self.github_revision = uploader.get_latest_codecov()
        self.repository = MOZILLA_CENTRAL_REPOSITORY
        self.revision = self.githubUtils.git_to_mercurial(self.github_revision)
        self.from_pulse = False
    else:
        self.github_revision = None
        self.repository = repository
        self.revision = revision
        self.from_pulse = True

    # Branch name == repository URL with the HG_BASE prefix stripped.
    branch = self.repository[len(HG_BASE):]

    assert os.path.isdir(cache_root), 'Cache root {} is not a dir.'.format(cache_root)
    self.repo_dir = os.path.join(cache_root, branch)

    logger.info('Mercurial revision', revision=self.revision)

    # Look up the indexed coverage build task per platform.
    task_ids = {}
    for platform in ['linux', 'windows', 'android-test', 'android-emulator']:
        task = taskcluster.get_task(branch, self.revision, platform)

        # On try, developers might have requested to run only one platform, and we trust them.
        # On mozilla-central, we want to assert that every platform was run (except for android platforms
        # as they are unstable).
        if task is not None:
            task_ids[platform] = task
        elif self.repository == MOZILLA_CENTRAL_REPOSITORY and not platform.startswith('android'):
            raise Exception('Code coverage build failed and was not indexed.')

    self.artifactsHandler = ArtifactsHandler(task_ids, self.artifacts_dir)
class Hook(object):
    # Orchestrates one coverage run: downloads coverage artifacts, clones the
    # source tree, builds per-(platform, suite) covdir reports, and indexes
    # the task on Taskcluster.

    def __init__(
        self,
        repository,
        revision,
        task_name_filter,
        cache_root,
        working_dir,
        required_platforms=[],  # NOTE(review): mutable default — only iterated here, never mutated
    ):
        """Prepare local storage and look up the coverage build tasks.

        repository/revision: hg repository URL and changeset (both mandatory).
        task_name_filter: forwarded to ArtifactsHandler to restrict downloads.
        cache_root: existing directory holding the shared repository clones.
        working_dir: local directory for artifacts and generated reports.
        required_platforms: platforms whose coverage build must be indexed.
        """
        # Local working directories for downloaded artifacts and built reports.
        os.makedirs(working_dir, exist_ok=True)
        self.artifacts_dir = os.path.join(working_dir, "ccov-artifacts")
        self.reports_dir = os.path.join(working_dir, "ccov-reports")
        logger.info(
            "Local storage initialized.",
            artifacts=self.artifacts_dir,
            reports=self.reports_dir,
        )

        self.repository = repository
        self.revision = revision
        assert (self.revision is not None
                and self.repository is not None), "Missing repo/revision"
        logger.info("Mercurial setup",
                    repository=self.repository,
                    revision=self.revision)

        assert os.path.isdir(
            cache_root), f"Cache root {cache_root} is not a dir."
        self.repo_dir = os.path.join(cache_root, self.branch)

        # Load current coverage task for all platforms
        task_ids = {
            platform: taskcluster.get_task(self.branch, self.revision,
                                           platform)
            for platform in PLATFORMS
        }

        # Check the required platforms are present
        for platform in required_platforms:
            if not task_ids[platform]:
                raise Exception(
                    f"Code coverage build on {platform} failed and was not indexed."
                )

        self.artifactsHandler = ArtifactsHandler(task_ids, self.artifacts_dir,
                                                 task_name_filter)

    @property
    def branch(self):
        # Branch name == repository URL with the hg.mozilla.org base prefix stripped.
        return self.repository[len(config.HG_BASE):]

    def clone_repository(self):
        """Clone self.repository at self.revision into self.repo_dir via robustcheckout."""
        cmd = hglib.util.cmdbuilder(
            "robustcheckout",
            self.repository,
            self.repo_dir,
            purge=True,
            sharebase="hg-shared",
            upstream="https://hg.mozilla.org/mozilla-unified",
            revision=self.revision,
            networkattempts=7,
        )
        cmd.insert(0, hglib.HGPATH)
        proc = hglib.util.popen(cmd)
        out, err = proc.communicate()
        if proc.returncode:
            raise hglib.error.CommandError(cmd, proc.returncode, out, err)
        logger.info("{} cloned".format(self.repository))

    def retrieve_source_and_artifacts(self):
        """Download coverage artifacts and clone the repository in parallel."""
        with ThreadPoolExecutorResult(max_workers=2) as executor:
            # Thread 1 - Download coverage artifacts.
            executor.submit(self.artifactsHandler.download_all)

            # Thread 2 - Clone repository.
            executor.submit(self.clone_repository)

    def build_reports(self, only=None):
        """
        Build all the possible covdir reports using current artifacts.

        only: optional iterable of (platform, suite) pairs to restrict the build.
        Returns a dict mapping (platform, suite) to the written report path.
        """
        os.makedirs(self.reports_dir, exist_ok=True)

        reports = {}
        for (
            (platform, suite),
                artifacts,
        ) in self.artifactsHandler.get_combinations().items():
            if only is not None and (platform, suite) not in only:
                continue

            # Generate covdir report for that suite & platform
            logger.info(
                "Building covdir suite report",
                suite=suite,
                platform=platform,
                artifacts=len(artifacts),
            )
            output = grcov.report(artifacts,
                                  source_dir=self.repo_dir,
                                  out_format="covdir")

            # Write output on FS
            path = os.path.join(self.reports_dir, f"{platform}.{suite}.json")
            with open(path, "wb") as f:
                f.write(output)

            reports[(platform, suite)] = path

        return reports

    def index_task(self, namespaces, ttl=180):
        """
        Index current task on Taskcluster Index.
        Index TTL is expressed in days.
        """
        assert isinstance(ttl, int) and ttl > 0
        task_id = os.environ.get("TASK_ID")
        if task_id is None:
            # Not running inside a Taskcluster task: nothing to index.
            logger.warning(
                "Skipping Taskcluster indexation, no task id found.")
            return
        index_service = taskcluster_config.get_service("index")

        for namespace in namespaces:
            index_service.insertTask(
                namespace,
                {
                    "taskId": task_id,
                    "rank": 0,
                    "data": {},
                    "expires": (datetime.utcnow() + timedelta(ttl)).strftime(
                        "%Y-%m-%dT%H:%M:%S.%fZ"),
                },
            )
def test_generate_path(FAKE_ARTIFACTS_DIR):
    """Paths are '<parent_dir>/<platform>_<chunk>_<artifact name>'."""
    handler = ArtifactsHandler([], parent_dir=FAKE_ARTIFACTS_DIR)

    jsvm_artifact = {'name': 'code-coverage-jsvm.info'}
    grcov_artifact = {'name': 'code-coverage-grcov.zip'}

    assert handler.generate_path('linux', 'xpcshell-3', jsvm_artifact) == os.path.join(
        handler.parent_dir, 'linux_xpcshell-3_code-coverage-jsvm.info')
    assert handler.generate_path('windows', 'cppunit', grcov_artifact) == os.path.join(
        handler.parent_dir, 'windows_cppunit_code-coverage-grcov.zip')
class CodeCov(object):
    # End-to-end coverage ingestion pipeline: download coverage artifacts,
    # clone sources, build reports and publish them (codecov.io, coveralls,
    # Phabricator, GitHub mirrors), with separate entry points per trigger.

    def __init__(self, repository, revision, cache_root, client_id, access_token):
        """Prepare a run for repository@revision.

        When revision is None (cron mode), the latest revision already
        ingested by codecov is used and from_pulse is False.
        """
        # List of test-suite, sorted alphabetically.
        # This way, the index of a suite in the array should be stable enough.
        self.suites = [
            'web-platform-tests',
        ]

        self.cache_root = cache_root

        # Per-run scratch space for downloaded artifacts and generated reports.
        temp_dir = tempfile.mkdtemp()
        self.artifacts_dir = os.path.join(temp_dir, 'ccov-artifacts')
        self.ccov_reports_dir = os.path.join(temp_dir, 'code-coverage-reports')

        self.client_id = client_id
        self.access_token = access_token

        self.index_service = get_service('index', client_id, access_token)

        self.githubUtils = GitHubUtils(cache_root, client_id, access_token)

        if revision is None:
            # Retrieve revision of latest codecov build
            self.github_revision = uploader.get_latest_codecov()
            self.repository = MOZILLA_CENTRAL_REPOSITORY
            self.revision = self.githubUtils.git_to_mercurial(
                self.github_revision)
            self.from_pulse = False
        else:
            self.github_revision = None
            self.repository = repository
            self.revision = revision
            self.from_pulse = True

        # Branch name == repository URL with the HG_BASE prefix stripped.
        branch = self.repository[len(HG_BASE):]

        assert os.path.isdir(cache_root), 'Cache root {} is not a dir.'.format(
            cache_root)
        self.repo_dir = os.path.join(cache_root, branch)

        logger.info('Mercurial revision', revision=self.revision)

        # Look up the indexed coverage build task per platform.
        task_ids = {}
        for platform in [
                'linux', 'windows', 'android-test', 'android-emulator'
        ]:
            task = taskcluster.get_task(branch, self.revision, platform)

            # On try, developers might have requested to run only one platform, and we trust them.
            # On mozilla-central, we want to assert that every platform was run.
            if task is not None:
                task_ids[platform] = task
            elif self.repository == MOZILLA_CENTRAL_REPOSITORY:
                raise Exception(
                    'Code coverage build failed and was not indexed.')

        self.artifactsHandler = ArtifactsHandler(task_ids, self.artifacts_dir)

    def clone_repository(self, repository, revision):
        """Clone repository at revision into self.repo_dir via robustcheckout."""
        cmd = hglib.util.cmdbuilder(
            'robustcheckout',
            repository,
            self.repo_dir,
            purge=True,
            sharebase='hg-shared',
            upstream='https://hg.mozilla.org/mozilla-unified',
            revision=revision,
            networkattempts=7)

        cmd.insert(0, hglib.HGPATH)

        proc = hglib.util.popen(cmd)
        out, err = proc.communicate()
        if proc.returncode:
            raise hglib.error.CommandError(cmd, proc.returncode, out, err)

        logger.info('{} cloned'.format(repository))

    def retrieve_source_and_artifacts(self):
        """Download coverage artifacts and clone the source tree in parallel."""
        with ThreadPoolExecutorResult(max_workers=2) as executor:
            # Thread 1 - Download coverage artifacts.
            executor.submit(self.artifactsHandler.download_all)

            # Thread 2 - Clone repository.
            executor.submit(self.clone_repository, self.repository,
                            self.revision)

    # This function is executed when the bot is triggered at the end of a mozilla-central build.
    def go_from_trigger_mozilla_central(self):
        """Build the full report and upload it to coveralls/codecov/Phabricator."""
        commit_sha = self.githubUtils.mercurial_to_git(self.revision)
        try:
            uploader.get_codecov(commit_sha)
            logger.warn('Build was already injested')
            return
        except requests.exceptions.HTTPError:
            # Not ingested yet: proceed with the upload.
            pass

        self.retrieve_source_and_artifacts()

        self.githubUtils.update_geckodev_repo()

        logger.info('GitHub revision', revision=commit_sha)

        self.githubUtils.post_github_status(commit_sha)

        r = requests.get('https://hg.mozilla.org/mozilla-central/json-rev/%s' %
                         self.revision)
        r.raise_for_status()
        push_id = r.json()['pushid']

        output = grcov.report(self.artifactsHandler.get(),
                              source_dir=self.repo_dir,
                              service_number=push_id,
                              commit_sha=commit_sha,
                              token=secrets[secrets.COVERALLS_TOKEN])
        logger.info('Report generated successfully')

        # Sanity check: an empty per-language report means collection failed.
        report = json.loads(output)
        expected_extensions = ['.js', '.cpp']
        for extension in expected_extensions:
            assert any(
                f['name'].endswith(extension) for f in report['source_files']
            ), 'No {} file in the generated report'.format(extension)

        logger.info('Upload changeset coverage data to Phabricator')
        phabricatorUploader = PhabricatorUploader(self.repo_dir, self.revision)
        phabricatorUploader.upload(report)

        with ThreadPoolExecutorResult(max_workers=2) as executor:
            executor.submit(uploader.coveralls, output)
            executor.submit(uploader.codecov, output, commit_sha)

        logger.info('Waiting for build to be ingested by Codecov...')

        # Wait until the build has been ingested by Codecov.
        if uploader.codecov_wait(commit_sha):
            logger.info('Build ingested by codecov.io')
            notifier = Notifier(self.repo_dir, self.revision, self.client_id,
                                self.access_token)
            notifier.notify()
        else:
            logger.error('codecov.io took too much time to ingest data.')

    # This function is executed when the bot is triggered at the end of a try build.
    def go_from_trigger_try(self):
        """Build a report for a try push and upload it to Phabricator only."""
        phabricatorUploader = PhabricatorUploader(self.repo_dir, self.revision)

        with hgmo.HGMO(server_address=TRY_REPOSITORY) as hgmo_server:
            changesets = hgmo_server.get_automation_relevance_changesets(
                self.revision)['changesets']

        # Skip the whole run when no commit maps to a Phabricator revision.
        if not any(
                phabricatorUploader.parse_revision_id(changeset['desc'])
                is not None for changeset in changesets):
            logger.info(
                'None of the commits in the try push are linked to a Phabricator revision'
            )
            return

        self.retrieve_source_and_artifacts()

        output = grcov.report(
            self.artifactsHandler.get(),
            source_dir=self.repo_dir,
            service_number='SERVICE_NUMBER',
            commit_sha='COMMIT_SHA',
            token='TOKEN',
        )
        logger.info('Report generated successfully')

        logger.info('Upload changeset coverage data to Phabricator')
        phabricatorUploader.upload(json.loads(output), changesets)

    # This function is executed when the bot is triggered via cron.
    def go_from_cron(self):
        """Generate suite/zero-coverage/chunk-mapping reports and index the task."""
        self.retrieve_source_and_artifacts()

        logger.info('Generating suite reports')
        os.makedirs(self.ccov_reports_dir, exist_ok=True)
        suite_reports.generate(self.suites, self.artifactsHandler,
                               self.ccov_reports_dir, self.repo_dir)

        logger.info('Generating zero coverage reports')
        zc = ZeroCov(self.repo_dir)
        zc.generate(self.artifactsHandler.get(), self.revision,
                    self.github_revision)

        logger.info('Generating chunk mapping')
        chunk_mapping.generate(self.repo_dir, self.revision,
                               self.artifactsHandler)

        # Index the task in the TaskCluster index at the given revision and as "latest".
        # Given that all tasks have the same rank, the latest task that finishes will
        # overwrite the "latest" entry.
        namespaces = [
            'project.releng.services.project.{}.code_coverage_bot.{}'.format(
                secrets[secrets.APP_CHANNEL], self.revision),
            'project.releng.services.project.{}.code_coverage_bot.latest'.
            format(secrets[secrets.APP_CHANNEL]),
        ]
        for namespace in namespaces:
            self.index_service.insertTask(
                namespace, {
                    'taskId':
                    os.environ['TASK_ID'],
                    'rank':
                    0,
                    'data': {},
                    'expires': (datetime.utcnow() +
                                timedelta(180)).strftime('%Y-%m-%dT%H:%M:%S.%fZ'),
                })

        os.chdir(self.ccov_reports_dir)
        self.githubUtils.update_codecoveragereports_repo()

    def go(self):
        """Dispatch to the pipeline matching the trigger (cron / try / m-c)."""
        if not self.from_pulse:
            self.go_from_cron()
        elif self.repository == TRY_REPOSITORY:
            self.go_from_trigger_try()
        elif self.repository == MOZILLA_CENTRAL_REPOSITORY:
            self.go_from_trigger_mozilla_central()
        else:
            assert False, 'We shouldn\'t be here!'
class CodeCov(object):
    # End-to-end coverage ingestion pipeline: download coverage artifacts,
    # clone sources, build reports and publish them (codecov.io, coveralls,
    # Phabricator, GitHub mirrors), with separate entry points per trigger.

    def __init__(self, repository, revision, cache_root, client_id, access_token):
        """Prepare a run for repository@revision.

        When revision is None (cron mode), the latest revision already
        ingested by codecov is used and from_pulse is False.
        """
        # List of test-suite, sorted alphabetically.
        # This way, the index of a suite in the array should be stable enough.
        self.suites = [
            'web-platform-tests',
        ]

        self.cache_root = cache_root

        # Per-run scratch space for downloaded artifacts and generated reports.
        temp_dir = tempfile.mkdtemp()
        self.artifacts_dir = os.path.join(temp_dir, 'ccov-artifacts')
        self.ccov_reports_dir = os.path.join(temp_dir, 'code-coverage-reports')

        self.client_id = client_id
        self.access_token = access_token

        self.index_service = get_service('index', client_id, access_token)

        self.githubUtils = GitHubUtils(cache_root, client_id, access_token)

        if revision is None:
            # Retrieve revision of latest codecov build
            self.github_revision = uploader.get_latest_codecov()
            self.repository = MOZILLA_CENTRAL_REPOSITORY
            self.revision = self.githubUtils.git_to_mercurial(self.github_revision)
            self.from_pulse = False
        else:
            self.github_revision = None
            self.repository = repository
            self.revision = revision
            self.from_pulse = True

        # Branch name == repository URL with the HG_BASE prefix stripped.
        branch = self.repository[len(HG_BASE):]

        assert os.path.isdir(cache_root), 'Cache root {} is not a dir.'.format(cache_root)
        self.repo_dir = os.path.join(cache_root, branch)

        logger.info('Mercurial revision', revision=self.revision)

        # Look up the indexed coverage build task per platform.
        task_ids = {}
        for platform in ['linux', 'windows', 'android-test', 'android-emulator']:
            task = taskcluster.get_task(branch, self.revision, platform)

            # On try, developers might have requested to run only one platform, and we trust them.
            # On mozilla-central, we want to assert that every platform was run (except for android platforms
            # as they are unstable).
            if task is not None:
                task_ids[platform] = task
            elif self.repository == MOZILLA_CENTRAL_REPOSITORY and not platform.startswith('android'):
                raise Exception('Code coverage build failed and was not indexed.')

        self.artifactsHandler = ArtifactsHandler(task_ids, self.artifacts_dir)

    def clone_repository(self, repository, revision):
        """Clone repository at revision into self.repo_dir via robustcheckout."""
        cmd = hglib.util.cmdbuilder('robustcheckout',
                                    repository,
                                    self.repo_dir,
                                    purge=True,
                                    sharebase='hg-shared',
                                    upstream='https://hg.mozilla.org/mozilla-unified',
                                    revision=revision,
                                    networkattempts=7)

        cmd.insert(0, hglib.HGPATH)

        proc = hglib.util.popen(cmd)
        out, err = proc.communicate()
        if proc.returncode:
            raise hglib.error.CommandError(cmd, proc.returncode, out, err)

        logger.info('{} cloned'.format(repository))

    def retrieve_source_and_artifacts(self):
        """Download coverage artifacts and clone the source tree in parallel."""
        with ThreadPoolExecutorResult(max_workers=2) as executor:
            # Thread 1 - Download coverage artifacts.
            executor.submit(self.artifactsHandler.download_all)

            # Thread 2 - Clone repository.
            executor.submit(self.clone_repository, self.repository, self.revision)

    # This function is executed when the bot is triggered at the end of a mozilla-central build.
    def go_from_trigger_mozilla_central(self):
        """Build the full report and upload it to coveralls/codecov/Phabricator."""
        commit_sha = self.githubUtils.mercurial_to_git(self.revision)
        try:
            uploader.get_codecov(commit_sha)
            logger.warn('Build was already injested')
            return
        except requests.exceptions.HTTPError:
            # Not ingested yet: proceed with the upload.
            pass

        self.retrieve_source_and_artifacts()

        self.githubUtils.update_geckodev_repo()

        logger.info('GitHub revision', revision=commit_sha)

        self.githubUtils.post_github_status(commit_sha)

        r = requests.get('https://hg.mozilla.org/mozilla-central/json-rev/%s' % self.revision)
        r.raise_for_status()
        push_id = r.json()['pushid']

        # Check that all JavaScript files present in the coverage artifacts actually exist.
        # If they don't, there might be a bug in the LCOV rewriter.
        for artifact in self.artifactsHandler.get():
            if 'jsvm' not in artifact:
                continue

            with zipfile.ZipFile(artifact, 'r') as zf:
                for file_name in zf.namelist():
                    with zf.open(file_name, 'r') as fl:
                        # LCOV 'SF:' lines carry the source file paths.
                        source_files = [line[3:].decode('utf-8').rstrip() for line in fl if line.startswith(b'SF:')]
                        missing_files = [f for f in source_files if not os.path.exists(os.path.join(self.repo_dir, f))]
                        if len(missing_files) != 0:
                            logger.warn(f'{missing_files} are present in coverage reports, but missing from the repository')

        output = grcov.report(
            self.artifactsHandler.get(),
            source_dir=self.repo_dir,
            service_number=push_id,
            commit_sha=commit_sha,
            token=secrets[secrets.COVERALLS_TOKEN]
        )
        logger.info('Report generated successfully')

        # Sanity check: an empty per-language report means collection failed.
        report = json.loads(output)
        expected_extensions = ['.js', '.cpp']
        for extension in expected_extensions:
            assert any(f['name'].endswith(extension) for f in report['source_files']), 'No {} file in the generated report'.format(extension)

        logger.info('Upload changeset coverage data to Phabricator')
        phabricatorUploader = PhabricatorUploader(self.repo_dir, self.revision)
        phabricatorUploader.upload(report)

        with ThreadPoolExecutorResult(max_workers=2) as executor:
            executor.submit(uploader.coveralls, output)
            executor.submit(uploader.codecov, output, commit_sha)

        logger.info('Waiting for build to be ingested by Codecov...')

        # Wait until the build has been ingested by Codecov.
        if uploader.codecov_wait(commit_sha):
            logger.info('Build ingested by codecov.io')
            notifier = Notifier(self.repo_dir, self.revision, self.client_id, self.access_token)
            notifier.notify()
        else:
            logger.error('codecov.io took too much time to ingest data.')

    # This function is executed when the bot is triggered at the end of a try build.
    def go_from_trigger_try(self):
        """Build a report for a try push and upload it to Phabricator only."""
        phabricatorUploader = PhabricatorUploader(self.repo_dir, self.revision)

        with hgmo.HGMO(server_address=TRY_REPOSITORY) as hgmo_server:
            changesets = hgmo_server.get_automation_relevance_changesets(self.revision)

        # Skip the whole run when no commit maps to a Phabricator revision.
        if not any(phabricatorUploader.parse_revision_id(changeset['desc']) is not None for changeset in changesets):
            logger.info('None of the commits in the try push are linked to a Phabricator revision')
            return

        self.retrieve_source_and_artifacts()

        output = grcov.report(
            self.artifactsHandler.get(),
            source_dir=self.repo_dir,
            service_number='SERVICE_NUMBER',
            commit_sha='COMMIT_SHA',
            token='TOKEN',
        )
        logger.info('Report generated successfully')

        logger.info('Upload changeset coverage data to Phabricator')
        phabricatorUploader.upload(json.loads(output), changesets)

    # This function is executed when the bot is triggered via cron.
    def go_from_cron(self):
        """Generate suite/zero-coverage/chunk-mapping reports and index the task."""
        self.retrieve_source_and_artifacts()

        logger.info('Generating suite reports')
        os.makedirs(self.ccov_reports_dir, exist_ok=True)
        suite_reports.generate(self.suites, self.artifactsHandler, self.ccov_reports_dir, self.repo_dir)

        logger.info('Generating zero coverage reports')
        zc = ZeroCov(self.repo_dir)
        zc.generate(self.artifactsHandler.get(), self.revision, self.github_revision)

        logger.info('Generating chunk mapping')
        chunk_mapping.generate(self.repo_dir, self.revision, self.artifactsHandler)

        # Index the task in the TaskCluster index at the given revision and as "latest".
        # Given that all tasks have the same rank, the latest task that finishes will
        # overwrite the "latest" entry.
        namespaces = [
            'project.releng.services.project.{}.code_coverage_bot.{}'.format(secrets[secrets.APP_CHANNEL], self.revision),
            'project.releng.services.project.{}.code_coverage_bot.latest'.format(secrets[secrets.APP_CHANNEL]),
        ]
        for namespace in namespaces:
            self.index_service.insertTask(
                namespace,
                {
                    'taskId': os.environ['TASK_ID'],
                    'rank': 0,
                    'data': {},
                    'expires': (datetime.utcnow() + timedelta(180)).strftime('%Y-%m-%dT%H:%M:%S.%fZ'),
                }
            )

        os.chdir(self.ccov_reports_dir)
        self.githubUtils.update_codecoveragereports_repo()

    def go(self):
        """Dispatch to the pipeline matching the trigger (cron / try / m-c)."""
        if not self.from_pulse:
            self.go_from_cron()
        elif self.repository == TRY_REPOSITORY:
            self.go_from_trigger_try()
        elif self.repository == MOZILLA_CENTRAL_REPOSITORY:
            self.go_from_trigger_mozilla_central()
        else:
            assert False, 'We shouldn\'t be here!'
class CodeCov(object):
    '''Code-coverage bot: downloads coverage artifacts, clones mozilla-central
    and either uploads reports (pulse-triggered) or generates cron reports.'''

    def __init__(self, revision, cache_root, client_id, access_token):
        # List of test-suite, sorted alphabetically.
        # This way, the index of a suite in the array should be stable enough.
        self.suites = [
            'web-platform-tests',
        ]

        self.cache_root = cache_root

        assert os.path.isdir(cache_root), 'Cache root {} is not a dir.'.format(
            cache_root)
        self.repo_dir = os.path.join(cache_root, 'mozilla-central')

        temp_dir = tempfile.mkdtemp()
        self.artifacts_dir = os.path.join(temp_dir, 'ccov-artifacts')
        self.ccov_reports_dir = os.path.join(temp_dir, 'code-coverage-reports')

        self.client_id = client_id
        self.access_token = access_token

        self.index_service = get_service('index', client_id, access_token)

        self.githubUtils = GitHubUtils(cache_root, client_id, access_token)

        if revision is None:
            # Retrieve revision of latest codecov build
            self.github_revision = uploader.get_latest_codecov()
            self.revision = self.githubUtils.git_to_mercurial(
                self.github_revision)
            self.from_pulse = False
        else:
            self.github_revision = None
            self.revision = revision
            self.from_pulse = True
            self.notifier = Notifier(self.repo_dir, revision, client_id, access_token)

        logger.info('Mercurial revision', revision=self.revision)

        task_ids = {
            'linux': taskcluster.get_task('mozilla-central', self.revision, 'linux'),
            'windows': taskcluster.get_task('mozilla-central', self.revision, 'win'),
            'android-test': taskcluster.get_task('mozilla-central', self.revision, 'android-test'),
            'android-emulator': taskcluster.get_task('mozilla-central', self.revision, 'android-emulator'),
        }

        self.artifactsHandler = ArtifactsHandler(task_ids, self.artifacts_dir)

    def clone_mozilla_central(self, revision):
        '''Clone mozilla-central at `revision` via robustcheckout.

        Raises hglib.error.CommandError if the hg subprocess exits non-zero.
        '''
        shared_dir = self.repo_dir + '-shared'
        cmd = hglib.util.cmdbuilder('robustcheckout',
                                    'https://hg.mozilla.org/mozilla-central',
                                    self.repo_dir,
                                    purge=True,
                                    sharebase=shared_dir,
                                    revision=revision,
                                    networkattempts=7)

        cmd.insert(0, hglib.HGPATH)

        proc = hglib.util.popen(cmd)
        out, err = proc.communicate()
        if proc.returncode:
            raise hglib.error.CommandError(cmd, proc.returncode, out, err)

        logger.info('mozilla-central cloned')

    def go(self):
        if self.from_pulse:
            commit_sha = self.githubUtils.mercurial_to_git(self.revision)
            try:
                # If codecov already knows this commit, the build was processed before.
                uploader.get_codecov(commit_sha)
                logger.warn('Build was already injested')
                return
            except requests.exceptions.HTTPError:
                pass

        with ThreadPoolExecutorResult(max_workers=2) as executor:
            # Thread 1 - Download coverage artifacts.
            executor.submit(self.artifactsHandler.download_all)

            # Thread 2 - Clone mozilla-central.
            executor.submit(self.clone_mozilla_central, self.revision)

        if self.from_pulse:
            self.githubUtils.update_geckodev_repo()

            logger.info('GitHub revision', revision=commit_sha)

            self.githubUtils.post_github_status(commit_sha)

            r = requests.get(
                'https://hg.mozilla.org/mozilla-central/json-rev/%s' % self.revision)
            r.raise_for_status()
            push_id = r.json()['pushid']

            output = grcov.report(self.artifactsHandler.get(),
                                  source_dir=self.repo_dir,
                                  service_number=push_id,
                                  commit_sha=commit_sha,
                                  token=secrets[secrets.COVERALLS_TOKEN])
            logger.info('Report generated successfully')

            logger.info('Upload changeset coverage data to Phabricator')
            phabricatorUploader = PhabricatorUploader(self.repo_dir, self.revision)
            phabricatorUploader.upload(json.loads(output))

            with ThreadPoolExecutorResult(max_workers=2) as executor:
                executor.submit(uploader.coveralls, output)
                executor.submit(uploader.codecov, output, commit_sha)

            logger.info('Waiting for build to be ingested by Codecov...')
            # Wait until the build has been ingested by Codecov.
            if uploader.codecov_wait(commit_sha):
                logger.info('Build ingested by codecov.io')
                self.notifier.notify()
            else:
                logger.error('codecov.io took too much time to ingest data.')
        else:
            logger.info('Generating suite reports')
            os.makedirs(self.ccov_reports_dir, exist_ok=True)
            suite_reports.generate(self.suites, self.artifactsHandler,
                                   self.ccov_reports_dir, self.repo_dir)

            logger.info('Generating zero coverage reports')
            zc = ZeroCov(self.repo_dir)
            zc.generate(self.artifactsHandler.get(), self.revision, self.github_revision)

            logger.info('Generating chunk mapping')
            chunk_mapping.generate(self.repo_dir, self.revision, self.artifactsHandler)

            # Index the task in the TaskCluster index at the given revision and as "latest".
            # Given that all tasks have the same rank, the latest task that finishes will
            # overwrite the "latest" entry.
            namespaces = [
                'project.releng.services.project.{}.code_coverage_bot.{}'.
                format(secrets[secrets.APP_CHANNEL], self.revision),
                'project.releng.services.project.{}.code_coverage_bot.latest'.
                format(secrets[secrets.APP_CHANNEL]),
            ]

            for namespace in namespaces:
                self.index_service.insertTask(
                    namespace, {
                        'taskId': os.environ['TASK_ID'],
                        'rank': 0,
                        'data': {},
                        'expires': (datetime.utcnow() + timedelta(180)).strftime('%Y-%m-%dT%H:%M:%S.%fZ'),
                    })

            os.chdir(self.ccov_reports_dir)
            self.githubUtils.update_codecoveragereports_repo()
def test_get_combinations(tmpdir, fake_artifacts):
    """Check get_combinations groups artifacts by (platform, suite), including 'all'."""

    def add_dir(files):
        # Prefix every file name with the temporary artifacts directory.
        return [os.path.join(tmpdir.strpath, f) for f in files]

    a = ArtifactsHandler([])
    a.artifacts = fake_artifacts
    assert dict(a.get_combinations()) == {
        ("all", "all"): add_dir(
            [
                "windows_mochitest-1_code-coverage-jsvm.info",
                "linux_mochitest-2_code-coverage-grcov.zip",
                "windows_xpcshell-7_code-coverage-jsvm.info",
                "linux_xpcshell-7_code-coverage-grcov.zip",
                "linux_xpcshell-3_code-coverage-grcov.zip",
                "windows_cppunit_code-coverage-grcov.zip",
                "linux_firefox-ui-functional-remote_code-coverage-jsvm.info",
            ]
        ),
        ("linux", "all"): add_dir(
            [
                "linux_firefox-ui-functional-remote_code-coverage-jsvm.info",
                "linux_mochitest-2_code-coverage-grcov.zip",
                "linux_xpcshell-7_code-coverage-grcov.zip",
                "linux_xpcshell-3_code-coverage-grcov.zip",
            ]
        ),
        ("windows", "all"): add_dir(
            [
                "windows_cppunit_code-coverage-grcov.zip",
                "windows_mochitest-1_code-coverage-jsvm.info",
                "windows_xpcshell-7_code-coverage-jsvm.info",
            ]
        ),
        ("all", "cppunit"): add_dir(["windows_cppunit_code-coverage-grcov.zip"]),
        ("windows", "cppunit"): add_dir(["windows_cppunit_code-coverage-grcov.zip"]),
        ("all", "firefox-ui-functional"): add_dir(
            ["linux_firefox-ui-functional-remote_code-coverage-jsvm.info"]
        ),
        ("linux", "firefox-ui-functional"): add_dir(
            ["linux_firefox-ui-functional-remote_code-coverage-jsvm.info"]
        ),
        ("all", "mochitest"): add_dir(
            [
                "windows_mochitest-1_code-coverage-jsvm.info",
                "linux_mochitest-2_code-coverage-grcov.zip",
            ]
        ),
        ("linux", "mochitest"): add_dir(["linux_mochitest-2_code-coverage-grcov.zip"]),
        ("windows", "mochitest"): add_dir(
            ["windows_mochitest-1_code-coverage-jsvm.info"]
        ),
        ("all", "xpcshell"): add_dir(
            [
                "windows_xpcshell-7_code-coverage-jsvm.info",
                "linux_xpcshell-7_code-coverage-grcov.zip",
                "linux_xpcshell-3_code-coverage-grcov.zip",
            ]
        ),
        ("linux", "xpcshell"): add_dir(
            [
                "linux_xpcshell-7_code-coverage-grcov.zip",
                "linux_xpcshell-3_code-coverage-grcov.zip",
            ]
        ),
        ("windows", "xpcshell"): add_dir(
            ["windows_xpcshell-7_code-coverage-jsvm.info"]
        ),
    }
class CodeCov(object):
    """Code-coverage bot: downloads coverage artifacts, clones the repository
    and generates/uploads covdir reports depending on the trigger."""

    def __init__(self, repository, revision, task_name_filter, cache_root):
        # List of test-suite, sorted alphabetically.
        # This way, the index of a suite in the array should be stable enough.
        self.suites = ["web-platform-tests"]

        self.cache_root = cache_root

        temp_dir = tempfile.mkdtemp()
        self.artifacts_dir = os.path.join(temp_dir, "ccov-artifacts")

        self.index_service = taskcluster_config.get_service("index")

        if revision is None:
            # Retrieve latest ingested revision
            self.repository = MOZILLA_CENTRAL_REPOSITORY
            try:
                self.revision = uploader.gcp_latest("mozilla-central")[0]["revision"]
            except Exception as e:
                logger.warn(
                    "Failed to retrieve the latest reports ingested: {}".format(e)
                )
                raise
            self.from_pulse = False
        else:
            self.repository = repository
            self.revision = revision
            self.from_pulse = True

        self.branch = self.repository[len(HG_BASE) :]

        assert os.path.isdir(cache_root), "Cache root {} is not a dir.".format(
            cache_root
        )
        self.repo_dir = os.path.join(cache_root, self.branch)

        logger.info("Mercurial revision", revision=self.revision)

        task_ids = {}
        for platform in ["linux", "windows", "android-test", "android-emulator"]:
            task = taskcluster.get_task(self.branch, self.revision, platform)

            # On try, developers might have requested to run only one platform, and we trust them.
            # On mozilla-central, we want to assert that every platform was run (except for android platforms
            # as they are unstable).
            if task is not None:
                task_ids[platform] = task
            elif (
                self.repository == MOZILLA_CENTRAL_REPOSITORY
                and not platform.startswith("android")
            ):
                raise Exception("Code coverage build failed and was not indexed.")

        self.artifactsHandler = ArtifactsHandler(
            task_ids, self.artifacts_dir, task_name_filter
        )

    def clone_repository(self, repository, revision):
        """Clone `repository` at `revision` into self.repo_dir via robustcheckout.

        Raises hglib.error.CommandError if the hg subprocess exits non-zero.
        """
        cmd = hglib.util.cmdbuilder(
            "robustcheckout",
            repository,
            self.repo_dir,
            purge=True,
            sharebase="hg-shared",
            upstream="https://hg.mozilla.org/mozilla-unified",
            revision=revision,
            networkattempts=7,
        )

        cmd.insert(0, hglib.HGPATH)

        proc = hglib.util.popen(cmd)
        out, err = proc.communicate()
        if proc.returncode:
            raise hglib.error.CommandError(cmd, proc.returncode, out, err)

        logger.info("{} cloned".format(repository))

    def retrieve_source_and_artifacts(self):
        """Download coverage artifacts and clone the repository concurrently."""
        with ThreadPoolExecutorResult(max_workers=2) as executor:
            # Thread 1 - Download coverage artifacts.
            executor.submit(self.artifactsHandler.download_all)

            # Thread 2 - Clone repository.
            executor.submit(self.clone_repository, self.repository, self.revision)

    def generate_covdir(self):
        """
        Build the covdir report using current artifacts
        """
        output = grcov.report(
            self.artifactsHandler.get(), source_dir=self.repo_dir, out_format="covdir"
        )
        logger.info("Covdir report generated successfully")
        return json.loads(output)

    # This function is executed when the bot is triggered at the end of a mozilla-central build.
    def go_from_trigger_mozilla_central(self):
        # Check the covdir report does not already exists
        if uploader.gcp_covdir_exists(self.branch, self.revision):
            logger.warn("Covdir report already on GCP")
            return

        self.retrieve_source_and_artifacts()

        # Check that all JavaScript files present in the coverage artifacts actually exist.
        # If they don't, there might be a bug in the LCOV rewriter.
        for artifact in self.artifactsHandler.get():
            if "jsvm" not in artifact:
                continue

            with zipfile.ZipFile(artifact, "r") as zf:
                for file_name in zf.namelist():
                    with zf.open(file_name, "r") as fl:
                        # LCOV source-file records look like b'SF:path/to/file'.
                        source_files = [
                            line[3:].decode("utf-8").rstrip()
                            for line in fl
                            if line.startswith(b"SF:")
                        ]
                        missing_files = [
                            f
                            for f in source_files
                            if not os.path.exists(os.path.join(self.repo_dir, f))
                        ]
                        if len(missing_files) != 0:
                            logger.warn(
                                f"{missing_files} are present in coverage reports, but missing from the repository"
                            )

        report = self.generate_covdir()

        # Sanity-check the report: it must cover at least one JS and one C++ file.
        paths = uploader.covdir_paths(report)
        expected_extensions = [".js", ".cpp"]
        for extension in expected_extensions:
            assert any(
                path.endswith(extension) for path in paths
            ), "No {} file in the generated report".format(extension)

        # Get pushlog and ask the backend to generate the coverage by changeset
        # data, which will be cached.
        with hgmo.HGMO(self.repo_dir) as hgmo_server:
            changesets = hgmo_server.get_automation_relevance_changesets(self.revision)

        logger.info("Upload changeset coverage data to Phabricator")
        phabricatorUploader = PhabricatorUploader(self.repo_dir, self.revision)
        changesets_coverage = phabricatorUploader.upload(report, changesets)

        uploader.gcp(self.branch, self.revision, report)

        logger.info("Build uploaded on GCP")
        notify_email(self.revision, changesets, changesets_coverage)

    # This function is executed when the bot is triggered at the end of a try build.
    def go_from_trigger_try(self):
        phabricatorUploader = PhabricatorUploader(self.repo_dir, self.revision)

        with hgmo.HGMO(server_address=TRY_REPOSITORY) as hgmo_server:
            changesets = hgmo_server.get_automation_relevance_changesets(self.revision)

        if not any(
            parse_revision_id(changeset["desc"]) is not None for changeset in changesets
        ):
            logger.info(
                "None of the commits in the try push are linked to a Phabricator revision"
            )
            return

        self.retrieve_source_and_artifacts()

        report = self.generate_covdir()

        logger.info("Upload changeset coverage data to Phabricator")
        phabricatorUploader.upload(report, changesets)

    # This function is executed when the bot is triggered via cron.
    def go_from_cron(self):
        self.retrieve_source_and_artifacts()

        logger.info("Generating zero coverage reports")
        zc = ZeroCov(self.repo_dir)
        zc.generate(self.artifactsHandler.get(), self.revision)

        logger.info("Generating chunk mapping")
        chunk_mapping.generate(self.repo_dir, self.revision, self.artifactsHandler)

        # Index the task in the TaskCluster index at the given revision and as "latest".
        # Given that all tasks have the same rank, the latest task that finishes will
        # overwrite the "latest" entry.
        namespaces = [
            "project.releng.services.project.{}.code_coverage_bot.{}".format(
                secrets[secrets.APP_CHANNEL], self.revision
            ),
            "project.releng.services.project.{}.code_coverage_bot.latest".format(
                secrets[secrets.APP_CHANNEL]
            ),
        ]

        for namespace in namespaces:
            self.index_service.insertTask(
                namespace,
                {
                    "taskId": os.environ["TASK_ID"],
                    "rank": 0,
                    "data": {},
                    "expires": (datetime.utcnow() + timedelta(180)).strftime(
                        "%Y-%m-%dT%H:%M:%S.%fZ"
                    ),
                },
            )

    def go(self):
        """Dispatch to the right workflow depending on how the bot was triggered."""
        if not self.from_pulse:
            self.go_from_cron()
        elif self.repository == TRY_REPOSITORY:
            self.go_from_trigger_try()
        elif self.repository == MOZILLA_CENTRAL_REPOSITORY:
            self.go_from_trigger_mozilla_central()
        else:
            assert False, "We shouldn't be here!"