def test_get_combinations(tmpdir, fake_artifacts):
    """Check that get_combinations groups artifacts by every (platform, suite) pair."""

    def add_dir(files):
        # Expected artifact paths live inside the pytest-provided temp directory.
        return [os.path.join(tmpdir.strpath, f) for f in files]

    handler = ArtifactsHandler([])
    handler.artifacts = fake_artifacts

    # (platform, suite) -> expected artifact file names, in the order returned.
    expected = {
        ("all", "all"): [
            "windows_mochitest-1_code-coverage-jsvm.info",
            "linux_mochitest-2_code-coverage-grcov.zip",
            "windows_xpcshell-7_code-coverage-jsvm.info",
            "linux_xpcshell-7_code-coverage-grcov.zip",
            "linux_xpcshell-3_code-coverage-grcov.zip",
            "windows_cppunit_code-coverage-grcov.zip",
            "linux_firefox-ui-functional-remote_code-coverage-jsvm.info",
        ],
        ("linux", "all"): [
            "linux_firefox-ui-functional-remote_code-coverage-jsvm.info",
            "linux_mochitest-2_code-coverage-grcov.zip",
            "linux_xpcshell-7_code-coverage-grcov.zip",
            "linux_xpcshell-3_code-coverage-grcov.zip",
        ],
        ("windows", "all"): [
            "windows_cppunit_code-coverage-grcov.zip",
            "windows_mochitest-1_code-coverage-jsvm.info",
            "windows_xpcshell-7_code-coverage-jsvm.info",
        ],
        ("all", "cppunit"): [
            "windows_cppunit_code-coverage-grcov.zip",
        ],
        ("windows", "cppunit"): [
            "windows_cppunit_code-coverage-grcov.zip",
        ],
        ("all", "firefox-ui-functional"): [
            "linux_firefox-ui-functional-remote_code-coverage-jsvm.info",
        ],
        ("linux", "firefox-ui-functional"): [
            "linux_firefox-ui-functional-remote_code-coverage-jsvm.info",
        ],
        ("all", "mochitest"): [
            "windows_mochitest-1_code-coverage-jsvm.info",
            "linux_mochitest-2_code-coverage-grcov.zip",
        ],
        ("linux", "mochitest"): [
            "linux_mochitest-2_code-coverage-grcov.zip",
        ],
        ("windows", "mochitest"): [
            "windows_mochitest-1_code-coverage-jsvm.info",
        ],
        ("all", "xpcshell"): [
            "windows_xpcshell-7_code-coverage-jsvm.info",
            "linux_xpcshell-7_code-coverage-grcov.zip",
            "linux_xpcshell-3_code-coverage-grcov.zip",
        ],
        ("linux", "xpcshell"): [
            "linux_xpcshell-7_code-coverage-grcov.zip",
            "linux_xpcshell-3_code-coverage-grcov.zip",
        ],
        ("windows", "xpcshell"): [
            "windows_xpcshell-7_code-coverage-jsvm.info",
        ],
    }

    assert dict(handler.get_combinations()) == {
        key: add_dir(names) for key, names in expected.items()
    }
class Hook(object):
    """Coordinate the retrieval of code-coverage artifacts and the creation
    of covdir reports for one Mercurial repository revision.

    Sets up local working directories, resolves the coverage tasks indexed on
    Taskcluster for each platform, and exposes helpers to clone the source,
    build per-(platform, suite) reports and index the current task.
    """

    def __init__(
        self,
        repository,
        revision,
        task_name_filter,
        cache_root,
        working_dir,
        # Immutable tuple instead of a mutable [] default (Python anti-pattern);
        # the argument is only iterated, so list-passing callers are unaffected.
        required_platforms=(),
    ):
        os.makedirs(working_dir, exist_ok=True)
        self.artifacts_dir = os.path.join(working_dir, "ccov-artifacts")
        self.reports_dir = os.path.join(working_dir, "ccov-reports")
        logger.info(
            "Local storage initialized.",
            artifacts=self.artifacts_dir,
            reports=self.reports_dir,
        )

        self.repository = repository
        self.revision = revision
        assert (
            self.revision is not None and self.repository is not None
        ), "Missing repo/revision"
        logger.info(
            "Mercurial setup", repository=self.repository, revision=self.revision
        )

        assert os.path.isdir(cache_root), f"Cache root {cache_root} is not a dir."
        self.repo_dir = os.path.join(cache_root, self.branch)

        # Load current coverage task for all platforms
        task_ids = {
            platform: taskcluster.get_task(self.branch, self.revision, platform)
            for platform in PLATFORMS
        }

        # Check the required platforms are present
        for platform in required_platforms:
            if not task_ids[platform]:
                raise Exception(
                    f"Code coverage build on {platform} failed and was not indexed."
                )

        self.artifactsHandler = ArtifactsHandler(
            task_ids, self.artifacts_dir, task_name_filter
        )

    @property
    def branch(self):
        """Branch name, obtained by stripping the HG base URL from the repository."""
        return self.repository[len(config.HG_BASE):]

    def clone_repository(self):
        """Clone the repository at the target revision using robustcheckout.

        Raises hglib.error.CommandError if the hg subprocess exits non-zero.
        """
        cmd = hglib.util.cmdbuilder(
            "robustcheckout",
            self.repository,
            self.repo_dir,
            purge=True,
            sharebase="hg-shared",
            upstream="https://hg.mozilla.org/mozilla-unified",
            revision=self.revision,
            networkattempts=7,
        )
        cmd.insert(0, hglib.HGPATH)
        proc = hglib.util.popen(cmd)
        out, err = proc.communicate()
        if proc.returncode:
            raise hglib.error.CommandError(cmd, proc.returncode, out, err)

        logger.info("{} cloned".format(self.repository))

    def retrieve_source_and_artifacts(self):
        """Concurrently download all coverage artifacts and clone the repository."""
        with ThreadPoolExecutorResult(max_workers=2) as executor:
            # Thread 1 - Download coverage artifacts.
            executor.submit(self.artifactsHandler.download_all)

            # Thread 2 - Clone repository.
            executor.submit(self.clone_repository)

    def build_reports(self, only=None):
        """
        Build all the possible covdir reports using current artifacts

        only: optional collection of (platform, suite) pairs; when given, only
            those combinations are built.
        Returns a dict mapping (platform, suite) to the path of the written
        JSON report.
        """
        os.makedirs(self.reports_dir, exist_ok=True)

        reports = {}
        for (
            (platform, suite),
            artifacts,
        ) in self.artifactsHandler.get_combinations().items():

            if only is not None and (platform, suite) not in only:
                continue

            # Generate covdir report for that suite & platform
            logger.info(
                "Building covdir suite report",
                suite=suite,
                platform=platform,
                artifacts=len(artifacts),
            )
            output = grcov.report(
                artifacts, source_dir=self.repo_dir, out_format="covdir"
            )

            # Write output on FS
            path = os.path.join(self.reports_dir, f"{platform}.{suite}.json")
            with open(path, "wb") as f:
                f.write(output)

            reports[(platform, suite)] = path

        return reports

    def index_task(self, namespaces, ttl=180):
        """
        Index current task on Taskcluster

        Index TTL is expressed in days
        """
        assert isinstance(ttl, int) and ttl > 0
        task_id = os.environ.get("TASK_ID")
        if task_id is None:
            logger.warning("Skipping Taskcluster indexation, no task id found.")
            return

        index_service = taskcluster_config.get_service("index")

        for namespace in namespaces:
            index_service.insertTask(
                namespace,
                {
                    "taskId": task_id,
                    "rank": 0,
                    "data": {},
                    # days=ttl made explicit: timedelta's first positional
                    # argument is days, matching the documented TTL unit.
                    "expires": (datetime.utcnow() + timedelta(days=ttl)).strftime(
                        "%Y-%m-%dT%H:%M:%S.%fZ"
                    ),
                },
            )