def test_coverable_last_lines(mock_secrets, mock_phabricator, fake_hg_repo):
    hg, local, remote = fake_hg_repo

    add_file(hg, local, "file", "1\n2\n3\n4\n5\n6\n7\n")
    revision = commit(hg, 1)

    hg.push(dest=bytes(remote, "ascii"))

    copy_pushlog_database(remote, local)

    phabricator = PhabricatorUploader(local, revision)
    report = covdir_report(
        {"source_files": [{"name": "file", "coverage": [None, 0, 1, 1, 1]}]}
    )
    with hgmo.HGMO(local) as hgmo_server:
        stack = changesets(hgmo_server, revision)
    with hglib.open(local) as hg:
        results = phabricator.generate(hg, report, stack)

    assert results == {
        revision: {
            "revision_id": 1,
            "paths": {
                "file": {
                    "coverage": "NUCCCNN",
                    "lines_added": 4,
                    "lines_covered": 3,
                    "lines_unknown": 0,
                }
            },
        }
    }
def get_pushlog(self):
    with hgmo.HGMO(self.repo_dir) as hgmo_server:
        pushlog = hgmo_server.get_pushes(startID=0)
        logger.info("Pushlog retrieved")
        return pushlog
def go_from_trigger_try(self):
    phabricatorUploader = PhabricatorUploader(self.repo_dir, self.revision)

    with hgmo.HGMO(server_address=TRY_REPOSITORY) as hgmo_server:
        changesets = hgmo_server.get_automation_relevance_changesets(self.revision)['changesets']

    if not any(
        phabricatorUploader.parse_revision_id(changeset['desc']) is not None
        for changeset in changesets
    ):
        logger.info('None of the commits in the try push are linked to a Phabricator revision')
        return

    self.retrieve_source_and_artifacts()

    output = grcov.report(
        self.artifactsHandler.get(),
        source_dir=self.repo_dir,
        service_number='SERVICE_NUMBER',
        commit_sha='COMMIT_SHA',
        token='TOKEN',
    )
    logger.info('Report generated successfully')

    logger.info('Upload changeset coverage data to Phabricator')
    phabricatorUploader.upload(json.loads(output), changesets)
def test_removed_file(mock_secrets, fake_hg_repo):
    hg, local, remote = fake_hg_repo

    add_file(hg, local, "file", "1\n2\n3\n4\n5\n6\n7\n")
    revision1 = commit(hg, 1)

    hg.remove(files=[bytes(os.path.join(local, "file"), "ascii")])
    revision2 = commit(hg)

    hg.push(dest=bytes(remote, "ascii"))

    copy_pushlog_database(remote, local)

    phabricator = PhabricatorUploader(local, revision2)
    report = covdir_report({"source_files": []})
    with hgmo.HGMO(local) as hgmo_server:
        stack = changesets(hgmo_server, revision2)
    with hglib.open(local) as hg:
        results = phabricator.generate(hg, report, stack)

    assert results == {
        revision1: {"revision_id": 1, "paths": {}},
        revision2: {"revision_id": None, "paths": {}},
    }
def get_hgmo_changesets(self):
    """
    Build HGMO changesets according to this repo's configuration
    """
    with hgmo.HGMO(server_address=self.repository) as hgmo_server:
        return hgmo_server.get_automation_relevance_changesets(self.revision)
def notify(self):
    content = ''

    # Get pushlog and ask the backend to generate the coverage by changeset
    # data, which will be cached.
    with hgmo.HGMO(self.repo_dir) as url:
        url += '/json-pushes'
        r = requests.get(url, params={
            'changeset': self.revision,
            'version': 2,
            'full': 1
        })
        r.raise_for_status()
        push_data = r.json()

    changesets = sum((data['changesets'] for data in push_data['pushes'].values()), [])

    for changeset in changesets:
        desc = changeset['desc'].split('\n')[0]
        if any(text in desc for text in ['r=merge', 'a=merge']):
            continue

        rev = changeset['node']

        try:
            coverage = retry(lambda: self.get_coverage_summary(rev))
        except (requests.exceptions.HTTPError, ResultNotReadyException):
            logger.warn('Failure to retrieve coverage summary')
            continue

        if coverage['commit_covered'] < 0.2 * coverage['commit_added']:
            content += '* [{}](https://firefox-code-coverage.herokuapp.com/#/changeset/{}): {} covered out of {} added.\n'.format(desc, rev, coverage['commit_covered'], coverage['commit_added'])  # noqa

    if content == '':
        return
    elif len(content) > 102400:
        # Content is 102400 chars max
        content = content[:102000] + '\n\n... Content max limit reached!'

    for email in secrets[secrets.EMAIL_ADDRESSES]:
        self.notify_service.email({
            'address': email,
            'subject': 'Coverage patches for {}'.format(self.revision),
            'content': content,
            'template': 'fullscreen',
        })
def go_from_trigger_mozilla_central(self):
    # Check the covdir report does not already exist
    if uploader.gcp_covdir_exists(self.branch, self.revision):
        logger.warn("Covdir report already on GCP")
        return

    self.retrieve_source_and_artifacts()

    # Check that all JavaScript files present in the coverage artifacts actually exist.
    # If they don't, there might be a bug in the LCOV rewriter.
    for artifact in self.artifactsHandler.get():
        if "jsvm" not in artifact:
            continue

        with zipfile.ZipFile(artifact, "r") as zf:
            for file_name in zf.namelist():
                with zf.open(file_name, "r") as fl:
                    source_files = [
                        line[3:].decode("utf-8").rstrip()
                        for line in fl
                        if line.startswith(b"SF:")
                    ]
                    missing_files = [
                        f
                        for f in source_files
                        if not os.path.exists(os.path.join(self.repo_dir, f))
                    ]
                    if len(missing_files) != 0:
                        logger.warn(
                            f"{missing_files} are present in coverage reports, but missing from the repository"
                        )

    report = self.generate_covdir()

    paths = uploader.covdir_paths(report)
    expected_extensions = [".js", ".cpp"]
    for extension in expected_extensions:
        assert any(
            path.endswith(extension) for path in paths
        ), "No {} file in the generated report".format(extension)

    # Get pushlog and ask the backend to generate the coverage by changeset
    # data, which will be cached.
    with hgmo.HGMO(self.repo_dir) as hgmo_server:
        changesets = hgmo_server.get_automation_relevance_changesets(self.revision)

    logger.info("Upload changeset coverage data to Phabricator")
    phabricatorUploader = PhabricatorUploader(self.repo_dir, self.revision)
    changesets_coverage = phabricatorUploader.upload(report, changesets)

    uploader.gcp(self.branch, self.revision, report)

    logger.info("Build uploaded on GCP")
    notify_email(self.revision, changesets, changesets_coverage)
def test_changesets_overwriting_one_commit_without_differential(mock_secrets, fake_hg_repo):
    hg, local, remote = fake_hg_repo

    add_file(hg, local, "file", "1\n2\n3\n4\n5\n6\n7\n")
    revision1 = commit(hg, 1)

    add_file(hg, local, "file", "1\n2\n3\n42\n5\n6\n7\n")
    revision2 = commit(hg)

    hg.push(dest=bytes(remote, "ascii"))

    copy_pushlog_database(remote, local)

    phabricator = PhabricatorUploader(local, revision2)
    report = covdir_report(
        {"source_files": [{"name": "file", "coverage": [None, 0, 1, 1, 1, 1, 0]}]}
    )
    with hgmo.HGMO(local) as hgmo_server:
        stack = changesets(hgmo_server, revision2)
    with hglib.open(local) as hg:
        results = phabricator.generate(hg, report, stack)

    assert results == {
        revision1: {
            "revision_id": 1,
            "paths": {
                "file": {
                    "coverage": "NUCXCCU",
                    "lines_added": 6,
                    "lines_covered": 3,
                    "lines_unknown": 1,
                }
            },
        },
        revision2: {
            "revision_id": None,
            "paths": {
                "file": {
                    "coverage": "NUCCCCU",
                    "lines_added": 1,
                    "lines_covered": 1,
                    "lines_unknown": 0,
                }
            },
        },
    }
def generate(self, report, changesets=None):
    results = {}

    with hgmo.HGMO(self.repo_dir) as hgmo_server:
        if changesets is None:
            changesets = hgmo_server.get_automation_relevance_changesets(self.revision)

        for changeset in changesets:
            # Retrieve the revision ID for this changeset.
            revision_id = self.parse_revision_id(changeset['desc'])
            if revision_id is None:
                continue

            results[revision_id] = {}

            # For each file...
            for path in changeset['files']:
                # Retrieve the coverage data.
                coverage_record = self._find_coverage(report, path)
                if coverage_record is None:
                    continue

                # Retrieve the annotate data for the build changeset.
                build_annotate = hgmo_server.get_annotate(self.revision, path)
                if build_annotate is None:
                    # This means the file has been removed by another changeset, but if this is the
                    # case, then we shouldn't have a coverage record and so we should have *continue*d
                    # earlier.
                    assert False, 'Failure to retrieve annotate data for the build changeset'

                # Build the coverage map from the annotate data and the coverage data of the build changeset.
                coverage_map = self._build_coverage_map(build_annotate, coverage_record)

                # Retrieve the annotate data for the changeset of interest.
                annotate = hgmo_server.get_annotate(changeset['node'], path)
                if annotate is None:
                    # This means the file has been removed by this changeset, and maybe was brought back by a following changeset.
                    continue

                # Apply the coverage map on the annotate data of the changeset of interest.
                results[revision_id][path] = self._apply_coverage_map(annotate, coverage_map)

    return results
def test_backout_removed_file(mock_secrets, fake_hg_repo):
    hg, local, remote = fake_hg_repo

    add_file(hg, local, "file", "1\n2\n3\n4\n5\n6\n7\n")
    revision1 = commit(hg, 1)

    hg.remove(files=[bytes(os.path.join(local, "file"), "ascii")])
    revision2 = commit(hg, 2)

    hg.backout(rev=revision2, message=f"Backout {revision2[:12]}", user="******")
    revision3 = hg.log(limit=1)[0][1].decode("ascii")

    hg.push(dest=bytes(remote, "ascii"))

    copy_pushlog_database(remote, local)

    phabricator = PhabricatorUploader(local, revision3)
    report = covdir_report(
        {"source_files": [{"name": "file", "coverage": [None, 0, 1, 1, 1, 1, 0]}]}
    )
    with hgmo.HGMO(local) as hgmo_server:
        stack = changesets(hgmo_server, revision3)
    with hglib.open(local) as hg:
        results = phabricator.generate(hg, report, stack)

    assert results == {
        revision1: {
            "revision_id": 1,
            "paths": {
                "file": {
                    "coverage": "NUCCCCU",
                    "lines_added": 6,
                    "lines_covered": 4,
                    "lines_unknown": 0,
                }
            },
        },
        revision2: {"revision_id": 2, "paths": {}},
    }
def get_pushlog(self):
    with hgmo.HGMO(self.repo_dir) as url:
        url += '/json-pushes'
        logger.info('Get pushlog', url=url)
        r = requests.get(url, params={
            'startID': 0,
            'version': 2,
            'full': 1
        })

    if not r.ok:
        logger.error('Pushlog cannot be retrieved', url=r.url, status_code=r.status_code)
        return {}

    logger.info('Pushlog retrieved')
    return r.json()
def go_from_trigger_try(self):
    phabricatorUploader = PhabricatorUploader(self.repo_dir, self.revision)

    with hgmo.HGMO(server_address=TRY_REPOSITORY) as hgmo_server:
        changesets = hgmo_server.get_automation_relevance_changesets(self.revision)

    if not any(
        parse_revision_id(changeset["desc"]) is not None for changeset in changesets
    ):
        logger.info(
            "None of the commits in the try push are linked to a Phabricator revision"
        )
        return

    self.retrieve_source_and_artifacts()

    report = self.generate_covdir()

    logger.info("Upload changeset coverage data to Phabricator")
    phabricatorUploader.upload(report, changesets)
def analyze_changeset(changeset_to_analyze: str) -> None:
    report_name = get_name(
        "mozilla-central", changeset_to_analyze, DEFAULT_FILTER, DEFAULT_FILTER
    )
    assert download_report(
        os.path.join(out_dir, "ccov-reports"), bucket, report_name
    )

    with open(
        os.path.join(out_dir, "ccov-reports", f"{report_name}.json"), "r"
    ) as f:
        report = json.load(f)

    phabricatorUploader = PhabricatorUploader(
        repo_dir, changeset_to_analyze, warnings_enabled=False
    )

    # Use the hg.mozilla.org server to get the automation relevant changesets, since
    # this information is broken in our local repo (which is mozilla-unified).
    with hgmo.HGMO(server_address=server_address) as hgmo_remote_server:
        changesets = hgmo_remote_server.get_automation_relevance_changesets(
            changeset_to_analyze
        )

    results = phabricatorUploader.generate(thread_local.hg, report, changesets)
    for changeset in changesets:
        # Look up changeset coverage from the Phabricator uploader results.
        coverage = results.get(changeset["node"])
        if coverage is None:
            logger.info("No coverage found", changeset=changeset)
            commit_coverage[changeset["node"]] = None
            continue

        commit_coverage[changeset["node"]] = {
            "added": sum(c["lines_added"] for c in coverage["paths"].values()),
            "covered": sum(c["lines_covered"] for c in coverage["paths"].values()),
            "unknown": sum(c["lines_unknown"] for c in coverage["paths"].values()),
        }
def changesets(repo_dir, revision):
    from code_coverage_bot import hgmo

    with hgmo.HGMO(repo_dir) as hgmo_server:
        return hgmo_server.get_automation_relevance_changesets(revision)
def generate(self, report, changesets):
    results = {}

    with hgmo.HGMO(self.repo_dir) as hgmo_server:
        for changeset in changesets:
            # Retrieve the revision ID for this changeset.
            revision_id = parse_revision_id(changeset["desc"])
            if revision_id is None:
                continue

            results[revision_id] = {}

            # For each file...
            for path in changeset["files"]:
                # Retrieve the coverage data.
                coverage_record = self._find_coverage(report, path)
                if coverage_record is None:
                    continue

                # Retrieve the annotate data for the build changeset.
                build_annotate = hgmo_server.get_annotate(self.revision, path)
                if build_annotate is None:
                    # This means the file has been removed by another changeset, but if this is the
                    # case, then we shouldn't have a coverage record and so we should have *continue*d
                    # earlier.
                    assert False, "Failure to retrieve annotate data for the build changeset"

                # Build the coverage map from the annotate data and the coverage data of the build changeset.
                coverage_map = self._build_coverage_map(build_annotate, coverage_record)

                # Retrieve the annotate data for the changeset of interest.
                annotate = hgmo_server.get_annotate(changeset["node"], path)
                if annotate is None:
                    # This means the file has been removed by this changeset, and maybe was brought back by a following changeset.
                    continue

                # List lines added by this patch.
                lines_added = [
                    line["lineno"]
                    for line in build_annotate
                    if line["node"] == changeset["node"]
                ]

                # Apply the coverage map on the annotate data of the changeset of interest.
                coverage = self._apply_coverage_map(annotate, coverage_map)
                results[revision_id][path] = {
                    "lines_added": len(lines_added),
                    "lines_covered": sum(
                        coverage[line - 1] in ("C", "X", "N")
                        for line in lines_added
                        if line - 1 < len(coverage)
                    ),
                    "coverage": coverage,
                }

    return results
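# Note: _build_coverage_map and _apply_coverage_map are not part of this
# snippet. Below is a minimal, hypothetical sketch of the technique they
# implement, written as free functions for readability (in context they would
# be methods on the uploader). It assumes annotate entries carry "lineno",
# "node" and "targetline" fields, and that coverage_record["coverage"] is a
# covdir-style list holding None for non-coverable lines and a hit count
# otherwise; field names and letter semantics are inferred from the
# surrounding code and the test expectations, not confirmed by this snippet.


def _coverage_letter(count):
    # N = no coverage data, U = uncovered, C = covered.
    if count is None:
        return "N"
    return "C" if count > 0 else "U"


def _build_coverage_map(build_annotate, coverage_record):
    # Key each line of the build changeset by (introducing changeset, line
    # number within that changeset), so the coverage letter can be looked up
    # again from any other revision's annotate data for the same file.
    coverage = coverage_record["coverage"]
    coverage_map = {}
    for line in build_annotate:
        lineno = line["lineno"]
        count = coverage[lineno - 1] if lineno - 1 < len(coverage) else None
        coverage_map[(line["node"], line["targetline"])] = _coverage_letter(count)
    return coverage_map


def _apply_coverage_map(annotate, coverage_map):
    # Lines absent from the map (e.g. overwritten by a later commit, so they
    # no longer exist in the build changeset) become "X" (unknown), matching
    # the lines_unknown counts asserted in the tests above.
    return "".join(
        coverage_map.get((line["node"], line["targetline"]), "X") for line in annotate
    )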
def test_generate_two_pushes(
    monkeypatch, tmpdir, mock_secrets, mock_taskcluster, mock_phabricator, fake_hg_repo
):
    tmp_path = tmpdir.strpath

    hg, local, remote = fake_hg_repo

    add_file(hg, local, "file1", "1\n2\n3\n4\n")
    revision1 = commit(hg, 1)

    add_file(hg, local, "file1", "1\n2\n3\n4\n5\n6\n7\n8\n9\n10\n")
    revision2 = commit(hg, 2)

    hg.push(dest=bytes(remote, "ascii"))

    add_file(hg, local, "file2", "1\n2\n3\n4\n")
    revision3 = commit(hg, 1)

    add_file(hg, local, "file2", "1\n2\n3\n4\n5\n6\n7\n8\n9\n10\n")
    revision4 = commit(hg, 2)

    hg.push(dest=bytes(remote, "ascii"))

    copy_pushlog_database(remote, local)

    report1 = covdir_report(
        {
            "source_files": [
                {"name": "file1", "coverage": [None, 1, 1, 0, 0, 0, 0, 0, 0, 0]}
            ]
        }
    )
    report2 = covdir_report(
        {
            "source_files": [
                {"name": "file1", "coverage": [None, 1, 1, 0, 0, 0, 0, 0, 0, 0]},
                {"name": "file2", "coverage": [None, 0, 1, 0, 0, 0, 0, 0, 1, 1]},
            ]
        }
    )

    uploaded_data = None
    patch_calls = 0

    class Blob:
        def exists(self):
            return False

        def upload_from_string(self, val):
            nonlocal uploaded_data
            uploaded_data = val

        def download_as_bytes(self):
            assert False

        def patch(self):
            nonlocal patch_calls
            assert self.content_type == "application/json"
            assert self.content_encoding == "zstd"
            patch_calls += 1

    class Bucket:
        def blob(self, path):
            assert path == "commit_coverage.json.zst"
            return Blob()

    myBucket = Bucket()

    def get_bucket(acc):
        return myBucket

    monkeypatch.setattr(commit_coverage, "get_bucket", get_bucket)

    def list_reports(bucket, repo):
        assert bucket == myBucket
        assert repo == "mozilla-central"
        yield revision2, "linux", "all"
        yield revision2, "all", "xpcshell"
        yield revision2, "all", "all"
        yield revision4, "all", "all"

    monkeypatch.setattr(commit_coverage, "list_reports", list_reports)

    def download_report(report_dir, bucket, report_name):
        if revision2 in report_name:
            os.makedirs(
                os.path.join(tmp_path, report_dir, "mozilla-central", revision2),
                exist_ok=True,
            )
            with open(
                os.path.join(
                    tmp_path, report_dir, "mozilla-central", revision2, "all:all.json"
                ),
                "w",
            ) as f:
                json.dump(report1, f)

        if revision4 in report_name:
            os.makedirs(
                os.path.join(tmp_path, report_dir, "mozilla-central", revision4),
                exist_ok=True,
            )
            with open(
                os.path.join(
                    tmp_path, report_dir, "mozilla-central", revision4, "all:all.json"
                ),
                "w",
            ) as f:
                json.dump(report2, f)

        return True

    monkeypatch.setattr(commit_coverage, "download_report", download_report)

    with hgmo.HGMO(repo_dir=local) as hgmo_server:
        lock = threading.Lock()

        class HGMOMock:
            def get_automation_relevance_changesets(self, changeset):
                with lock:
                    return hgmo_server.get_automation_relevance_changesets(changeset)

            @contextmanager
            def HGMO(server_address=None, repo_dir=None):
                assert server_address == hgmo_server.server_address
                yield HGMOMock()

        monkeypatch.setattr(commit_coverage, "hgmo", HGMOMock)

        commit_coverage.generate(hgmo_server.server_address, local, out_dir=tmp_path)

    assert patch_calls == 1

    dctx = zstandard.ZstdDecompressor()
    with open(os.path.join(tmp_path, "commit_coverage.json.zst"), "rb") as zf:
        with dctx.stream_reader(zf) as reader:
            result = json.load(reader)

    assert result == json.loads(dctx.decompress(uploaded_data))
    assert result == {
        revision1: {"added": 3, "covered": 2, "unknown": 0},
        revision2: {"added": 6, "covered": 0, "unknown": 0},
        revision3: {"added": 3, "covered": 1, "unknown": 0},
        revision4: {"added": 6, "covered": 2, "unknown": 0},
    }
def test_trigger_from_preexisting(
    monkeypatch, tmpdir, mock_secrets, mock_taskcluster, mock_phabricator, fake_hg_repo
):
    tmp_path = tmpdir.strpath

    hg, local, remote = fake_hg_repo

    add_file(hg, local, "file", "1\n2\n3\n4\n")
    commit(hg, 1)

    add_file(hg, local, "file", "1\n2\n3\n4\n5\n6\n7\n8\n9\n10\n")
    revision2 = commit(hg, 2)

    hg.push(dest=bytes(remote, "ascii"))

    add_file(hg, local, "file2", "1\n2\n3\n4\n5\n6\n7\n8\n9\n10\n")
    revision3 = commit(hg, 2)

    hg.push(dest=bytes(remote, "ascii"))

    add_file(hg, local, "file3", "1\n2\n3\n4\n5\n6\n7\n8\n9\n10\n")
    revision4 = commit(hg, 2)

    hg.push(dest=bytes(remote, "ascii"))

    responses.add(
        responses.HEAD,
        "https://firefox-ci-tc.services.mozilla.com/api/index/v1/task/project.relman.code-coverage.production.cron.latest/artifacts/public/triggered_revisions.zst",
        status=200,
    )

    responses.add(
        responses.GET,
        "https://firefox-ci-tc.services.mozilla.com/api/index/v1/task/project.relman.code-coverage.production.cron.latest/artifacts/public/triggered_revisions.zst",
        status=200,
        body=zstandard.ZstdCompressor().compress(
            f"{revision2}\n{revision3}".encode("ascii")
        ),
    )

    copy_pushlog_database(remote, local)

    myBucket = {}

    def get_bucket(acc):
        return myBucket

    monkeypatch.setattr(trigger_missing, "get_bucket", get_bucket)

    gcp_covdir_exists_calls = 0

    def gcp_covdir_exists(bucket, repository, revision, platform, suite):
        nonlocal gcp_covdir_exists_calls
        gcp_covdir_exists_calls += 1
        assert bucket == myBucket
        assert repository == "mozilla-central"
        assert platform == "all"
        assert suite == "all"
        return revision == revision3

    monkeypatch.setattr(uploader, "gcp_covdir_exists", gcp_covdir_exists)

    def slugId():
        return "myGroupId"

    monkeypatch.setattr(trigger_missing, "slugId", slugId)

    trigger_hook_calls = 0

    def get_service(serv):
        assert serv == "hooks"

        class HooksService:
            def triggerHook(self, hook_group, hook_id, payload):
                nonlocal trigger_hook_calls
                assert hook_group == "project-relman"
                assert hook_id == "code-coverage-repo-production"
                assert payload == {
                    "REPOSITORY": "https://hg.mozilla.org/mozilla-central",
                    "REVISION": revision4,
                    "taskGroupId": "myGroupId",
                    "taskName": f"covdir for {revision4}",
                }
                trigger_hook_calls += 1

        return HooksService()

    monkeypatch.setattr(taskcluster_config, "get_service", get_service)

    get_decision_task_calls = 0

    def get_decision_task(branch, revision):
        nonlocal get_decision_task_calls
        assert branch == "mozilla-central"
        assert revision == revision4
        get_decision_task_calls += 1
        return f"decisionTask-{revision}"

    monkeypatch.setattr(taskcluster, "get_decision_task", get_decision_task)

    get_task_details_calls = 0

    def get_task_details(decision_task_id):
        nonlocal get_task_details_calls
        assert decision_task_id == f"decisionTask-{revision4}"
        get_task_details_calls += 1
        return {"taskGroupId": f"decisionTaskGroup-{revision4}"}

    monkeypatch.setattr(taskcluster, "get_task_details", get_task_details)

    get_tasks_in_group_calls = 0

    def get_tasks_in_group(group_id):
        nonlocal get_tasks_in_group_calls
        assert group_id == f"decisionTaskGroup-{revision4}"
        get_tasks_in_group_calls += 1
        return [
            {
                "status": {"state": "completed"},
                "task": {"metadata": {"name": "build-linux64-ccov/opt"}},
            }
        ]

    monkeypatch.setattr(taskcluster, "get_tasks_in_group", get_tasks_in_group)

    with hgmo.HGMO(repo_dir=local) as hgmo_server:
        trigger_missing.trigger_missing(hgmo_server.server_address, out_dir=tmp_path)

    assert gcp_covdir_exists_calls == 1
    assert trigger_hook_calls == 1
    assert get_decision_task_calls == 1
    assert get_task_details_calls == 1
    assert get_tasks_in_group_calls == 1

    dctx = zstandard.ZstdDecompressor()
    with open(os.path.join(tmp_path, "triggered_revisions.zst"), "rb") as zf:
        with dctx.stream_reader(zf) as reader:
            with io.TextIOWrapper(reader, encoding="ascii") as f:
                result = set(rev for rev in f.read().splitlines())

    assert result == {revision2, revision3, revision4}
def trigger_missing(server_address: str, out_dir: str = ".") -> None:
    triggered_revisions_path = os.path.join(out_dir, "triggered_revisions.zst")

    url = f"https://firefox-ci-tc.services.mozilla.com/api/index/v1/task/project.relman.code-coverage.{secrets[secrets.APP_CHANNEL]}.cron.latest/artifacts/public/triggered_revisions.zst"  # noqa
    r = requests.head(url, allow_redirects=True)
    if r.status_code != 404:
        utils.download_file(url, triggered_revisions_path)

    try:
        dctx = zstandard.ZstdDecompressor()
        with open(triggered_revisions_path, "rb") as zf:
            with dctx.stream_reader(zf) as reader:
                with io.TextIOWrapper(reader, encoding="ascii") as f:
                    triggered_revisions = set(rev for rev in f.read().splitlines())
    except FileNotFoundError:
        triggered_revisions = set()

    # Get all mozilla-central revisions from the past year.
    days = 365 if secrets[secrets.APP_CHANNEL] == "production" else 30
    a_year_ago = datetime.utcnow() - timedelta(days=days)
    with hgmo.HGMO(server_address=server_address) as hgmo_server:
        data = hgmo_server.get_pushes(
            startDate=a_year_ago.strftime("%Y-%m-%d"), full=False, tipsonly=True
        )

    revisions = [
        (push_data["changesets"][0], int(push_data["date"]))
        for push_data in data["pushes"].values()
    ]

    logger.info(f"{len(revisions)} pushes in the past year")

    assert (
        secrets[secrets.GOOGLE_CLOUD_STORAGE] is not None
    ), "Missing GOOGLE_CLOUD_STORAGE secret"
    bucket = get_bucket(secrets[secrets.GOOGLE_CLOUD_STORAGE])

    missing_revisions = []
    for revision, timestamp in revisions:
        # Skip revisions that have already been triggered. If they are still missing,
        # it means there is a problem that is preventing us from ingesting them.
        if revision in triggered_revisions:
            continue

        # If the revision was already ingested, we don't need to trigger ingestion for it again.
        if uploader.gcp_covdir_exists(bucket, "mozilla-central", revision, "all", "all"):
            triggered_revisions.add(revision)
            continue

        missing_revisions.append((revision, timestamp))

    logger.info(f"{len(missing_revisions)} missing pushes in the past year")

    yesterday = int(datetime.timestamp(datetime.utcnow() - timedelta(days=1)))

    task_group_id = slugId()
    logger.info(f"Triggering tasks in the {task_group_id} group")
    triggered = 0
    for revision, timestamp in reversed(missing_revisions):
        # If it's older than yesterday, we assume the group finished.
        # If it is newer than yesterday, we load the group and check if all tasks in it finished.
        if timestamp > yesterday:
            decision_task_id = taskcluster.get_decision_task("mozilla-central", revision)
            if decision_task_id is None:
                continue

            group = taskcluster.get_task_details(decision_task_id)["taskGroupId"]
            if not all(
                task["status"]["state"] in taskcluster.FINISHED_STATUSES
                for task in taskcluster.get_tasks_in_group(group)
                if taskcluster.is_coverage_task(task["task"])
            ):
                continue

        trigger_task(task_group_id, revision)
        triggered_revisions.add(revision)
        triggered += 1
        if triggered == MAXIMUM_TRIGGERS:
            break

    cctx = zstandard.ZstdCompressor(threads=-1)
    with open(triggered_revisions_path, "wb") as zf:
        with cctx.stream_writer(zf) as compressor:
            with io.TextIOWrapper(compressor, encoding="ascii") as f:
                f.write("\n".join(triggered_revisions))
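# trigger_task is called above but not defined in this snippet. A minimal
# sketch follows, assuming the Taskcluster hooks client exposed by
# taskcluster_config.get_service("hooks") and the payload shape asserted in
# test_trigger_from_preexisting; the hook group/ID naming (with the app
# channel appended) is likewise inferred from that test, not confirmed here.


def trigger_task(task_group_id: str, revision: str) -> None:
    # Fire the hook that ingests coverage for a single mozilla-central push.
    hooks = taskcluster_config.get_service("hooks")
    hooks.triggerHook(
        "project-relman",
        f"code-coverage-repo-{secrets[secrets.APP_CHANNEL]}",
        {
            "REPOSITORY": "https://hg.mozilla.org/mozilla-central",
            "REVISION": revision,
            "taskGroupId": task_group_id,
            "taskName": f"covdir for {revision}",
        },
    )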
def test_generate_from_preexisting(
    monkeypatch, tmpdir, mock_secrets, mock_taskcluster, mock_phabricator, fake_hg_repo
):
    tmp_path = tmpdir.strpath

    hg, local, remote = fake_hg_repo

    add_file(hg, local, "file", "1\n2\n3\n4\n")
    revision1 = commit(hg, 1)

    add_file(hg, local, "file", "1\n2\n3\n4\n5\n6\n7\n8\n9\n10\n")
    revision2 = commit(hg, 2)

    hg.push(dest=bytes(remote, "ascii"))

    copy_pushlog_database(remote, local)

    report = covdir_report(
        {
            "source_files": [
                {"name": "file", "coverage": [None, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0]}
            ]
        }
    )

    uploaded_data = None
    patch_calls = 0

    class Blob:
        def exists(self):
            return True

        def upload_from_string(self, val):
            nonlocal uploaded_data
            uploaded_data = val

        def download_as_bytes(self):
            return zstandard.ZstdCompressor().compress(
                json.dumps(
                    {
                        "revision1": {"added": 7, "covered": 3, "unknown": 0},
                        "revision2": None,
                    }
                ).encode("ascii")
            )

        def patch(self):
            nonlocal patch_calls
            assert self.content_type == "application/json"
            assert self.content_encoding == "zstd"
            patch_calls += 1

    class Bucket:
        def blob(self, path):
            assert path == "commit_coverage.json.zst"
            return Blob()

    myBucket = Bucket()

    def get_bucket(acc):
        return myBucket

    monkeypatch.setattr(commit_coverage, "get_bucket", get_bucket)

    def list_reports(bucket, repo):
        assert bucket == myBucket
        assert repo == "mozilla-central"
        yield revision2, "linux", "all"
        yield revision2, "all", "xpcshell"
        yield revision2, "all", "all"

    monkeypatch.setattr(commit_coverage, "list_reports", list_reports)

    def download_report(report_dir, bucket, report_name):
        os.makedirs(
            os.path.join(tmp_path, report_dir, "mozilla-central", revision2),
            exist_ok=True,
        )
        with open(
            os.path.join(
                tmp_path, report_dir, "mozilla-central", revision2, "all:all.json"
            ),
            "w",
        ) as f:
            json.dump(report, f)
        return True

    monkeypatch.setattr(commit_coverage, "download_report", download_report)

    with hgmo.HGMO(repo_dir=local) as hgmo_server:
        commit_coverage.generate(hgmo_server.server_address, local, out_dir=tmp_path)

    assert patch_calls == 1

    dctx = zstandard.ZstdDecompressor()
    with open(os.path.join(tmp_path, "commit_coverage.json.zst"), "rb") as zf:
        with dctx.stream_reader(zf) as reader:
            result = json.load(reader)

    assert result == json.loads(dctx.decompress(uploaded_data))
    assert result == {
        "revision1": {"added": 7, "covered": 3, "unknown": 0},
        "revision2": None,
        revision1: {"added": 3, "covered": 2, "unknown": 0},
        revision2: {"added": 6, "covered": 0, "unknown": 0},
    }
def test_notification(mock_secrets, mock_taskcluster, mock_phabricator, fake_hg_repo):
    hg, local, remote = fake_hg_repo

    add_file(hg, local, "file", "1\n2\n3\n4\n")
    revision1 = commit(hg, 1)

    add_file(hg, local, "file", "1\n2\n3\n4\n5\n6\n7\n8\n9\n10\n")
    revision2 = commit(hg, 2)

    hg.push(dest=bytes(remote, "ascii"))

    copy_pushlog_database(remote, local)

    with hgmo.HGMO(local) as hgmo_server:
        stack = changesets(hgmo_server, revision2)

    assert len(stack) == 2
    assert (
        stack[0]["desc"]
        == "Commit [(b'A', b'file')]Differential Revision: https://phabricator.services.mozilla.com/D1"
    )
    assert (
        stack[1]["desc"]
        == "Commit [(b'M', b'file')]Differential Revision: https://phabricator.services.mozilla.com/D2"
    )

    report = covdir_report(
        {
            "source_files": [
                {"name": "file", "coverage": [None, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0]}
            ]
        }
    )

    phab = PhabricatorUploader(local, revision2)
    with hglib.open(local) as hg:
        changesets_coverage = phab.generate(hg, report, stack)

    assert changesets_coverage == {
        revision1: {
            "revision_id": 1,
            "paths": {
                "file": {
                    "lines_added": 3,
                    "lines_covered": 2,
                    "lines_unknown": 0,
                    "coverage": "NCCU",
                }
            },
        },
        revision2: {
            "revision_id": 2,
            "paths": {
                "file": {
                    "lines_added": 6,
                    "lines_covered": 0,
                    "lines_unknown": 0,
                    "coverage": "NCCUUUUUUU",
                }
            },
        },
    }

    mail = notify_email(revision2, stack, changesets_coverage)
    assert (
        mail
        == """* [Commit [(b'M', b'file')]Differential Revision: https://phabricator.services.mozilla.com/D2](https://phabricator.services.mozilla.com/D2): 0 covered out of 6 added.\n"""
    )
def test_two_commits_two_files(mock_secrets, fake_hg_repo):
    hg, local, remote = fake_hg_repo

    add_file(hg, local, "file1_commit1", "1\n2\n3\n4\n5\n6\n7\n")
    add_file(hg, local, "file2_commit1", "1\n2\n3\n")
    revision1 = commit(hg, 1)

    add_file(hg, local, "file3_commit2", "1\n2\n3\n4\n5\n")
    revision2 = commit(hg, 2)

    hg.push(dest=bytes(remote, "ascii"))

    copy_pushlog_database(remote, local)

    phabricator = PhabricatorUploader(local, revision2)
    report = covdir_report(
        {
            "source_files": [
                {"name": "file1_commit1", "coverage": [None, 0, 1, 1, 1, 1, 0]},
                {"name": "file2_commit1", "coverage": [1, 1, 0]},
                {"name": "file3_commit2", "coverage": [1, 1, 0, 1, None]},
            ]
        }
    )
    with hgmo.HGMO(local) as hgmo_server:
        stack = changesets(hgmo_server, revision2)
    with hglib.open(local) as hg:
        results = phabricator.generate(hg, report, stack)

    assert results == {
        revision1: {
            "revision_id": 1,
            "paths": {
                "file1_commit1": {
                    "coverage": "NUCCCCU",
                    "lines_added": 6,
                    "lines_covered": 4,
                    "lines_unknown": 0,
                },
                "file2_commit1": {
                    "coverage": "CCU",
                    "lines_added": 3,
                    "lines_covered": 2,
                    "lines_unknown": 0,
                },
            },
        },
        revision2: {
            "revision_id": 2,
            "paths": {
                "file3_commit2": {
                    "coverage": "CCUCN",
                    "lines_added": 4,
                    "lines_covered": 3,
                    "lines_unknown": 0,
                }
            },
        },
    }
def test_simple(mock_secrets, mock_phabricator, fake_hg_repo):
    hg, local, remote = fake_hg_repo

    add_file(hg, local, "file", "1\n2\n3\n4\n5\n6\n7\n")
    revision = commit(hg, 1)

    hg.push(dest=bytes(remote, "ascii"))

    copy_pushlog_database(remote, local)

    phabricator = PhabricatorUploader(local, revision)
    report = covdir_report(
        {"source_files": [{"name": "file", "coverage": [None, 0, 1, 1, 1, 1, 0]}]}
    )
    with hgmo.HGMO(local) as hgmo_server:
        stack = changesets(hgmo_server, revision)
    with hglib.open(local) as hg:
        results = phabricator.generate(hg, report, stack)

    assert results == {
        revision: {
            "revision_id": 1,
            "paths": {
                "file": {
                    "coverage": "NUCCCCU",
                    "lines_added": 6,
                    "lines_covered": 4,
                    "lines_unknown": 0,
                }
            },
        }
    }

    phabricator.upload(report, stack)

    assert len(responses.calls) >= 3

    call = responses.calls[-5]
    assert call.request.url == "http://phabricator.test/api/differential.revision.search"
    params = json.loads(urllib.parse.parse_qs(call.request.body)["params"][0])
    assert params["constraints"]["ids"] == [1]

    call = responses.calls[-4]
    assert call.request.url == "http://phabricator.test/api/harbormaster.queryautotargets"
    params = json.loads(urllib.parse.parse_qs(call.request.body)["params"][0])
    assert params["objectPHID"] == "PHID-DIFF-test"
    assert params["targetKeys"] == ["arcanist.unit"]

    call = responses.calls[-3]
    assert call.request.url == "http://phabricator.test/api/harbormaster.sendmessage"
    params = json.loads(urllib.parse.parse_qs(call.request.body)["params"][0])
    assert params["buildTargetPHID"] == "PHID-HMBT-test"
    assert params["type"] == "pass"
    assert params["unit"] == [
        {
            "name": "Aggregate coverage information",
            "result": "pass",
            "coverage": {"file": "NUCCCCU"},
        }
    ]
    assert params["lint"] == []

    call = responses.calls[-2]
    assert call.request.url == "http://phabricator.test/api/harbormaster.queryautotargets"
    params = json.loads(urllib.parse.parse_qs(call.request.body)["params"][0])
    assert params["objectPHID"] == "PHID-DIFF-test"
    assert params["targetKeys"] == ["arcanist.lint"]

    call = responses.calls[-1]
    assert call.request.url == "http://phabricator.test/api/harbormaster.sendmessage"
    params = json.loads(urllib.parse.parse_qs(call.request.body)["params"][0])
    assert params["buildTargetPHID"] == "PHID-HMBT-test-lint"
    assert params["type"] == "pass"
    assert params["unit"] == []
    assert params["lint"] == []