def test_backout_removed_file(mock_secrets, fake_hg_repo):
    """Coverage for a push where a file removal is backed out.

    NOTE(review): this module defines ``test_backout_removed_file`` twice;
    a later definition shadows this one, so pytest never collects this
    version. This older version also uses the two-argument
    ``generate(report, changesets)`` signature — confirm whether it should
    be deleted rather than kept.
    """
    hg, local, remote = fake_hg_repo
    # Commit 1 adds a seven-line file.
    add_file(hg, local, "file", "1\n2\n3\n4\n5\n6\n7\n")
    commit(hg, 1)
    # Commit 2 removes it; the backout then restores it.
    hg.remove(files=[bytes(os.path.join(local, "file"), "ascii")])
    revision = commit(hg, 2)
    hg.backout(rev=revision, message="backout", user="******")
    # The backout commit becomes the tip; use it as the target revision.
    revision = hg.log(limit=1)[0][1].decode("ascii")
    hg.push(dest=bytes(remote, "ascii"))
    copy_pushlog_database(remote, local)
    phabricator = PhabricatorUploader(local, revision)
    report = covdir_report(
        {"source_files": [{"name": "file", "coverage": [None, 0, 1, 1, 1, 1, 0]}]}
    )
    results = phabricator.generate(report, changesets(local, revision))
    # The removal commit (2) contributes no covered paths.
    assert results == {
        1: {"file": {"coverage": "NUCCCCU", "lines_added": 7, "lines_covered": 5}},
        2: {},
    }
def test_changesets_displacing(mock_secrets, fake_hg_repo):
    """Annotation stays line-accurate when a later commit displaces earlier lines."""
    client, local, remote = fake_hg_repo

    # First commit introduces a seven-line file.
    add_file(client, local, "file", "1\n2\n3\n4\n5\n6\n7\n")
    commit(client, 1)

    # Second commit prepends and appends lines, shifting the originals down.
    add_file(client, local, "file", "-1\n-2\n1\n2\n3\n4\n5\n6\n7\n8\n9\n")
    revision = commit(client, 2)

    client.push(dest=bytes(remote, "ascii"))
    copy_pushlog_database(remote, local)

    uploader = PhabricatorUploader(local, revision)
    line_coverage = [0, 1, None, 0, 1, 1, 1, 1, 0, 1, 0]
    report = covdir_report(
        {"source_files": [{"name": "file", "coverage": line_coverage}]}
    )

    results = uploader.generate(report, changesets(local, revision))

    # Commit 1's lines are annotated at their displaced positions; commit 2
    # only owns the four lines it actually added.
    assert results == {
        1: {"file": {"coverage": "NUCCCCU", "lines_added": 7, "lines_covered": 4}},
        2: {"file": {"coverage": "UCNUCCCCUCU", "lines_added": 4, "lines_covered": 2}},
    }
def test_changesets_overwriting_one_commit_without_differential(
    mock_secrets, fake_hg_repo
):
    """Overwritten lines show as 'X' when the overwriting commit has no Differential.

    NOTE(review): a later definition with the same name shadows this one, so
    pytest never collects this version. It also uses the older two-argument
    ``generate(report, changesets)`` signature — confirm whether it should be
    deleted rather than kept.
    """
    hg, local, remote = fake_hg_repo
    add_file(hg, local, "file", "1\n2\n3\n4\n5\n6\n7\n")
    commit(hg, 1)
    # Second commit overwrites line 4; no bug id passed, so no Differential link.
    add_file(hg, local, "file", "1\n2\n3\n42\n5\n6\n7\n")
    revision = commit(hg)
    hg.push(dest=bytes(remote, "ascii"))
    copy_pushlog_database(remote, local)
    phabricator = PhabricatorUploader(local, revision)
    report = covdir_report(
        {"source_files": [{"name": "file", "coverage": [None, 0, 1, 1, 1, 1, 0]}]}
    )
    results = phabricator.generate(report, changesets(local, revision))
    # Only commit 1 is reported; its overwritten line appears as 'X'.
    assert results == {
        1: {"file": {"coverage": "NUCXCCU", "lines_added": 6, "lines_covered": 4}}
    }
def test_coverable_last_lines(mock_secrets, mock_phabricator, fake_hg_repo):
    """Trailing file lines missing from the covdir report come back as 'N'."""
    client, local, remote = fake_hg_repo

    # Seven-line file, but the report below only covers the first five lines.
    add_file(client, local, "file", "1\n2\n3\n4\n5\n6\n7\n")
    revision = commit(client, 1)
    client.push(dest=bytes(remote, "ascii"))
    copy_pushlog_database(remote, local)

    uploader = PhabricatorUploader(local, revision)
    report = covdir_report(
        {"source_files": [{"name": "file", "coverage": [None, 0, 1, 1, 1]}]}
    )

    with hgmo.HGMO(local) as hgmo_server:
        stack = changesets(hgmo_server, revision)
    with hglib.open(local) as repo:
        results = uploader.generate(repo, report, stack)

    # The two lines past the end of the report are marked non-coverable ('N').
    expected_file = {
        "coverage": "NUCCCNN",
        "lines_added": 4,
        "lines_covered": 3,
        "lines_unknown": 0,
    }
    assert results == {
        revision: {"revision_id": 1, "paths": {"file": expected_file}}
    }
def test_removed_file(mock_secrets, fake_hg_repo):
    """A commit that only deletes a file yields an empty per-path result."""
    client, local, remote = fake_hg_repo

    add_file(client, local, "file", "1\n2\n3\n4\n5\n6\n7\n")
    revision1 = commit(client, 1)

    # Second commit removes the file entirely (no bug id → no revision_id).
    client.remove(files=[bytes(os.path.join(local, "file"), "ascii")])
    revision2 = commit(client)

    client.push(dest=bytes(remote, "ascii"))
    copy_pushlog_database(remote, local)

    uploader = PhabricatorUploader(local, revision2)
    empty_report = covdir_report({"source_files": []})

    with hgmo.HGMO(local) as hgmo_server:
        stack = changesets(hgmo_server, revision2)
    with hglib.open(local) as repo:
        results = uploader.generate(repo, empty_report, stack)

    assert results == {
        revision1: {"revision_id": 1, "paths": {}},
        revision2: {"revision_id": None, "paths": {}},
    }
def test_notification(mock_secrets, mock_taskcluster, mock_phabricator, fake_hg_repo):
    """Email notification lists only changesets with uncovered added lines.

    NOTE(review): a later definition of ``test_notification`` in this module
    shadows this one, so pytest never collects this version. It also uses the
    older two-argument ``generate(report, changesets)`` signature — confirm
    whether it should be deleted rather than kept.
    """
    hg, local, remote = fake_hg_repo
    add_file(hg, local, "file", "1\n2\n3\n4\n")
    commit(hg, 1)
    add_file(hg, local, "file", "1\n2\n3\n4\n5\n6\n7\n8\n9\n10\n")
    revision = commit(hg, 2)
    hg.push(dest=bytes(remote, "ascii"))
    copy_pushlog_database(remote, local)
    stack = changesets(local, revision)
    # Sanity-check the stack built by the fixture's commit helper.
    assert len(stack) == 2
    assert (
        stack[0]["desc"]
        == "Commit [(b'A', b'file')]Differential Revision: https://phabricator.services.mozilla.com/D1"
    )
    assert (
        stack[1]["desc"]
        == "Commit [(b'M', b'file')]Differential Revision: https://phabricator.services.mozilla.com/D2"
    )
    report = covdir_report(
        {
            "source_files": [
                {"name": "file", "coverage": [None, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0]}
            ]
        }
    )
    phab = PhabricatorUploader(local, revision)
    changesets_coverage = phab.generate(report, stack)
    assert changesets_coverage == {
        1: {"file": {"lines_added": 4, "lines_covered": 2, "coverage": "NUCU"}},
        2: {"file": {"lines_added": 6, "lines_covered": 0, "coverage": "NUCUUUUUUU"}},
    }
    mail = notify_email(revision, stack, changesets_coverage)
    # Only the second changeset (0/6 covered) should be flagged in the email.
    assert (
        mail
        == "* [Commit [(b'M', b'file')]Differential Revision: https://phabricator.services.mozilla.com/D2](https://firefox-code-coverage.herokuapp.com/#/changeset/{}): 0 covered out of 6 added.\n".format(  # noqa
            revision
        )
    )
def test_changesets_overwriting_one_commit_without_differential(
    mock_secrets, fake_hg_repo
):
    """Overwritten lines show as 'X'; the overwriting commit has no revision_id.

    NOTE(review): this name is defined twice in this module; this later
    definition is the one pytest actually collects.
    """
    hg, local, remote = fake_hg_repo
    add_file(hg, local, "file", "1\n2\n3\n4\n5\n6\n7\n")
    revision1 = commit(hg, 1)
    # Second commit overwrites line 4; committed without a bug id, so it has
    # no associated Phabricator Differential (revision_id is None below).
    add_file(hg, local, "file", "1\n2\n3\n42\n5\n6\n7\n")
    revision2 = commit(hg)
    hg.push(dest=bytes(remote, "ascii"))
    copy_pushlog_database(remote, local)
    phabricator = PhabricatorUploader(local, revision2)
    report = covdir_report(
        {"source_files": [{"name": "file", "coverage": [None, 0, 1, 1, 1, 1, 0]}]}
    )
    with hgmo.HGMO(local) as hgmo_server:
        stack = changesets(hgmo_server, revision2)
    with hglib.open(local) as hg:
        results = phabricator.generate(hg, report, stack)
    assert results == {
        revision1: {
            "revision_id": 1,
            "paths": {
                "file": {
                    # Line 4 was overwritten by revision2, hence the 'X'.
                    "coverage": "NUCXCCU",
                    "lines_added": 6,
                    "lines_covered": 3,
                    "lines_unknown": 1,
                }
            },
        },
        revision2: {
            "revision_id": None,
            "paths": {
                "file": {
                    "coverage": "NUCCCCU",
                    "lines_added": 1,
                    "lines_covered": 1,
                    "lines_unknown": 0,
                }
            },
        },
    }
def test_third_party(mock_secrets, fake_hg_repo):
    """Third-party detection is driven by tools/rewriting/ThirdPartyPaths.txt."""
    client, local, remote = fake_hg_repo
    add_file(
        client, local, "tools/rewriting/ThirdPartyPaths.txt", "third_party\nsome/path"
    )
    revision = commit(client, 1)

    uploader = PhabricatorUploader(local, revision)
    assert uploader.third_parties == ["third_party", "some/path"]

    # A path is third-party iff it lives under one of the listed prefixes;
    # note "some/test.cpp" is NOT under the "some/path" prefix.
    cases = [
        ("js/src/xx.cpp", False),
        ("dom/media/yyy.h", False),
        ("third_party/test.cpp", True),
        ("some/test.cpp", False),
        ("some/path/test.cpp", True),
    ]
    for path, expected in cases:
        assert uploader.is_third_party(path) is expected, path
def test_file_with_no_coverage(mock_secrets, fake_hg_repo):
    """An empty covdir report produces an empty result for the changeset."""
    client, local, remote = fake_hg_repo

    add_file(client, local, "file", "1\n2\n3\n4\n5\n6\n7\n")
    revision = commit(client, 1)
    client.push(dest=bytes(remote, "ascii"))
    copy_pushlog_database(remote, local)

    uploader = PhabricatorUploader(local, revision)
    empty_report = covdir_report({"source_files": []})
    results = uploader.generate(empty_report, changesets(local, revision))

    # The changeset is still keyed in the result, just with no paths.
    assert results == {1: {}}
def test_backout_removed_file(mock_secrets, fake_hg_repo):
    """Coverage for a push whose tip is a backout of a file removal.

    NOTE(review): this name is defined twice in this module; this later
    definition is the one pytest actually collects.
    """
    hg, local, remote = fake_hg_repo
    # Commit 1 adds the file; commit 2 removes it; the backout restores it.
    add_file(hg, local, "file", "1\n2\n3\n4\n5\n6\n7\n")
    revision1 = commit(hg, 1)
    hg.remove(files=[bytes(os.path.join(local, "file"), "ascii")])
    revision2 = commit(hg, 2)
    hg.backout(rev=revision2, message=f"Backout {revision2[:12]}", user="******")
    # The backout commit is now the tip; use it as the target revision.
    revision3 = hg.log(limit=1)[0][1].decode("ascii")
    hg.push(dest=bytes(remote, "ascii"))
    copy_pushlog_database(remote, local)
    phabricator = PhabricatorUploader(local, revision3)
    report = covdir_report(
        {"source_files": [{"name": "file", "coverage": [None, 0, 1, 1, 1, 1, 0]}]}
    )
    with hgmo.HGMO(local) as hgmo_server:
        stack = changesets(hgmo_server, revision3)
    with hglib.open(local) as hg:
        results = phabricator.generate(hg, report, stack)
    assert results == {
        revision1: {
            "revision_id": 1,
            "paths": {
                "file": {
                    "coverage": "NUCCCCU",
                    "lines_added": 6,
                    "lines_covered": 4,
                    "lines_unknown": 0,
                }
            },
        },
        # The removal commit ends up with no paths (its change was backed out).
        revision2: {"revision_id": 2, "paths": {}},
    }
def test_supported_extensions(mock_secrets, fake_hg_repo):
    """Only source-code file extensions are eligible for coverage upload."""
    client, local, remote = fake_hg_repo
    add_file(client, local, "file", "1\n2\n3\n4\n5\n6\n7\n")
    revision = commit(client, 1)
    uploader = PhabricatorUploader(local, revision)

    # Build-system, docs, config and data files are all rejected.
    rejected = (
        "README",
        "requirements.txt",
        "tools/Cargo.toml",
        "tools/Cargo.lock",
        "dom/feature.idl",
        "dom/feature.webidl",
        "xpcom/moz.build",
        "payload.json",
        "inline.patch",
        "README.mozilla",
        "config.yml",
        "config.yaml",
        "config.ini",
        "tooling.py",
    )
    for path in rejected:
        assert uploader.is_supported_extension(path) is False, path

    # C/C++, JS, markup and Rust sources are accepted, regardless of directory.
    accepted = (
        "test.cpp",
        "some/path/to/test.cpp",
        "xxxYYY.h",
        "test.c",
        "test.cc",
        "test.cxx",
        "test.hh",
        "test.hpp",
        "test.hxx",
        "test.js",
        "test.jsm",
        "test.xul",
        "test.xml",
        "test.html",
        "test.xhtml",
        "test.rs",
    )
    for path in accepted:
        assert uploader.is_supported_extension(path) is True, path
def test_two_commits_two_files(mock_secrets, fake_hg_repo):
    """Coverage is split across two commits touching three files.

    NOTE(review): a later definition of ``test_two_commits_two_files`` shadows
    this one, so pytest never collects this version. It also uses the older
    two-argument ``generate(report, changesets)`` signature — confirm whether
    it should be deleted rather than kept. Also note the first ``revision``
    binding below is immediately overwritten by the second commit.
    """
    hg, local, remote = fake_hg_repo
    add_file(hg, local, "file1_commit1", "1\n2\n3\n4\n5\n6\n7\n")
    add_file(hg, local, "file2_commit1", "1\n2\n3\n")
    revision = commit(hg, 1)
    add_file(hg, local, "file3_commit2", "1\n2\n3\n4\n5\n")
    revision = commit(hg, 2)
    hg.push(dest=bytes(remote, "ascii"))
    copy_pushlog_database(remote, local)
    phabricator = PhabricatorUploader(local, revision)
    report = covdir_report(
        {
            "source_files": [
                {"name": "file1_commit1", "coverage": [None, 0, 1, 1, 1, 1, 0]},
                {"name": "file2_commit1", "coverage": [1, 1, 0]},
                {"name": "file3_commit2", "coverage": [1, 1, 0, 1, None]},
            ]
        }
    )
    results = phabricator.generate(report, changesets(local, revision))
    # Files are attributed to the commit that introduced them.
    assert results == {
        1: {
            "file1_commit1": {
                "coverage": "NUCCCCU",
                "lines_added": 7,
                "lines_covered": 5,
            },
            "file2_commit1": {"coverage": "CCU", "lines_added": 3, "lines_covered": 2},
        },
        2: {
            "file3_commit2": {"coverage": "CCUCN", "lines_added": 5, "lines_covered": 4}
        },
    }
def test_simple(mock_secrets, mock_phabricator, fake_hg_repo):
    """End-to-end: generate coverage for one commit and upload it to Phabricator.

    Checks both the generated per-changeset data and the sequence of
    Phabricator/Harbormaster API calls recorded by the mocked `responses`
    transport during ``upload()``.
    """
    hg, local, remote = fake_hg_repo
    add_file(hg, local, "file", "1\n2\n3\n4\n5\n6\n7\n")
    revision = commit(hg, 1)
    hg.push(dest=bytes(remote, "ascii"))
    copy_pushlog_database(remote, local)
    phabricator = PhabricatorUploader(local, revision)
    report = covdir_report(
        {"source_files": [{"name": "file", "coverage": [None, 0, 1, 1, 1, 1, 0]}]}
    )
    results = phabricator.generate(report, changesets(local, revision))
    assert results == {
        1: {"file": {"coverage": "NUCCCCU", "lines_added": 7, "lines_covered": 5}}
    }
    phabricator.upload(report, changesets(local, revision))

    # The assertions below index responses.calls[-5] .. [-1], so at least five
    # HTTP calls must have been recorded. The previous bound (`>= 3`) was too
    # weak: with 3 or 4 calls the negative indexing would raise IndexError
    # instead of producing a clear assertion failure.
    assert len(responses.calls) >= 5

    # 1) The uploader looks up the Differential revision for the changeset.
    call = responses.calls[-5]
    assert (
        call.request.url == "http://phabricator.test/api/differential.revision.search"
    )
    params = json.loads(urllib.parse.parse_qs(call.request.body)["params"][0])
    assert params["constraints"]["ids"] == [1]

    # 2) It resolves the Harbormaster build target for unit results.
    call = responses.calls[-4]
    assert (
        call.request.url == "http://phabricator.test/api/harbormaster.queryautotargets"
    )
    params = json.loads(urllib.parse.parse_qs(call.request.body)["params"][0])
    assert params["objectPHID"] == "PHID-DIFF-test"
    assert params["targetKeys"] == ["arcanist.unit"]

    # 3) It sends the aggregated coverage as a passing unit result.
    call = responses.calls[-3]
    assert call.request.url == "http://phabricator.test/api/harbormaster.sendmessage"
    params = json.loads(urllib.parse.parse_qs(call.request.body)["params"][0])
    assert params["buildTargetPHID"] == "PHID-HMBT-test"
    assert params["type"] == "pass"
    assert params["unit"] == [
        {
            "name": "Aggregate coverage information",
            "result": "pass",
            "coverage": {"file": "NUCCCCU"},
        }
    ]
    assert params["lint"] == []

    # 4) It resolves the lint build target...
    call = responses.calls[-2]
    assert (
        call.request.url == "http://phabricator.test/api/harbormaster.queryautotargets"
    )
    params = json.loads(urllib.parse.parse_qs(call.request.body)["params"][0])
    assert params["objectPHID"] == "PHID-DIFF-test"
    assert params["targetKeys"] == ["arcanist.lint"]

    # 5) ...and sends an empty (passing) lint message to close it out.
    call = responses.calls[-1]
    assert call.request.url == "http://phabricator.test/api/harbormaster.sendmessage"
    params = json.loads(urllib.parse.parse_qs(call.request.body)["params"][0])
    assert params["buildTargetPHID"] == "PHID-HMBT-test-lint"
    assert params["type"] == "pass"
    assert params["unit"] == []
    assert params["lint"] == []
def test_generate_two_pushes(
    monkeypatch, tmpdir, mock_secrets, mock_taskcluster, mock_phabricator, fake_hg_repo
):
    """commit_coverage.generate aggregates per-commit coverage over two pushes.

    GCS is mocked with local Blob/Bucket stand-ins, report listing/download is
    stubbed to serve two covdir reports (one per push tip), and the hgmo module
    is replaced with a lock-guarded proxy to the local test server.
    """
    tmp_path = tmpdir.strpath
    hg, local, remote = fake_hg_repo
    # Push 1: two commits touching file1.
    add_file(hg, local, "file1", "1\n2\n3\n4\n")
    revision1 = commit(hg, 1)
    add_file(hg, local, "file1", "1\n2\n3\n4\n5\n6\n7\n8\n9\n10\n")
    revision2 = commit(hg, 2)
    hg.push(dest=bytes(remote, "ascii"))
    # Push 2: two commits touching file2.
    add_file(hg, local, "file2", "1\n2\n3\n4\n")
    revision3 = commit(hg, 1)
    add_file(hg, local, "file2", "1\n2\n3\n4\n5\n6\n7\n8\n9\n10\n")
    revision4 = commit(hg, 2)
    hg.push(dest=bytes(remote, "ascii"))
    copy_pushlog_database(remote, local)
    # Covdir reports for the tip of each push.
    report1 = covdir_report(
        {
            "source_files": [
                {"name": "file1", "coverage": [None, 1, 1, 0, 0, 0, 0, 0, 0, 0]}
            ]
        }
    )
    report2 = covdir_report(
        {
            "source_files": [
                {"name": "file1", "coverage": [None, 1, 1, 0, 0, 0, 0, 0, 0, 0]},
                {"name": "file2", "coverage": [None, 0, 1, 0, 0, 0, 0, 0, 1, 1]},
            ]
        }
    )
    uploaded_data = None
    patch_calls = 0

    # Minimal google-cloud-storage Blob stand-in: no pre-existing artifact,
    # captures the uploaded bytes, and counts metadata patch() calls.
    class Blob:
        def exists(self):
            return False

        def upload_from_string(self, val):
            nonlocal uploaded_data
            uploaded_data = val

        def download_as_bytes(self):
            # Must not be called since exists() is False.
            assert False

        def patch(self):
            nonlocal patch_calls
            assert self.content_type == "application/json"
            assert self.content_encoding == "zstd"
            patch_calls += 1

    class Bucket:
        def blob(self, path):
            assert path == "commit_coverage.json.zst"
            return Blob()

    myBucket = Bucket()

    def get_bucket(acc):
        return myBucket

    monkeypatch.setattr(commit_coverage, "get_bucket", get_bucket)

    # Advertise several report variants; only the "all/all" ones matter.
    def list_reports(bucket, repo):
        assert bucket == myBucket
        assert repo == "mozilla-central"
        yield revision2, "linux", "all"
        yield revision2, "all", "xpcshell"
        yield revision2, "all", "all"
        yield revision4, "all", "all"

    monkeypatch.setattr(commit_coverage, "list_reports", list_reports)

    # Serve the matching covdir report from the local temp directory.
    def download_report(report_dir, bucket, report_name):
        if revision2 in report_name:
            os.makedirs(
                os.path.join(tmp_path, report_dir, "mozilla-central", revision2),
                exist_ok=True,
            )
            with open(
                os.path.join(
                    tmp_path, report_dir, "mozilla-central", revision2, "all:all.json"
                ),
                "w",
            ) as f:
                json.dump(report1, f)
        if revision4 in report_name:
            os.makedirs(
                os.path.join(tmp_path, report_dir, "mozilla-central", revision4),
                exist_ok=True,
            )
            with open(
                os.path.join(
                    tmp_path, report_dir, "mozilla-central", revision4, "all:all.json"
                ),
                "w",
            ) as f:
                json.dump(report2, f)
        return True

    monkeypatch.setattr(commit_coverage, "download_report", download_report)

    with hgmo.HGMO(repo_dir=local) as hgmo_server:
        # generate() may query hgmo from multiple workers; serialize access
        # to the single test server with a lock.
        lock = threading.Lock()

        class HGMOMock:
            def get_automation_relevance_changesets(self, changeset):
                with lock:
                    return hgmo_server.get_automation_relevance_changesets(changeset)

            @contextmanager
            def HGMO(server_address=None, repo_dir=None):
                assert server_address == hgmo_server.server_address
                yield HGMOMock()

        monkeypatch.setattr(commit_coverage, "hgmo", HGMOMock)

        commit_coverage.generate(hgmo_server.server_address, local, out_dir=tmp_path)

    assert patch_calls == 1
    # The on-disk artifact and the uploaded blob must decode to the same data.
    dctx = zstandard.ZstdDecompressor()
    with open(os.path.join(tmp_path, "commit_coverage.json.zst"), "rb") as zf:
        with dctx.stream_reader(zf) as reader:
            result = json.load(reader)
    assert result == json.loads(dctx.decompress(uploaded_data))
    assert result == {
        revision1: {
            "added": 3,
            "covered": 2,
            "unknown": 0,
        },
        revision2: {
            "added": 6,
            "covered": 0,
            "unknown": 0,
        },
        revision3: {
            "added": 3,
            "covered": 1,
            "unknown": 0,
        },
        revision4: {
            "added": 6,
            "covered": 2,
            "unknown": 0,
        },
    }
def test_two_commits_two_files(mock_secrets, fake_hg_repo):
    """Coverage is split across two commits touching three files.

    NOTE(review): this name is defined twice in this module; this later
    definition is the one pytest actually collects.
    """
    hg, local, remote = fake_hg_repo
    # Commit 1 adds two files; commit 2 adds a third.
    add_file(hg, local, "file1_commit1", "1\n2\n3\n4\n5\n6\n7\n")
    add_file(hg, local, "file2_commit1", "1\n2\n3\n")
    revision1 = commit(hg, 1)
    add_file(hg, local, "file3_commit2", "1\n2\n3\n4\n5\n")
    revision2 = commit(hg, 2)
    hg.push(dest=bytes(remote, "ascii"))
    copy_pushlog_database(remote, local)
    phabricator = PhabricatorUploader(local, revision2)
    report = covdir_report(
        {
            "source_files": [
                {"name": "file1_commit1", "coverage": [None, 0, 1, 1, 1, 1, 0]},
                {"name": "file2_commit1", "coverage": [1, 1, 0]},
                {"name": "file3_commit2", "coverage": [1, 1, 0, 1, None]},
            ]
        }
    )
    with hgmo.HGMO(local) as hgmo_server:
        stack = changesets(hgmo_server, revision2)
    with hglib.open(local) as hg:
        results = phabricator.generate(hg, report, stack)
    # Each file is attributed to the commit that introduced it.
    assert results == {
        revision1: {
            "revision_id": 1,
            "paths": {
                "file1_commit1": {
                    "coverage": "NUCCCCU",
                    "lines_added": 6,
                    "lines_covered": 4,
                    "lines_unknown": 0,
                },
                "file2_commit1": {
                    "coverage": "CCU",
                    "lines_added": 3,
                    "lines_covered": 2,
                    "lines_unknown": 0,
                },
            },
        },
        revision2: {
            "revision_id": 2,
            "paths": {
                "file3_commit2": {
                    "coverage": "CCUCN",
                    "lines_added": 4,
                    "lines_covered": 3,
                    "lines_unknown": 0,
                }
            },
        },
    }
def test_generate_from_preexisting(
    monkeypatch, tmpdir, mock_secrets, mock_taskcluster, mock_phabricator, fake_hg_repo
):
    """commit_coverage.generate merges new results into a pre-existing artifact.

    The mocked GCS blob reports exists() == True and serves a previously
    uploaded commit_coverage payload; the final result must keep those
    pre-existing entries alongside the newly generated ones.
    """
    tmp_path = tmpdir.strpath
    hg, local, remote = fake_hg_repo
    add_file(hg, local, "file", "1\n2\n3\n4\n")
    revision1 = commit(hg, 1)
    add_file(hg, local, "file", "1\n2\n3\n4\n5\n6\n7\n8\n9\n10\n")
    revision2 = commit(hg, 2)
    hg.push(dest=bytes(remote, "ascii"))
    copy_pushlog_database(remote, local)
    report = covdir_report(
        {
            "source_files": [
                {"name": "file", "coverage": [None, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0]}
            ]
        }
    )
    uploaded_data = None
    patch_calls = 0

    # GCS Blob stand-in: a prior artifact exists and is served zstd-compressed.
    class Blob:
        def exists(self):
            return True

        def upload_from_string(self, val):
            nonlocal uploaded_data
            uploaded_data = val

        def download_as_bytes(self):
            # Pre-existing payload with placeholder keys ("revision1",
            # "revision2" literals, not the local hg hashes).
            return zstandard.ZstdCompressor().compress(
                json.dumps(
                    {
                        "revision1": {
                            "added": 7,
                            "covered": 3,
                            "unknown": 0,
                        },
                        "revision2": None,
                    }
                ).encode("ascii")
            )

        def patch(self):
            nonlocal patch_calls
            assert self.content_type == "application/json"
            assert self.content_encoding == "zstd"
            patch_calls += 1

    class Bucket:
        def blob(self, path):
            assert path == "commit_coverage.json.zst"
            return Blob()

    myBucket = Bucket()

    def get_bucket(acc):
        return myBucket

    monkeypatch.setattr(commit_coverage, "get_bucket", get_bucket)

    # Advertise several report variants; only "all/all" is consumed.
    def list_reports(bucket, repo):
        assert bucket == myBucket
        assert repo == "mozilla-central"
        yield revision2, "linux", "all"
        yield revision2, "all", "xpcshell"
        yield revision2, "all", "all"

    monkeypatch.setattr(commit_coverage, "list_reports", list_reports)

    # Serve the covdir report from the local temp directory.
    def download_report(report_dir, bucket, report_name):
        os.makedirs(
            os.path.join(tmp_path, report_dir, "mozilla-central", revision2),
            exist_ok=True,
        )
        with open(
            os.path.join(
                tmp_path, report_dir, "mozilla-central", revision2, "all:all.json"
            ),
            "w",
        ) as f:
            json.dump(report, f)
        return True

    monkeypatch.setattr(commit_coverage, "download_report", download_report)

    with hgmo.HGMO(repo_dir=local) as hgmo_server:
        commit_coverage.generate(hgmo_server.server_address, local, out_dir=tmp_path)

    assert patch_calls == 1
    # The on-disk artifact and the uploaded blob must decode to the same data.
    dctx = zstandard.ZstdDecompressor()
    with open(os.path.join(tmp_path, "commit_coverage.json.zst"), "rb") as zf:
        with dctx.stream_reader(zf) as reader:
            result = json.load(reader)
    assert result == json.loads(dctx.decompress(uploaded_data))
    # Pre-existing entries are preserved; new revisions are appended.
    assert result == {
        "revision1": {
            "added": 7,
            "covered": 3,
            "unknown": 0
        },
        "revision2": None,
        revision1: {
            "added": 3,
            "covered": 2,
            "unknown": 0,
        },
        revision2: {
            "added": 6,
            "covered": 0,
            "unknown": 0,
        },
    }
def test_trigger_from_preexisting(
    monkeypatch, tmpdir, mock_secrets, mock_taskcluster, mock_phabricator, fake_hg_repo
):
    """trigger_missing only triggers hooks for revisions not already handled.

    revision2/revision3 are listed in the pre-existing triggered_revisions
    artifact (served via mocked HTTP), so only revision4 should lead to a
    Taskcluster hook trigger; the artifact is rewritten with all three.
    """
    tmp_path = tmpdir.strpath
    hg, local, remote = fake_hg_repo
    # Three pushes; revision2..4 are the push tips.
    add_file(hg, local, "file", "1\n2\n3\n4\n")
    commit(hg, 1)
    add_file(hg, local, "file", "1\n2\n3\n4\n5\n6\n7\n8\n9\n10\n")
    revision2 = commit(hg, 2)
    hg.push(dest=bytes(remote, "ascii"))
    add_file(hg, local, "file2", "1\n2\n3\n4\n5\n6\n7\n8\n9\n10\n")
    revision3 = commit(hg, 2)
    hg.push(dest=bytes(remote, "ascii"))
    add_file(hg, local, "file3", "1\n2\n3\n4\n5\n6\n7\n8\n9\n10\n")
    revision4 = commit(hg, 2)
    hg.push(dest=bytes(remote, "ascii"))
    # Serve the pre-existing triggered_revisions artifact over mocked HTTP:
    # it already contains revision2 and revision3.
    responses.add(
        responses.HEAD,
        "https://firefox-ci-tc.services.mozilla.com/api/index/v1/task/project.relman.code-coverage.production.cron.latest/artifacts/public/triggered_revisions.zst",
        status=200,
    )
    responses.add(
        responses.GET,
        "https://firefox-ci-tc.services.mozilla.com/api/index/v1/task/project.relman.code-coverage.production.cron.latest/artifacts/public/triggered_revisions.zst",
        status=200,
        body=zstandard.ZstdCompressor().compress(
            f"{revision2}\n{revision3}".encode("ascii")
        ),
    )
    copy_pushlog_database(remote, local)
    myBucket = {}

    def get_bucket(acc):
        return myBucket

    monkeypatch.setattr(trigger_missing, "get_bucket", get_bucket)
    gcp_covdir_exists_calls = 0

    # Only revision3 already has a covdir report in the bucket.
    def gcp_covdir_exists(bucket, repository, revision, platform, suite):
        nonlocal gcp_covdir_exists_calls
        gcp_covdir_exists_calls += 1
        assert bucket == myBucket
        assert repository == "mozilla-central"
        assert platform == "all"
        assert suite == "all"
        return revision == revision3

    monkeypatch.setattr(uploader, "gcp_covdir_exists", gcp_covdir_exists)

    def slugId():
        return "myGroupId"

    monkeypatch.setattr(trigger_missing, "slugId", slugId)
    trigger_hook_calls = 0

    # The hooks service must be asked to trigger exactly one covdir task,
    # for revision4 only.
    def get_service(serv):
        assert serv == "hooks"

        class HooksService:
            def triggerHook(self, hook_group, hook_id, payload):
                nonlocal trigger_hook_calls
                assert hook_group == "project-relman"
                assert hook_id == "code-coverage-repo-production"
                assert payload == {
                    "REPOSITORY": "https://hg.mozilla.org/mozilla-central",
                    "REVISION": revision4,
                    "taskGroupId": "myGroupId",
                    "taskName": f"covdir for {revision4}",
                }
                trigger_hook_calls += 1

        return HooksService()

    monkeypatch.setattr(taskcluster_config, "get_service", get_service)
    get_decision_task_calls = 0

    # Decision-task lookup chain, stubbed to report a completed ccov build
    # so revision4 is considered buildable.
    def get_decision_task(branch, revision):
        nonlocal get_decision_task_calls
        assert branch == "mozilla-central"
        assert revision == revision4
        get_decision_task_calls += 1
        return f"decisionTask-{revision}"

    monkeypatch.setattr(taskcluster, "get_decision_task", get_decision_task)
    get_task_details_calls = 0

    def get_task_details(decision_task_id):
        nonlocal get_task_details_calls
        assert decision_task_id == f"decisionTask-{revision4}"
        get_task_details_calls += 1
        return {"taskGroupId": f"decisionTaskGroup-{revision4}"}

    monkeypatch.setattr(taskcluster, "get_task_details", get_task_details)
    get_tasks_in_group_calls = 0

    def get_tasks_in_group(group_id):
        nonlocal get_tasks_in_group_calls
        assert group_id == f"decisionTaskGroup-{revision4}"
        get_tasks_in_group_calls += 1
        return [
            {
                "status": {
                    "state": "completed",
                },
                "task": {
                    "metadata": {
                        "name": "build-linux64-ccov/opt",
                    }
                },
            }
        ]

    monkeypatch.setattr(taskcluster, "get_tasks_in_group", get_tasks_in_group)

    with hgmo.HGMO(repo_dir=local) as hgmo_server:
        trigger_missing.trigger_missing(hgmo_server.server_address, out_dir=tmp_path)

    assert gcp_covdir_exists_calls == 1
    assert trigger_hook_calls == 1
    assert get_decision_task_calls == 1
    assert get_task_details_calls == 1
    assert get_tasks_in_group_calls == 1
    # The rewritten artifact must contain the old revisions plus revision4.
    dctx = zstandard.ZstdDecompressor()
    with open(os.path.join(tmp_path, "triggered_revisions.zst"), "rb") as zf:
        with dctx.stream_reader(zf) as reader:
            with io.TextIOWrapper(reader, encoding="ascii") as f:
                result = set(rev for rev in f.read().splitlines())
    assert result == {revision2, revision3, revision4}
def test_notification(mock_secrets, mock_taskcluster, mock_phabricator, fake_hg_repo):
    """Email notification lists only changesets with uncovered added lines.

    NOTE(review): this name is defined twice in this module; this later
    definition is the one pytest actually collects.
    """
    hg, local, remote = fake_hg_repo
    add_file(hg, local, "file", "1\n2\n3\n4\n")
    revision1 = commit(hg, 1)
    add_file(hg, local, "file", "1\n2\n3\n4\n5\n6\n7\n8\n9\n10\n")
    revision2 = commit(hg, 2)
    hg.push(dest=bytes(remote, "ascii"))
    copy_pushlog_database(remote, local)
    with hgmo.HGMO(local) as hgmo_server:
        stack = changesets(hgmo_server, revision2)
    # Sanity-check the stack built by the fixture's commit helper.
    assert len(stack) == 2
    assert (
        stack[0]["desc"]
        == "Commit [(b'A', b'file')]Differential Revision: https://phabricator.services.mozilla.com/D1"
    )
    assert (
        stack[1]["desc"]
        == "Commit [(b'M', b'file')]Differential Revision: https://phabricator.services.mozilla.com/D2"
    )
    report = covdir_report(
        {
            "source_files": [
                {"name": "file", "coverage": [None, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0]}
            ]
        }
    )
    phab = PhabricatorUploader(local, revision2)
    with hglib.open(local) as hg:
        changesets_coverage = phab.generate(hg, report, stack)
    assert changesets_coverage == {
        revision1: {
            "revision_id": 1,
            "paths": {
                "file": {
                    "lines_added": 3,
                    "lines_covered": 2,
                    "lines_unknown": 0,
                    "coverage": "NCCU",
                }
            },
        },
        revision2: {
            "revision_id": 2,
            "paths": {
                "file": {
                    "lines_added": 6,
                    "lines_covered": 0,
                    "lines_unknown": 0,
                    "coverage": "NCCUUUUUUU",
                }
            },
        },
    }
    mail = notify_email(revision2, stack, changesets_coverage)
    # Only the second changeset (0/6 covered) should be flagged in the email.
    assert (
        mail
        == """* [Commit [(b'M', b'file')]Differential Revision: https://phabricator.services.mozilla.com/D2](https://phabricator.services.mozilla.com/D2): 0 covered out of 6 added.\n"""
    )