def test_backout_removed_file(mock_secrets, fake_hg_repo):
    """A file removal that gets backed out: coverage is attached to the backout tip.

    NOTE(review): a later function in this file reuses this exact test name and
    shadows this definition at collection time — confirm which one is intended.
    """
    hg, local, remote = fake_hg_repo

    add_file(hg, local, "file", "1\n2\n3\n4\n5\n6\n7\n")
    commit(hg, 1)

    hg.remove(files=[bytes(os.path.join(local, "file"), "ascii")])
    revision = commit(hg, 2)

    # Back out the removal, then use the backout changeset as the tip revision.
    hg.backout(rev=revision, message="backout", user="******")
    revision = hg.log(limit=1)[0][1].decode("ascii")

    hg.push(dest=bytes(remote, "ascii"))
    copy_pushlog_database(remote, local)

    uploader = PhabricatorUploader(local, revision)
    report = covdir_report(
        {"source_files": [{"name": "file", "coverage": [None, 0, 1, 1, 1, 1, 0]}]}
    )

    results = uploader.generate(report, changesets(local, revision))
    assert results == {
        1: {"file": {"coverage": "NUCCCCU", "lines_added": 7, "lines_covered": 5}},
        2: {},
    }
def test_changesets_displacing(mock_secrets, fake_hg_repo):
    """Lines displaced by a second commit keep their per-changeset coverage."""
    hg, local, remote = fake_hg_repo

    add_file(hg, local, "file", "1\n2\n3\n4\n5\n6\n7\n")
    commit(hg, 1)

    # The second commit shifts the original seven lines down by two.
    add_file(hg, local, "file", "-1\n-2\n1\n2\n3\n4\n5\n6\n7\n8\n9\n")
    revision = commit(hg, 2)

    hg.push(dest=bytes(remote, "ascii"))
    copy_pushlog_database(remote, local)

    uploader = PhabricatorUploader(local, revision)
    report = covdir_report(
        {
            "source_files": [
                {"name": "file", "coverage": [0, 1, None, 0, 1, 1, 1, 1, 0, 1, 0]}
            ]
        }
    )

    results = uploader.generate(report, changesets(local, revision))
    assert results == {
        1: {"file": {"coverage": "NUCCCCU", "lines_added": 7, "lines_covered": 4}},
        2: {"file": {"coverage": "UCNUCCCCUCU", "lines_added": 4, "lines_covered": 2}},
    }
def test_changesets_overwriting_one_commit_without_differential(
    mock_secrets, fake_hg_repo
):
    """A commit without a Differential Revision overwrites a line of the first one.

    NOTE(review): a later function in this file reuses this exact test name and
    shadows this definition at collection time — confirm which one is intended.
    """
    hg, local, remote = fake_hg_repo

    add_file(hg, local, "file", "1\n2\n3\n4\n5\n6\n7\n")
    commit(hg, 1)

    # No revision id passed: this commit has no Phabricator differential.
    add_file(hg, local, "file", "1\n2\n3\n42\n5\n6\n7\n")
    revision = commit(hg)

    hg.push(dest=bytes(remote, "ascii"))
    copy_pushlog_database(remote, local)

    uploader = PhabricatorUploader(local, revision)
    report = covdir_report(
        {"source_files": [{"name": "file", "coverage": [None, 0, 1, 1, 1, 1, 0]}]}
    )

    results = uploader.generate(report, changesets(local, revision))
    assert results == {
        1: {"file": {"coverage": "NUCXCCU", "lines_added": 6, "lines_covered": 4}}
    }
def test_coverable_last_lines(mock_secrets, mock_phabricator, fake_hg_repo):
    """Lines past the end of the coverage array are reported as 'N' (no data)."""
    hg, local, remote = fake_hg_repo

    add_file(hg, local, "file", "1\n2\n3\n4\n5\n6\n7\n")
    revision = commit(hg, 1)

    hg.push(dest=bytes(remote, "ascii"))
    copy_pushlog_database(remote, local)

    uploader = PhabricatorUploader(local, revision)
    # The coverage array is shorter than the file: lines 6-7 have no data.
    report = covdir_report(
        {"source_files": [{"name": "file", "coverage": [None, 0, 1, 1, 1]}]}
    )

    with hgmo.HGMO(local) as hgmo_server:
        stack = changesets(hgmo_server, revision)

    with hglib.open(local) as hg:
        results = uploader.generate(hg, report, stack)

    assert results == {
        revision: {
            "revision_id": 1,
            "paths": {
                "file": {
                    "coverage": "NUCCCNN",
                    "lines_added": 4,
                    "lines_covered": 3,
                    "lines_unknown": 0,
                }
            },
        }
    }
def test_removed_file(mock_secrets, fake_hg_repo):
    """Removing a file yields empty path coverage for both changesets."""
    hg, local, remote = fake_hg_repo

    add_file(hg, local, "file", "1\n2\n3\n4\n5\n6\n7\n")
    revision1 = commit(hg, 1)

    hg.remove(files=[bytes(os.path.join(local, "file"), "ascii")])
    revision2 = commit(hg)

    hg.push(dest=bytes(remote, "ascii"))
    copy_pushlog_database(remote, local)

    uploader = PhabricatorUploader(local, revision2)
    report = covdir_report({"source_files": []})

    with hgmo.HGMO(local) as hgmo_server:
        stack = changesets(hgmo_server, revision2)

    with hglib.open(local) as hg:
        results = uploader.generate(hg, report, stack)

    assert results == {
        revision1: {"revision_id": 1, "paths": {}},
        revision2: {"revision_id": None, "paths": {}},
    }
def test_notification(mock_secrets, mock_taskcluster, mock_phabricator, fake_hg_repo):
    """The notification email lists only the changeset with uncovered added lines.

    NOTE(review): a later function in this file reuses this exact test name and
    shadows this definition at collection time — confirm which one is intended.
    """
    hg, local, remote = fake_hg_repo

    add_file(hg, local, "file", "1\n2\n3\n4\n")
    commit(hg, 1)

    add_file(hg, local, "file", "1\n2\n3\n4\n5\n6\n7\n8\n9\n10\n")
    revision = commit(hg, 2)

    hg.push(dest=bytes(remote, "ascii"))
    copy_pushlog_database(remote, local)

    stack = changesets(local, revision)
    assert len(stack) == 2
    assert (
        stack[0]["desc"]
        == "Commit [(b'A', b'file')]Differential Revision: https://phabricator.services.mozilla.com/D1"
    )
    assert (
        stack[1]["desc"]
        == "Commit [(b'M', b'file')]Differential Revision: https://phabricator.services.mozilla.com/D2"
    )

    report = covdir_report(
        {
            "source_files": [
                {"name": "file", "coverage": [None, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0]}
            ]
        }
    )
    phab = PhabricatorUploader(local, revision)
    changesets_coverage = phab.generate(report, stack)

    assert changesets_coverage == {
        1: {"file": {"lines_added": 4, "lines_covered": 2, "coverage": "NUCU"}},
        2: {"file": {"lines_added": 6, "lines_covered": 0, "coverage": "NUCUUUUUUU"}},
    }

    mail = notify_email(revision, stack, changesets_coverage)
    assert (
        mail
        == "* [Commit [(b'M', b'file')]Differential Revision: https://phabricator.services.mozilla.com/D2](https://firefox-code-coverage.herokuapp.com/#/changeset/{}): 0 covered out of 6 added.\n".format(  # noqa
            revision
        )
    )
def test_two_commits_two_files(mock_secrets, fake_hg_repo):
    """Coverage is split per file across two commits touching different files.

    NOTE(review): a later function in this file reuses this exact test name and
    shadows this definition at collection time — confirm which one is intended.
    """
    hg, local, remote = fake_hg_repo

    add_file(hg, local, "file1_commit1", "1\n2\n3\n4\n5\n6\n7\n")
    add_file(hg, local, "file2_commit1", "1\n2\n3\n")
    revision = commit(hg, 1)

    add_file(hg, local, "file3_commit2", "1\n2\n3\n4\n5\n")
    revision = commit(hg, 2)

    hg.push(dest=bytes(remote, "ascii"))
    copy_pushlog_database(remote, local)

    uploader = PhabricatorUploader(local, revision)
    report = covdir_report(
        {
            "source_files": [
                {"name": "file1_commit1", "coverage": [None, 0, 1, 1, 1, 1, 0]},
                {"name": "file2_commit1", "coverage": [1, 1, 0]},
                {"name": "file3_commit2", "coverage": [1, 1, 0, 1, None]},
            ]
        }
    )

    results = uploader.generate(report, changesets(local, revision))
    assert results == {
        1: {
            "file1_commit1": {
                "coverage": "NUCCCCU",
                "lines_added": 7,
                "lines_covered": 5,
            },
            "file2_commit1": {"coverage": "CCU", "lines_added": 3, "lines_covered": 2},
        },
        2: {
            "file3_commit2": {"coverage": "CCUCN", "lines_added": 5, "lines_covered": 4}
        },
    }
def test_changesets_overwriting_one_commit_without_differential(
    mock_secrets, fake_hg_repo
):
    """A line overwritten by a differential-less commit counts as unknown ('X')."""
    hg, local, remote = fake_hg_repo

    add_file(hg, local, "file", "1\n2\n3\n4\n5\n6\n7\n")
    revision1 = commit(hg, 1)

    # No revision id passed: this commit is not linked to a differential.
    add_file(hg, local, "file", "1\n2\n3\n42\n5\n6\n7\n")
    revision2 = commit(hg)

    hg.push(dest=bytes(remote, "ascii"))
    copy_pushlog_database(remote, local)

    uploader = PhabricatorUploader(local, revision2)
    report = covdir_report(
        {"source_files": [{"name": "file", "coverage": [None, 0, 1, 1, 1, 1, 0]}]}
    )

    with hgmo.HGMO(local) as hgmo_server:
        stack = changesets(hgmo_server, revision2)

    with hglib.open(local) as hg:
        results = uploader.generate(hg, report, stack)

    assert results == {
        revision1: {
            "revision_id": 1,
            "paths": {
                "file": {
                    "coverage": "NUCXCCU",
                    "lines_added": 6,
                    "lines_covered": 3,
                    "lines_unknown": 1,
                }
            },
        },
        revision2: {
            "revision_id": None,
            "paths": {
                "file": {
                    "coverage": "NUCCCCU",
                    "lines_added": 1,
                    "lines_covered": 1,
                    "lines_unknown": 0,
                }
            },
        },
    }
def test_file_with_no_coverage(mock_secrets, fake_hg_repo):
    """An empty covdir report produces an empty result for the changeset."""
    hg, local, remote = fake_hg_repo

    add_file(hg, local, "file", "1\n2\n3\n4\n5\n6\n7\n")
    revision = commit(hg, 1)

    hg.push(dest=bytes(remote, "ascii"))
    copy_pushlog_database(remote, local)

    uploader = PhabricatorUploader(local, revision)
    report = covdir_report({"source_files": []})

    results = uploader.generate(report, changesets(local, revision))
    assert results == {1: {}}
def test_backout_removed_file(mock_secrets, fake_hg_repo):
    """A backed-out removal: the removing changeset reports no covered paths."""
    hg, local, remote = fake_hg_repo

    add_file(hg, local, "file", "1\n2\n3\n4\n5\n6\n7\n")
    revision1 = commit(hg, 1)

    hg.remove(files=[bytes(os.path.join(local, "file"), "ascii")])
    revision2 = commit(hg, 2)

    # Back out the removal, then take the resulting changeset as tip.
    hg.backout(rev=revision2, message=f"Backout {revision2[:12]}", user="******")
    revision3 = hg.log(limit=1)[0][1].decode("ascii")

    hg.push(dest=bytes(remote, "ascii"))
    copy_pushlog_database(remote, local)

    uploader = PhabricatorUploader(local, revision3)
    report = covdir_report(
        {"source_files": [{"name": "file", "coverage": [None, 0, 1, 1, 1, 1, 0]}]}
    )

    with hgmo.HGMO(local) as hgmo_server:
        stack = changesets(hgmo_server, revision3)

    with hglib.open(local) as hg:
        results = uploader.generate(hg, report, stack)

    assert results == {
        revision1: {
            "revision_id": 1,
            "paths": {
                "file": {
                    "coverage": "NUCCCCU",
                    "lines_added": 6,
                    "lines_covered": 4,
                    "lines_unknown": 0,
                }
            },
        },
        revision2: {"revision_id": 2, "paths": {}},
    }
def test_simple(mock_secrets, mock_phabricator, fake_hg_repo):
    """End-to-end: generate coverage, upload it, and verify the Phabricator API calls."""
    hg, local, remote = fake_hg_repo

    add_file(hg, local, "file", "1\n2\n3\n4\n5\n6\n7\n")
    revision = commit(hg, 1)

    hg.push(dest=bytes(remote, "ascii"))
    copy_pushlog_database(remote, local)

    uploader = PhabricatorUploader(local, revision)
    report = covdir_report(
        {"source_files": [{"name": "file", "coverage": [None, 0, 1, 1, 1, 1, 0]}]}
    )

    results = uploader.generate(report, changesets(local, revision))
    assert results == {
        1: {"file": {"coverage": "NUCCCCU", "lines_added": 7, "lines_covered": 5}}
    }

    uploader.upload(report, changesets(local, revision))
    assert len(responses.calls) >= 3

    # 1) The differential revision is looked up.
    call = responses.calls[-5]
    assert (
        call.request.url == "http://phabricator.test/api/differential.revision.search"
    )
    params = json.loads(urllib.parse.parse_qs(call.request.body)["params"][0])
    assert params["constraints"]["ids"] == [1]

    # 2) The unit-test build target is resolved.
    call = responses.calls[-4]
    assert (
        call.request.url == "http://phabricator.test/api/harbormaster.queryautotargets"
    )
    params = json.loads(urllib.parse.parse_qs(call.request.body)["params"][0])
    assert params["objectPHID"] == "PHID-DIFF-test"
    assert params["targetKeys"] == ["arcanist.unit"]

    # 3) Aggregate coverage is sent as a passing unit result.
    call = responses.calls[-3]
    assert call.request.url == "http://phabricator.test/api/harbormaster.sendmessage"
    params = json.loads(urllib.parse.parse_qs(call.request.body)["params"][0])
    assert params["buildTargetPHID"] == "PHID-HMBT-test"
    assert params["type"] == "pass"
    assert params["unit"] == [
        {
            "name": "Aggregate coverage information",
            "result": "pass",
            "coverage": {"file": "NUCCCCU"},
        }
    ]
    assert params["lint"] == []

    # 4) The lint build target is resolved.
    call = responses.calls[-2]
    assert (
        call.request.url == "http://phabricator.test/api/harbormaster.queryautotargets"
    )
    params = json.loads(urllib.parse.parse_qs(call.request.body)["params"][0])
    assert params["objectPHID"] == "PHID-DIFF-test"
    assert params["targetKeys"] == ["arcanist.lint"]

    # 5) An empty passing message is sent to the lint target.
    call = responses.calls[-1]
    assert call.request.url == "http://phabricator.test/api/harbormaster.sendmessage"
    params = json.loads(urllib.parse.parse_qs(call.request.body)["params"][0])
    assert params["buildTargetPHID"] == "PHID-HMBT-test-lint"
    assert params["type"] == "pass"
    assert params["unit"] == []
    assert params["lint"] == []
def test_notification(mock_secrets, mock_taskcluster, mock_phabricator, fake_hg_repo):
    """The notification email mentions only the changeset with uncovered added lines."""
    hg, local, remote = fake_hg_repo

    add_file(hg, local, "file", "1\n2\n3\n4\n")
    revision1 = commit(hg, 1)

    add_file(hg, local, "file", "1\n2\n3\n4\n5\n6\n7\n8\n9\n10\n")
    revision2 = commit(hg, 2)

    hg.push(dest=bytes(remote, "ascii"))
    copy_pushlog_database(remote, local)

    with hgmo.HGMO(local) as hgmo_server:
        stack = changesets(hgmo_server, revision2)

    assert len(stack) == 2
    assert (
        stack[0]["desc"]
        == "Commit [(b'A', b'file')]Differential Revision: https://phabricator.services.mozilla.com/D1"
    )
    assert (
        stack[1]["desc"]
        == "Commit [(b'M', b'file')]Differential Revision: https://phabricator.services.mozilla.com/D2"
    )

    report = covdir_report(
        {
            "source_files": [
                {"name": "file", "coverage": [None, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0]}
            ]
        }
    )
    phab = PhabricatorUploader(local, revision2)
    with hglib.open(local) as hg:
        changesets_coverage = phab.generate(hg, report, stack)

    assert changesets_coverage == {
        revision1: {
            "revision_id": 1,
            "paths": {
                "file": {
                    "lines_added": 3,
                    "lines_covered": 2,
                    "lines_unknown": 0,
                    "coverage": "NCCU",
                }
            },
        },
        revision2: {
            "revision_id": 2,
            "paths": {
                "file": {
                    "lines_added": 6,
                    "lines_covered": 0,
                    "lines_unknown": 0,
                    "coverage": "NCCUUUUUUU",
                }
            },
        },
    }

    mail = notify_email(revision2, stack, changesets_coverage)
    assert (
        mail
        == """* [Commit [(b'M', b'file')]Differential Revision: https://phabricator.services.mozilla.com/D2](https://phabricator.services.mozilla.com/D2): 0 covered out of 6 added.\n"""
    )
def test_two_commits_two_files(mock_secrets, fake_hg_repo):
    """Coverage is split per file across two commits touching different files."""
    hg, local, remote = fake_hg_repo

    add_file(hg, local, "file1_commit1", "1\n2\n3\n4\n5\n6\n7\n")
    add_file(hg, local, "file2_commit1", "1\n2\n3\n")
    revision1 = commit(hg, 1)

    add_file(hg, local, "file3_commit2", "1\n2\n3\n4\n5\n")
    revision2 = commit(hg, 2)

    hg.push(dest=bytes(remote, "ascii"))
    copy_pushlog_database(remote, local)

    uploader = PhabricatorUploader(local, revision2)
    report = covdir_report(
        {
            "source_files": [
                {"name": "file1_commit1", "coverage": [None, 0, 1, 1, 1, 1, 0]},
                {"name": "file2_commit1", "coverage": [1, 1, 0]},
                {"name": "file3_commit2", "coverage": [1, 1, 0, 1, None]},
            ]
        }
    )

    with hgmo.HGMO(local) as hgmo_server:
        stack = changesets(hgmo_server, revision2)

    with hglib.open(local) as hg:
        results = uploader.generate(hg, report, stack)

    assert results == {
        revision1: {
            "revision_id": 1,
            "paths": {
                "file1_commit1": {
                    "coverage": "NUCCCCU",
                    "lines_added": 6,
                    "lines_covered": 4,
                    "lines_unknown": 0,
                },
                "file2_commit1": {
                    "coverage": "CCU",
                    "lines_added": 3,
                    "lines_covered": 2,
                    "lines_unknown": 0,
                },
            },
        },
        revision2: {
            "revision_id": 2,
            "paths": {
                "file3_commit2": {
                    "coverage": "CCUCN",
                    "lines_added": 4,
                    "lines_covered": 3,
                    "lines_unknown": 0,
                }
            },
        },
    }
def test_generate_from_preexisting(
    monkeypatch,
    tmpdir,
    mock_secrets,
    mock_taskcluster,
    mock_phabricator,
    fake_hg_repo,
):
    """commit_coverage.generate merges new per-commit data into a pre-existing
    commit_coverage.json.zst blob and patches the blob metadata exactly once."""
    tmp_path = tmpdir.strpath

    hg, local, remote = fake_hg_repo

    add_file(hg, local, "file", "1\n2\n3\n4\n")
    revision1 = commit(hg, 1)

    add_file(hg, local, "file", "1\n2\n3\n4\n5\n6\n7\n8\n9\n10\n")
    revision2 = commit(hg, 2)

    hg.push(dest=bytes(remote, "ascii"))
    copy_pushlog_database(remote, local)

    report = covdir_report(
        {
            "source_files": [
                {"name": "file", "coverage": [None, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0]}
            ]
        }
    )

    uploaded_data = None
    patch_calls = 0

    class Blob:
        def exists(self):
            # The blob already exists: generate must download and extend it.
            return True

        def upload_from_string(self, val):
            nonlocal uploaded_data
            uploaded_data = val

        def download_as_bytes(self):
            # Pre-existing content: two dummy entries keyed by literal strings.
            return zstandard.ZstdCompressor().compress(
                json.dumps(
                    {
                        "revision1": {
                            "added": 7,
                            "covered": 3,
                            "unknown": 0,
                        },
                        "revision2": None,
                    }
                ).encode("ascii")
            )

        def patch(self):
            nonlocal patch_calls
            assert self.content_type == "application/json"
            assert self.content_encoding == "zstd"
            patch_calls += 1

    class Bucket:
        def blob(self, path):
            assert path == "commit_coverage.json.zst"
            return Blob()

    myBucket = Bucket()

    def get_bucket(acc):
        return myBucket

    monkeypatch.setattr(commit_coverage, "get_bucket", get_bucket)

    def list_reports(bucket, repo):
        assert bucket == myBucket
        assert repo == "mozilla-central"
        yield revision2, "linux", "all"
        yield revision2, "all", "xpcshell"
        yield revision2, "all", "all"

    monkeypatch.setattr(commit_coverage, "list_reports", list_reports)

    def download_report(report_dir, bucket, report_name):
        os.makedirs(
            os.path.join(tmp_path, report_dir, "mozilla-central", revision2),
            exist_ok=True,
        )
        with open(
            os.path.join(
                tmp_path, report_dir, "mozilla-central", revision2, "all:all.json"
            ),
            "w",
        ) as f:
            json.dump(report, f)
        return True

    monkeypatch.setattr(commit_coverage, "download_report", download_report)

    with hgmo.HGMO(repo_dir=local) as hgmo_server:
        commit_coverage.generate(hgmo_server.server_address, local, out_dir=tmp_path)

    assert patch_calls == 1

    # The on-disk artifact and the uploaded bytes must decode to the same data:
    # the pre-existing entries plus the two freshly computed revisions.
    dctx = zstandard.ZstdDecompressor()
    with open(os.path.join(tmp_path, "commit_coverage.json.zst"), "rb") as zf:
        with dctx.stream_reader(zf) as reader:
            result = json.load(reader)

    assert result == json.loads(dctx.decompress(uploaded_data))
    assert result == {
        "revision1": {"added": 7, "covered": 3, "unknown": 0},
        "revision2": None,
        revision1: {"added": 3, "covered": 2, "unknown": 0},
        revision2: {"added": 6, "covered": 0, "unknown": 0},
    }
def test_generate_two_pushes(
    monkeypatch,
    tmpdir,
    mock_secrets,
    mock_taskcluster,
    mock_phabricator,
    fake_hg_repo,
):
    """commit_coverage.generate aggregates reports coming from two separate pushes."""
    tmp_path = tmpdir.strpath

    hg, local, remote = fake_hg_repo

    # First push: two commits touching file1.
    add_file(hg, local, "file1", "1\n2\n3\n4\n")
    revision1 = commit(hg, 1)
    add_file(hg, local, "file1", "1\n2\n3\n4\n5\n6\n7\n8\n9\n10\n")
    revision2 = commit(hg, 2)
    hg.push(dest=bytes(remote, "ascii"))

    # Second push: two commits touching file2.
    add_file(hg, local, "file2", "1\n2\n3\n4\n")
    revision3 = commit(hg, 1)
    add_file(hg, local, "file2", "1\n2\n3\n4\n5\n6\n7\n8\n9\n10\n")
    revision4 = commit(hg, 2)
    hg.push(dest=bytes(remote, "ascii"))

    copy_pushlog_database(remote, local)

    report1 = covdir_report(
        {
            "source_files": [
                {"name": "file1", "coverage": [None, 1, 1, 0, 0, 0, 0, 0, 0, 0]}
            ]
        }
    )
    report2 = covdir_report(
        {
            "source_files": [
                {"name": "file1", "coverage": [None, 1, 1, 0, 0, 0, 0, 0, 0, 0]},
                {"name": "file2", "coverage": [None, 0, 1, 0, 0, 0, 0, 0, 1, 1]},
            ]
        }
    )

    uploaded_data = None
    patch_calls = 0

    class Blob:
        def exists(self):
            # No pre-existing blob: generate must build the data from scratch.
            return False

        def upload_from_string(self, val):
            nonlocal uploaded_data
            uploaded_data = val

        def download_as_bytes(self):
            # Must never be called when exists() is False.
            assert False

        def patch(self):
            nonlocal patch_calls
            assert self.content_type == "application/json"
            assert self.content_encoding == "zstd"
            patch_calls += 1

    class Bucket:
        def blob(self, path):
            assert path == "commit_coverage.json.zst"
            return Blob()

    myBucket = Bucket()

    def get_bucket(acc):
        return myBucket

    monkeypatch.setattr(commit_coverage, "get_bucket", get_bucket)

    def list_reports(bucket, repo):
        assert bucket == myBucket
        assert repo == "mozilla-central"
        yield revision2, "linux", "all"
        yield revision2, "all", "xpcshell"
        yield revision2, "all", "all"
        yield revision4, "all", "all"

    monkeypatch.setattr(commit_coverage, "list_reports", list_reports)

    def download_report(report_dir, bucket, report_name):
        if revision2 in report_name:
            os.makedirs(
                os.path.join(tmp_path, report_dir, "mozilla-central", revision2),
                exist_ok=True,
            )
            with open(
                os.path.join(
                    tmp_path, report_dir, "mozilla-central", revision2, "all:all.json"
                ),
                "w",
            ) as f:
                json.dump(report1, f)

        if revision4 in report_name:
            os.makedirs(
                os.path.join(tmp_path, report_dir, "mozilla-central", revision4),
                exist_ok=True,
            )
            with open(
                os.path.join(
                    tmp_path, report_dir, "mozilla-central", revision4, "all:all.json"
                ),
                "w",
            ) as f:
                json.dump(report2, f)

        return True

    monkeypatch.setattr(commit_coverage, "download_report", download_report)

    with hgmo.HGMO(repo_dir=local) as hgmo_server:
        # Serialize concurrent pushlog queries against the single test server.
        lock = threading.Lock()

        class HGMOMock:
            def get_automation_relevance_changesets(self, changeset):
                with lock:
                    return hgmo_server.get_automation_relevance_changesets(changeset)

            # Used as a namespace: commit_coverage calls hgmo.HGMO(server_address)
            # after the setattr below replaces the hgmo module with this class.
            @contextmanager
            def HGMO(server_address=None, repo_dir=None):
                assert server_address == hgmo_server.server_address
                yield HGMOMock()

        monkeypatch.setattr(commit_coverage, "hgmo", HGMOMock)

        commit_coverage.generate(hgmo_server.server_address, local, out_dir=tmp_path)

    assert patch_calls == 1

    # The on-disk artifact and the uploaded bytes must decode to the same data.
    dctx = zstandard.ZstdDecompressor()
    with open(os.path.join(tmp_path, "commit_coverage.json.zst"), "rb") as zf:
        with dctx.stream_reader(zf) as reader:
            result = json.load(reader)

    assert result == json.loads(dctx.decompress(uploaded_data))
    assert result == {
        revision1: {"added": 3, "covered": 2, "unknown": 0},
        revision2: {"added": 6, "covered": 0, "unknown": 0},
        revision3: {"added": 3, "covered": 1, "unknown": 0},
        revision4: {"added": 6, "covered": 2, "unknown": 0},
    }