def test_coverable_last_lines(mock_secrets, mock_phabricator, fake_hg_repo):
    """Lines past the end of the coverage array are reported as 'N' (non-coverable)."""
    hg, local, remote = fake_hg_repo

    # One commit adding a 7-line file; the report only covers the first 5 lines.
    add_file(hg, local, "file", "1\n2\n3\n4\n5\n6\n7\n")
    revision = commit(hg, 1)
    hg.push(dest=bytes(remote, "ascii"))
    copy_pushlog_database(remote, local)

    phabricator = PhabricatorUploader(local, revision)
    report = covdir_report(
        {"source_files": [{"name": "file", "coverage": [None, 0, 1, 1, 1]}]}
    )
    with hgmo.HGMO(local) as hgmo_server:
        stack = changesets(hgmo_server, revision)
    with hglib.open(local) as hg:
        results = phabricator.generate(hg, report, stack)

    # Trailing lines 6-7 fall outside the report and must show up as 'N'.
    assert results == {
        revision: {
            "revision_id": 1,
            "paths": {
                "file": {
                    "coverage": "NUCCCNN",
                    "lines_added": 4,
                    "lines_covered": 3,
                    "lines_unknown": 0,
                }
            },
        }
    }
def test_removed_file(mock_secrets, fake_hg_repo):
    """A file removed in a later commit contributes no path coverage to either commit."""
    hg, local, remote = fake_hg_repo

    # Commit 1 adds the file, commit 2 removes it again.
    add_file(hg, local, "file", "1\n2\n3\n4\n5\n6\n7\n")
    revision1 = commit(hg, 1)
    hg.remove(files=[bytes(os.path.join(local, "file"), "ascii")])
    revision2 = commit(hg)
    hg.push(dest=bytes(remote, "ascii"))
    copy_pushlog_database(remote, local)

    phabricator = PhabricatorUploader(local, revision2)
    report = covdir_report({"source_files": []})
    with hgmo.HGMO(local) as hgmo_server:
        stack = changesets(hgmo_server, revision2)
    with hglib.open(local) as hg:
        results = phabricator.generate(hg, report, stack)

    # Both changesets appear, but neither has coverable paths.
    assert results == {
        revision1: {"revision_id": 1, "paths": {}},
        revision2: {"revision_id": None, "paths": {}},
    }
def test_backout_removed_file(mock_secrets, fake_hg_repo):
    """Backing out a removal restores the file; coverage maps back to the original commit."""
    hg, local, remote = fake_hg_repo

    # Commit 1 adds the file, commit 2 removes it, then the removal is backed out.
    add_file(hg, local, "file", "1\n2\n3\n4\n5\n6\n7\n")
    commit(hg, 1)
    hg.remove(files=[bytes(os.path.join(local, "file"), "ascii")])
    revision = commit(hg, 2)
    hg.backout(rev=revision, message="backout", user="******")
    revision = hg.log(limit=1)[0][1].decode("ascii")
    hg.push(dest=bytes(remote, "ascii"))
    copy_pushlog_database(remote, local)

    phabricator = PhabricatorUploader(local, revision)
    report = covdir_report(
        {"source_files": [{"name": "file", "coverage": [None, 0, 1, 1, 1, 1, 0]}]}
    )
    results = phabricator.generate(report, changesets(local, revision))

    # All coverage is attributed to commit 1; the removal commit gets nothing.
    assert results == {
        1: {"file": {"coverage": "NUCCCCU", "lines_added": 7, "lines_covered": 5}},
        2: {},
    }
def test_upload_changesets_reducing_size(mock_secrets, mock_phabricator, fake_hg_repo):
    """A later commit shrinking the file marks the vanished lines as 'X' in commit 1."""
    hg, local, remote = fake_hg_repo

    # Commit 1: 7 lines; commit 2 truncates the file to 5 lines.
    add_file(hg, local, 'file', '1\n2\n3\n4\n5\n6\n7\n')
    commit(hg, 1)
    add_file(hg, local, 'file', '1\n2\n3\n4\n5\n')
    revision = commit(hg, 2)
    hg.push(dest=bytes(remote, 'ascii'))
    copy_pushlog_database(remote, local)

    phabricator = PhabricatorUploader(local, revision)
    results = phabricator.generate({
        'source_files': [{
            'name': 'file',
            'coverage': [None, 0, 1, 1, 1],
        }]
    })

    assert set(results.keys()) == set([1, 2])
    assert set(results[1].keys()) == set(['file'])
    assert set(results[2].keys()) == set(['file'])
    # Lines 6-7 no longer exist at tip, hence 'X' for commit 1.
    assert results[1]['file'] == 'NUCCCXX'
    assert results[2]['file'] == 'NUCCC'
def test_changesets_overwriting_one_commit_without_differential(
    mock_secrets, fake_hg_repo
):
    """A commit without a Differential overwrites a line; only commit 1 is reported."""
    hg, local, remote = fake_hg_repo

    # Commit 2 (no Differential id) rewrites line 4 of commit 1's file.
    add_file(hg, local, "file", "1\n2\n3\n4\n5\n6\n7\n")
    commit(hg, 1)
    add_file(hg, local, "file", "1\n2\n3\n42\n5\n6\n7\n")
    revision = commit(hg)
    hg.push(dest=bytes(remote, "ascii"))
    copy_pushlog_database(remote, local)

    phabricator = PhabricatorUploader(local, revision)
    report = covdir_report(
        {"source_files": [{"name": "file", "coverage": [None, 0, 1, 1, 1, 1, 0]}]}
    )
    results = phabricator.generate(report, changesets(local, revision))

    # Line 4 was overwritten by the later commit, hence 'X' for commit 1.
    assert results == {
        1: {"file": {"coverage": "NUCXCCU", "lines_added": 6, "lines_covered": 4}}
    }
def test_backout_removed_file(mock_secrets, fake_hg_repo):
    """Backed-out removal: coverage attributed to commit 1, nothing to commit 2."""
    hg, local, remote = fake_hg_repo

    # Add, remove, then back out the removal so the file exists again at tip.
    add_file(hg, local, 'file', '1\n2\n3\n4\n5\n6\n7\n')
    commit(hg, 1)
    hg.remove(files=[bytes(os.path.join(local, 'file'), 'ascii')])
    revision = commit(hg, 2)
    hg.backout(rev=revision, message='backout', user='******')
    revision = hg.log(limit=1)[0][1].decode('ascii')
    hg.push(dest=bytes(remote, 'ascii'))
    copy_pushlog_database(remote, local)

    phabricator = PhabricatorUploader(local, revision)
    results = phabricator.generate({
        'source_files': [{
            'name': 'file',
            'coverage': [None, 0, 1, 1, 1, 1, 0],
        }]
    })

    assert set(results.keys()) == set([1, 2])
    assert set(results[1].keys()) == set(['file'])
    # The removal commit owns no lines at tip.
    assert set(results[2].keys()) == set([])
    assert results[1]['file'] == 'NUCCCCU'
def test_upload_changesets_overwriting_one_commit_without_differential(
        mock_secrets, mock_phabricator, fake_hg_repo):
    """Overwriting commit lacks a Differential id, so only commit 1 appears."""
    hg, local, remote = fake_hg_repo

    # Second commit (no numeric id) replaces line 4.
    add_file(hg, local, 'file', '1\n2\n3\n4\n5\n6\n7\n')
    commit(hg, 1)
    add_file(hg, local, 'file', '1\n2\n3\n42\n5\n6\n7\n')
    revision = commit(hg)
    hg.push(dest=bytes(remote, 'ascii'))
    copy_pushlog_database(remote, local)

    phabricator = PhabricatorUploader(local, revision)
    results = phabricator.generate({
        'source_files': [{
            'name': 'file',
            'coverage': [None, 0, 1, 1, 1, 1, 0],
        }]
    })

    assert set(results.keys()) == set([1])
    assert set(results[1].keys()) == set(['file'])
    # Line 4 was overwritten later: 'X'.
    assert results[1]['file'] == 'NUCXCCU'
def test_changesets_reducing_size(mock_secrets, fake_hg_repo):
    """Truncating the file in commit 2 marks the removed tail lines 'X' in commit 1."""
    hg, local, remote = fake_hg_repo

    add_file(hg, local, 'file', '1\n2\n3\n4\n5\n6\n7\n')
    commit(hg, 1)
    add_file(hg, local, 'file', '1\n2\n3\n4\n5\n')
    revision = commit(hg, 2)
    hg.push(dest=bytes(remote, 'ascii'))
    copy_pushlog_database(remote, local)

    phabricator = PhabricatorUploader(local, revision)
    results = phabricator.generate({
        'source_files': [{
            'name': 'file',
            'coverage': [None, 0, 1, 1, 1],
        }]
    })

    assert set(results.keys()) == set([1, 2])
    assert set(results[1].keys()) == set(['file'])
    assert set(results[2].keys()) == set(['file'])
    # Lines 6-7 were deleted by commit 2.
    assert results[1]['file'] == 'NUCCCXX'
    assert results[2]['file'] == 'NUCCC'
def test_two_commits_two_files(mock_secrets, fake_hg_repo):
    """Files are attributed to the commit that introduced them across a two-commit stack."""
    hg, local, remote = fake_hg_repo

    # Commit 1 adds two files, commit 2 adds a third.
    add_file(hg, local, 'file1_commit1', '1\n2\n3\n4\n5\n6\n7\n')
    add_file(hg, local, 'file2_commit1', '1\n2\n3\n')
    revision = commit(hg, 1)
    add_file(hg, local, 'file3_commit2', '1\n2\n3\n4\n5\n')
    revision = commit(hg, 2)
    hg.push(dest=bytes(remote, 'ascii'))
    copy_pushlog_database(remote, local)

    phabricator = PhabricatorUploader(local, revision)
    results = phabricator.generate({
        'source_files': [{
            'name': 'file1_commit1',
            'coverage': [None, 0, 1, 1, 1, 1, 0],
        }, {
            'name': 'file2_commit1',
            'coverage': [1, 1, 0],
        }, {
            'name': 'file3_commit2',
            'coverage': [1, 1, 0, 1, None],
        }]
    })

    assert set(results.keys()) == set([1, 2])
    assert set(results[1].keys()) == set(['file1_commit1', 'file2_commit1'])
    assert set(results[2].keys()) == set(['file3_commit2'])
    assert results[1]['file1_commit1'] == 'NUCCCCU'
    assert results[1]['file2_commit1'] == 'CCU'
    assert results[2]['file3_commit2'] == 'CCUCN'
def test_backout_removed_file(mock_secrets, mock_phabricator, fake_hg_repo):
    """Backed-out file removal (with Phabricator mocked): commit 1 keeps all coverage."""
    hg, local, remote = fake_hg_repo

    # Add, remove, then back out the removal.
    add_file(hg, local, 'file', '1\n2\n3\n4\n5\n6\n7\n')
    commit(hg, 1)
    hg.remove(files=[bytes(os.path.join(local, 'file'), 'ascii')])
    revision = commit(hg, 2)
    hg.backout(rev=revision, message='backout', user='******')
    revision = hg.log(limit=1)[0][1].decode('ascii')
    hg.push(dest=bytes(remote, 'ascii'))
    copy_pushlog_database(remote, local)

    phabricator = PhabricatorUploader(local, revision)
    results = phabricator.generate({
        'source_files': [{
            'name': 'file',
            'coverage': [None, 0, 1, 1, 1, 1, 0],
        }]
    })

    assert set(results.keys()) == set([1, 2])
    assert set(results[1].keys()) == set(['file'])
    assert set(results[2].keys()) == set([])
    assert results[1]['file'] == 'NUCCCCU'
def test_upload_two_commits_two_files(mock_secrets, mock_phabricator, fake_hg_repo):
    """Per-commit file attribution with the Phabricator API mocked."""
    hg, local, remote = fake_hg_repo

    add_file(hg, local, 'file1_commit1', '1\n2\n3\n4\n5\n6\n7\n')
    add_file(hg, local, 'file2_commit1', '1\n2\n3\n')
    revision = commit(hg, 1)
    add_file(hg, local, 'file3_commit2', '1\n2\n3\n4\n5\n')
    revision = commit(hg, 2)
    hg.push(dest=bytes(remote, 'ascii'))
    copy_pushlog_database(remote, local)

    phabricator = PhabricatorUploader(local, revision)
    results = phabricator.generate({
        'source_files': [{
            'name': 'file1_commit1',
            'coverage': [None, 0, 1, 1, 1, 1, 0],
        }, {
            'name': 'file2_commit1',
            'coverage': [1, 1, 0],
        }, {
            'name': 'file3_commit2',
            'coverage': [1, 1, 0, 1, None],
        }]
    })

    assert set(results.keys()) == set([1, 2])
    assert set(results[1].keys()) == set(['file1_commit1', 'file2_commit1'])
    assert set(results[2].keys()) == set(['file3_commit2'])
    assert results[1]['file1_commit1'] == 'NUCCCCU'
    assert results[1]['file2_commit1'] == 'CCU'
    assert results[2]['file3_commit2'] == 'CCUCN'
def test_changesets_displacing(mock_secrets, fake_hg_repo):
    """Lines inserted above/below by commit 2 still map commit 1's lines correctly."""
    hg, local, remote = fake_hg_repo

    # Commit 2 prepends two lines and appends two lines around commit 1's content.
    add_file(hg, local, "file", "1\n2\n3\n4\n5\n6\n7\n")
    commit(hg, 1)
    add_file(hg, local, "file", "-1\n-2\n1\n2\n3\n4\n5\n6\n7\n8\n9\n")
    revision = commit(hg, 2)
    hg.push(dest=bytes(remote, "ascii"))
    copy_pushlog_database(remote, local)

    phabricator = PhabricatorUploader(local, revision)
    report = covdir_report(
        {
            "source_files": [
                {"name": "file", "coverage": [0, 1, None, 0, 1, 1, 1, 1, 0, 1, 0]}
            ]
        }
    )
    results = phabricator.generate(report, changesets(local, revision))

    # Commit 1's coverage string reflects its original 7 lines, displaced by 2.
    assert results == {
        1: {"file": {"coverage": "NUCCCCU", "lines_added": 7, "lines_covered": 4}},
        2: {"file": {"coverage": "UCNUCCCCUCU", "lines_added": 4, "lines_covered": 2}},
    }
def test_notification(mock_secrets, mock_taskcluster, mock_phabricator, fake_hg_repo):
    """The notification email flags only commits with uncovered added lines."""
    hg, local, remote = fake_hg_repo

    # Commit 1 adds 4 lines; commit 2 extends the file to 10 lines.
    add_file(hg, local, "file", "1\n2\n3\n4\n")
    commit(hg, 1)
    add_file(hg, local, "file", "1\n2\n3\n4\n5\n6\n7\n8\n9\n10\n")
    revision = commit(hg, 2)
    hg.push(dest=bytes(remote, "ascii"))
    copy_pushlog_database(remote, local)

    stack = changesets(local, revision)
    assert len(stack) == 2
    assert (
        stack[0]["desc"]
        == "Commit [(b'A', b'file')]Differential Revision: https://phabricator.services.mozilla.com/D1"
    )
    assert (
        stack[1]["desc"]
        == "Commit [(b'M', b'file')]Differential Revision: https://phabricator.services.mozilla.com/D2"
    )

    report = covdir_report({
        "source_files": [{
            "name": "file",
            "coverage": [None, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0]
        }]
    })
    phab = PhabricatorUploader(local, revision)
    changesets_coverage = phab.generate(report, stack)

    assert changesets_coverage == {
        1: {
            "file": {
                "lines_added": 4,
                "lines_covered": 2,
                "coverage": "NUCU"
            }
        },
        2: {
            "file": {
                "lines_added": 6,
                "lines_covered": 0,
                "coverage": "NUCUUUUUUU"
            }
        },
    }

    # Only commit 2 (0/6 covered) should make it into the notification body.
    mail = notify_email(revision, stack, changesets_coverage)
    assert (
        mail
        == "* [Commit [(b'M', b'file')]Differential Revision: https://phabricator.services.mozilla.com/D2](https://firefox-code-coverage.herokuapp.com/#/changeset/{}): 0 covered out of 6 added.\n".format(  # noqa
            revision
        )
    )
def test_two_commits_two_files(mock_secrets, fake_hg_repo):
    """Per-commit, per-file coverage stats across a two-commit stack (covdir report)."""
    hg, local, remote = fake_hg_repo

    add_file(hg, local, "file1_commit1", "1\n2\n3\n4\n5\n6\n7\n")
    add_file(hg, local, "file2_commit1", "1\n2\n3\n")
    revision = commit(hg, 1)
    add_file(hg, local, "file3_commit2", "1\n2\n3\n4\n5\n")
    revision = commit(hg, 2)
    hg.push(dest=bytes(remote, "ascii"))
    copy_pushlog_database(remote, local)

    phabricator = PhabricatorUploader(local, revision)
    report = covdir_report({
        "source_files": [
            {
                "name": "file1_commit1",
                "coverage": [None, 0, 1, 1, 1, 1, 0]
            },
            {
                "name": "file2_commit1",
                "coverage": [1, 1, 0]
            },
            {
                "name": "file3_commit2",
                "coverage": [1, 1, 0, 1, None]
            },
        ]
    })
    results = phabricator.generate(report, changesets(local, revision))

    assert results == {
        1: {
            "file1_commit1": {
                "coverage": "NUCCCCU",
                "lines_added": 7,
                "lines_covered": 5,
            },
            "file2_commit1": {
                "coverage": "CCU",
                "lines_added": 3,
                "lines_covered": 2
            },
        },
        2: {
            "file3_commit2": {
                "coverage": "CCUCN",
                "lines_added": 5,
                "lines_covered": 4
            }
        },
    }
def test_changesets_overwriting_one_commit_without_differential(
        mock_secrets, fake_hg_repo):
    """Commit without a Differential is keyed by node with revision_id None."""
    hg, local, remote = fake_hg_repo

    # Commit 2 (no Differential id) rewrites line 4.
    add_file(hg, local, "file", "1\n2\n3\n4\n5\n6\n7\n")
    revision1 = commit(hg, 1)
    add_file(hg, local, "file", "1\n2\n3\n42\n5\n6\n7\n")
    revision2 = commit(hg)
    hg.push(dest=bytes(remote, "ascii"))
    copy_pushlog_database(remote, local)

    phabricator = PhabricatorUploader(local, revision2)
    report = covdir_report({
        "source_files": [{
            "name": "file",
            "coverage": [None, 0, 1, 1, 1, 1, 0]
        }]
    })
    with hgmo.HGMO(local) as hgmo_server:
        stack = changesets(hgmo_server, revision2)
    with hglib.open(local) as hg:
        results = phabricator.generate(hg, report, stack)

    assert results == {
        revision1: {
            "revision_id": 1,
            "paths": {
                "file": {
                    "coverage": "NUCXCCU",
                    "lines_added": 6,
                    "lines_covered": 3,
                    "lines_unknown": 1,
                }
            },
        },
        revision2: {
            # No Differential attached to commit 2.
            "revision_id": None,
            "paths": {
                "file": {
                    "coverage": "NUCCCCU",
                    "lines_added": 1,
                    "lines_covered": 1,
                    "lines_unknown": 0,
                }
            },
        },
    }
def test_file_with_no_coverage(mock_secrets, fake_hg_repo):
    """A commit whose file is absent from the report yields an empty result for it."""
    hg, local, remote = fake_hg_repo

    add_file(hg, local, "file", "1\n2\n3\n4\n5\n6\n7\n")
    revision = commit(hg, 1)
    hg.push(dest=bytes(remote, "ascii"))
    copy_pushlog_database(remote, local)

    phabricator = PhabricatorUploader(local, revision)
    report = covdir_report({"source_files": []})
    results = phabricator.generate(report, changesets(local, revision))

    assert results == {1: {}}
def test_backout_removed_file(mock_secrets, fake_hg_repo):
    """Backed-out removal with node-keyed results: removal commit has empty paths."""
    hg, local, remote = fake_hg_repo

    # Add, remove, then back out the removal so the file is restored at tip.
    add_file(hg, local, "file", "1\n2\n3\n4\n5\n6\n7\n")
    revision1 = commit(hg, 1)
    hg.remove(files=[bytes(os.path.join(local, "file"), "ascii")])
    revision2 = commit(hg, 2)
    hg.backout(rev=revision2, message=f"Backout {revision2[:12]}", user="******")
    revision3 = hg.log(limit=1)[0][1].decode("ascii")
    hg.push(dest=bytes(remote, "ascii"))
    copy_pushlog_database(remote, local)

    phabricator = PhabricatorUploader(local, revision3)
    report = covdir_report({
        "source_files": [{
            "name": "file",
            "coverage": [None, 0, 1, 1, 1, 1, 0]
        }]
    })
    with hgmo.HGMO(local) as hgmo_server:
        stack = changesets(hgmo_server, revision3)
    with hglib.open(local) as hg:
        results = phabricator.generate(hg, report, stack)

    assert results == {
        revision1: {
            "revision_id": 1,
            "paths": {
                "file": {
                    "coverage": "NUCCCCU",
                    "lines_added": 6,
                    "lines_covered": 4,
                    "lines_unknown": 0,
                }
            },
        },
        revision2: {"revision_id": 2, "paths": {}},
    }
def test_upload_file_with_no_coverage(mock_secrets, mock_phabricator, fake_hg_repo):
    """An empty report produces a changeset entry with no files."""
    hg, local, remote = fake_hg_repo

    add_file(hg, local, 'file', '1\n2\n3\n4\n5\n6\n7\n')
    revision = commit(hg, 1)
    hg.push(dest=bytes(remote, 'ascii'))
    copy_pushlog_database(remote, local)

    phabricator = PhabricatorUploader(local, revision)
    results = phabricator.generate({'source_files': []})

    assert set(results.keys()) == set([1])
    assert set(results[1].keys()) == set()
def test_file_with_no_coverage(mock_secrets, fake_hg_repo):
    """Empty report: the single changeset is present but carries no files."""
    hg, local, remote = fake_hg_repo

    add_file(hg, local, 'file', '1\n2\n3\n4\n5\n6\n7\n')
    revision = commit(hg, 1)
    hg.push(dest=bytes(remote, 'ascii'))
    copy_pushlog_database(remote, local)

    phabricator = PhabricatorUploader(local, revision)
    results = phabricator.generate({
        'source_files': []
    })

    assert set(results.keys()) == set([1])
    assert set(results[1].keys()) == set()
def test_one_commit_without_differential(mock_secrets, fake_hg_repo):
    """A commit without any Differential Revision produces no results at all."""
    hg, local, remote = fake_hg_repo

    # commit() called without an id: no Differential annotation in the message.
    add_file(hg, local, 'file', '1\n2\n3\n4\n5\n6\n7\n')
    revision = commit(hg)
    hg.push(dest=bytes(remote, 'ascii'))
    copy_pushlog_database(remote, local)

    phabricator = PhabricatorUploader(local, revision)
    results = phabricator.generate({
        'source_files': [{
            'name': 'file_one_commit',
            'coverage': [None, 0, 1, 1, 1, 1, 0],
        }]
    })

    assert set(results.keys()) == set()
def analyze_changeset(changeset_to_analyze: str) -> None:
    """Download the covdir report for a changeset and record per-commit coverage totals.

    Results are accumulated into the surrounding scope's ``commit_coverage``
    mapping, keyed by changeset node; entries with no coverage are set to None.
    NOTE(review): relies on names from the enclosing scope (``out_dir``,
    ``bucket``, ``repo_dir``, ``server_address``, ``thread_local``,
    ``commit_coverage``, ``logger``) — presumably a nested worker function.
    """
    report_name = get_name(
        "mozilla-central", changeset_to_analyze, DEFAULT_FILTER, DEFAULT_FILTER
    )
    assert download_report(
        os.path.join(out_dir, "ccov-reports"), bucket, report_name
    )
    with open(
        os.path.join(out_dir, "ccov-reports", f"{report_name}.json"), "r"
    ) as f:
        report = json.load(f)

    phabricatorUploader = PhabricatorUploader(
        repo_dir, changeset_to_analyze, warnings_enabled=False
    )

    # Use the hg.mozilla.org server to get the automation relevant changesets, since
    # this information is broken in our local repo (which mozilla-unified).
    with hgmo.HGMO(server_address=server_address) as hgmo_remote_server:
        changesets = hgmo_remote_server.get_automation_relevance_changesets(
            changeset_to_analyze
        )

    results = phabricatorUploader.generate(thread_local.hg, report, changesets)
    for changeset in changesets:
        # Lookup changeset coverage from phabricator uploader
        coverage = results.get(changeset["node"])
        if coverage is None:
            logger.info("No coverage found", changeset=changeset)
            commit_coverage[changeset["node"]] = None
            continue

        # Aggregate added/covered/unknown line counts over all touched paths.
        commit_coverage[changeset["node"]] = {
            "added": sum(c["lines_added"] for c in coverage["paths"].values()),
            "covered": sum(c["lines_covered"] for c in coverage["paths"].values()),
            "unknown": sum(c["lines_unknown"] for c in coverage["paths"].values()),
        }
def test_simple(mock_secrets, mock_phabricator, fake_hg_repo):
    """End-to-end: generate coverage for one commit, then upload it via the mocked API."""
    hg, local, remote = fake_hg_repo

    add_file(hg, local, 'file', '1\n2\n3\n4\n5\n6\n7\n')
    revision = commit(hg, 1)
    hg.push(dest=bytes(remote, 'ascii'))
    copy_pushlog_database(remote, local)

    phabricator = PhabricatorUploader(local, revision)
    results = phabricator.generate({
        'source_files': [{
            'name': 'file',
            'coverage': [None, 0, 1, 1, 1, 1, 0],
        }]
    })

    assert set(results.keys()) == set([1])
    assert set(results[1].keys()) == set(['file'])
    assert results[1]['file'] == 'NUCCCCU'

    phabricator.upload({
        'source_files': [{
            'name': 'file',
            'coverage': [None, 0, 1, 1, 1, 1, 0],
        }]
    })

    # Inspect the last five mocked HTTP calls made by upload().
    assert len(responses.calls) >= 3

    call = responses.calls[-5]
    assert call.request.url == 'http://phabricator.test/api/differential.revision.search'
    params = json.loads(urllib.parse.parse_qs(call.request.body)['params'][0])
    assert params['constraints']['ids'] == [1]

    call = responses.calls[-4]
    assert call.request.url == 'http://phabricator.test/api/harbormaster.queryautotargets'
    params = json.loads(urllib.parse.parse_qs(call.request.body)['params'][0])
    assert params['objectPHID'] == 'PHID-DIFF-test'
    assert params['targetKeys'] == ['arcanist.unit']

    call = responses.calls[-3]
    assert call.request.url == 'http://phabricator.test/api/harbormaster.sendmessage'
    params = json.loads(urllib.parse.parse_qs(call.request.body)['params'][0])
    assert params['buildTargetPHID'] == 'PHID-HMBT-test'
    assert params['type'] == 'pass'
    assert params['unit'] == [{'name': 'Aggregate coverage information', 'result': 'pass', 'coverage': {'file': 'NUCCCCU'}}]
    assert params['lint'] == []

    call = responses.calls[-2]
    assert call.request.url == 'http://phabricator.test/api/harbormaster.queryautotargets'
    params = json.loads(urllib.parse.parse_qs(call.request.body)['params'][0])
    assert params['objectPHID'] == 'PHID-DIFF-test'
    assert params['targetKeys'] == ['arcanist.lint']

    call = responses.calls[-1]
    assert call.request.url == 'http://phabricator.test/api/harbormaster.sendmessage'
    params = json.loads(urllib.parse.parse_qs(call.request.body)['params'][0])
    assert params['buildTargetPHID'] == 'PHID-HMBT-test-lint'
    assert params['type'] == 'pass'
    assert params['unit'] == []
    assert params['lint'] == []
def test_two_commits_two_files(mock_secrets, fake_hg_repo):
    """Node-keyed results: each file's stats are attributed to its introducing commit."""
    hg, local, remote = fake_hg_repo

    add_file(hg, local, "file1_commit1", "1\n2\n3\n4\n5\n6\n7\n")
    add_file(hg, local, "file2_commit1", "1\n2\n3\n")
    revision1 = commit(hg, 1)
    add_file(hg, local, "file3_commit2", "1\n2\n3\n4\n5\n")
    revision2 = commit(hg, 2)
    hg.push(dest=bytes(remote, "ascii"))
    copy_pushlog_database(remote, local)

    phabricator = PhabricatorUploader(local, revision2)
    report = covdir_report({
        "source_files": [
            {
                "name": "file1_commit1",
                "coverage": [None, 0, 1, 1, 1, 1, 0]
            },
            {
                "name": "file2_commit1",
                "coverage": [1, 1, 0]
            },
            {
                "name": "file3_commit2",
                "coverage": [1, 1, 0, 1, None]
            },
        ]
    })
    with hgmo.HGMO(local) as hgmo_server:
        stack = changesets(hgmo_server, revision2)
    with hglib.open(local) as hg:
        results = phabricator.generate(hg, report, stack)

    assert results == {
        revision1: {
            "revision_id": 1,
            "paths": {
                "file1_commit1": {
                    "coverage": "NUCCCCU",
                    "lines_added": 6,
                    "lines_covered": 4,
                    "lines_unknown": 0,
                },
                "file2_commit1": {
                    "coverage": "CCU",
                    "lines_added": 3,
                    "lines_covered": 2,
                    "lines_unknown": 0,
                },
            },
        },
        revision2: {
            "revision_id": 2,
            "paths": {
                "file3_commit2": {
                    "coverage": "CCUCN",
                    "lines_added": 4,
                    "lines_covered": 3,
                    "lines_unknown": 0,
                }
            },
        },
    }
def test_notification(mock_secrets, mock_taskcluster, mock_phabricator, fake_hg_repo):
    """Node-keyed coverage feeds notify_email; only the 0-covered commit is reported."""
    hg, local, remote = fake_hg_repo

    # Commit 1 adds 4 lines; commit 2 grows the file to 10 lines.
    add_file(hg, local, "file", "1\n2\n3\n4\n")
    revision1 = commit(hg, 1)
    add_file(hg, local, "file", "1\n2\n3\n4\n5\n6\n7\n8\n9\n10\n")
    revision2 = commit(hg, 2)
    hg.push(dest=bytes(remote, "ascii"))
    copy_pushlog_database(remote, local)

    with hgmo.HGMO(local) as hgmo_server:
        stack = changesets(hgmo_server, revision2)

    assert len(stack) == 2
    assert (
        stack[0]["desc"]
        == "Commit [(b'A', b'file')]Differential Revision: https://phabricator.services.mozilla.com/D1"
    )
    assert (
        stack[1]["desc"]
        == "Commit [(b'M', b'file')]Differential Revision: https://phabricator.services.mozilla.com/D2"
    )

    report = covdir_report({
        "source_files": [{
            "name": "file",
            "coverage": [None, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0]
        }]
    })
    phab = PhabricatorUploader(local, revision2)
    with hglib.open(local) as hg:
        changesets_coverage = phab.generate(hg, report, stack)

    assert changesets_coverage == {
        revision1: {
            "revision_id": 1,
            "paths": {
                "file": {
                    "lines_added": 3,
                    "lines_covered": 2,
                    "lines_unknown": 0,
                    "coverage": "NCCU",
                }
            },
        },
        revision2: {
            "revision_id": 2,
            "paths": {
                "file": {
                    "lines_added": 6,
                    "lines_covered": 0,
                    "lines_unknown": 0,
                    "coverage": "NCCUUUUUUU",
                }
            },
        },
    }

    # Commit 1 has covered lines, so only commit 2 appears in the email.
    mail = notify_email(revision2, stack, changesets_coverage)
    assert (
        mail
        == """* [Commit [(b'M', b'file')]Differential Revision: https://phabricator.services.mozilla.com/D2](https://phabricator.services.mozilla.com/D2): 0 covered out of 6 added.\n"""
    )
def test_simple(mock_secrets, mock_phabricator, fake_hg_repo):
    """End-to-end (covdir report): generate then upload, verifying each mocked API call."""
    hg, local, remote = fake_hg_repo

    add_file(hg, local, "file", "1\n2\n3\n4\n5\n6\n7\n")
    revision = commit(hg, 1)
    hg.push(dest=bytes(remote, "ascii"))
    copy_pushlog_database(remote, local)

    phabricator = PhabricatorUploader(local, revision)
    report = covdir_report(
        {"source_files": [{"name": "file", "coverage": [None, 0, 1, 1, 1, 1, 0]}]}
    )
    results = phabricator.generate(report, changesets(local, revision))
    assert results == {
        1: {"file": {"coverage": "NUCCCCU", "lines_added": 7, "lines_covered": 5}}
    }

    phabricator.upload(report, changesets(local, revision))

    # Inspect the last five mocked HTTP calls made by upload().
    assert len(responses.calls) >= 3

    call = responses.calls[-5]
    assert (
        call.request.url == "http://phabricator.test/api/differential.revision.search"
    )
    params = json.loads(urllib.parse.parse_qs(call.request.body)["params"][0])
    assert params["constraints"]["ids"] == [1]

    call = responses.calls[-4]
    assert (
        call.request.url == "http://phabricator.test/api/harbormaster.queryautotargets"
    )
    params = json.loads(urllib.parse.parse_qs(call.request.body)["params"][0])
    assert params["objectPHID"] == "PHID-DIFF-test"
    assert params["targetKeys"] == ["arcanist.unit"]

    call = responses.calls[-3]
    assert call.request.url == "http://phabricator.test/api/harbormaster.sendmessage"
    params = json.loads(urllib.parse.parse_qs(call.request.body)["params"][0])
    assert params["buildTargetPHID"] == "PHID-HMBT-test"
    assert params["type"] == "pass"
    assert params["unit"] == [
        {
            "name": "Aggregate coverage information",
            "result": "pass",
            "coverage": {"file": "NUCCCCU"},
        }
    ]
    assert params["lint"] == []

    call = responses.calls[-2]
    assert (
        call.request.url == "http://phabricator.test/api/harbormaster.queryautotargets"
    )
    params = json.loads(urllib.parse.parse_qs(call.request.body)["params"][0])
    assert params["objectPHID"] == "PHID-DIFF-test"
    assert params["targetKeys"] == ["arcanist.lint"]

    call = responses.calls[-1]
    assert call.request.url == "http://phabricator.test/api/harbormaster.sendmessage"
    params = json.loads(urllib.parse.parse_qs(call.request.body)["params"][0])
    assert params["buildTargetPHID"] == "PHID-HMBT-test-lint"
    assert params["type"] == "pass"
    assert params["unit"] == []
    assert params["lint"] == []