Example #1
0
def test_changesets_displacing(mock_secrets, fake_hg_repo):
    """Coverage maps back to both commits when the second commit displaces
    the lines introduced by the first one."""
    hg, repo_dir, remote_dir = fake_hg_repo

    add_file(hg, repo_dir, "file", "1\n2\n3\n4\n5\n6\n7\n")
    commit(hg, 1)

    add_file(hg, repo_dir, "file", "-1\n-2\n1\n2\n3\n4\n5\n6\n7\n8\n9\n")
    tip = commit(hg, 2)

    hg.push(dest=bytes(remote_dir, "ascii"))
    copy_pushlog_database(remote_dir, repo_dir)

    phab = PhabricatorUploader(repo_dir, tip)
    report = covdir_report(
        {
            "source_files": [
                {"name": "file", "coverage": [0, 1, None, 0, 1, 1, 1, 1, 0, 1, 0]}
            ]
        }
    )
    results = phab.generate(report, changesets(repo_dir, tip))

    # Commit 1's seven lines were shifted down by the two lines that commit 2
    # prepended, so its coverage string is built from the displaced offsets.
    expected = {
        1: {"file": {"coverage": "NUCCCCU", "lines_added": 7, "lines_covered": 4}},
        2: {"file": {"coverage": "UCNUCCCCUCU", "lines_added": 4, "lines_covered": 2}},
    }
    assert results == expected
def test_coverable_last_lines(mock_secrets, mock_phabricator, fake_hg_repo):
    """Lines past the end of the coverage array come out as 'N' (non-coverable)."""
    hg, repo_dir, remote_dir = fake_hg_repo

    add_file(hg, repo_dir, "file", "1\n2\n3\n4\n5\n6\n7\n")
    tip = commit(hg, 1)

    hg.push(dest=bytes(remote_dir, "ascii"))
    copy_pushlog_database(remote_dir, repo_dir)

    phab = PhabricatorUploader(repo_dir, tip)
    report = covdir_report(
        {"source_files": [{"name": "file", "coverage": [None, 0, 1, 1, 1]}]}
    )
    with hgmo.HGMO(repo_dir) as hgmo_server:
        stack = changesets(hgmo_server, tip)

    with hglib.open(repo_dir) as hg_client:
        results = phab.generate(hg_client, report, stack)

    expected_file = {
        "coverage": "NUCCCNN",
        "lines_added": 4,
        "lines_covered": 3,
        "lines_unknown": 0,
    }
    assert results == {tip: {"revision_id": 1, "paths": {"file": expected_file}}}
Example #3
0
def test_changesets_overwriting_one_commit_without_differential(
    mock_secrets, fake_hg_repo
):
    """A commit without a Differential link marks the line it overwrote in
    the linked commit with 'X'."""
    hg, repo_dir, remote_dir = fake_hg_repo

    add_file(hg, repo_dir, "file", "1\n2\n3\n4\n5\n6\n7\n")
    commit(hg, 1)

    add_file(hg, repo_dir, "file", "1\n2\n3\n42\n5\n6\n7\n")
    tip = commit(hg)

    hg.push(dest=bytes(remote_dir, "ascii"))
    copy_pushlog_database(remote_dir, repo_dir)

    phab = PhabricatorUploader(repo_dir, tip)
    report = covdir_report(
        {"source_files": [{"name": "file", "coverage": [None, 0, 1, 1, 1, 1, 0]}]}
    )
    results = phab.generate(report, changesets(repo_dir, tip))

    assert results == {
        1: {"file": {"coverage": "NUCXCCU", "lines_added": 6, "lines_covered": 4}}
    }
def test_removed_file(mock_secrets, fake_hg_repo):
    """With an empty covdir report, both the adding and the removing revision
    come back with empty path maps."""
    hg, repo_dir, remote_dir = fake_hg_repo

    add_file(hg, repo_dir, "file", "1\n2\n3\n4\n5\n6\n7\n")
    rev_add = commit(hg, 1)

    hg.remove(files=[bytes(os.path.join(repo_dir, "file"), "ascii")])
    rev_remove = commit(hg)

    hg.push(dest=bytes(remote_dir, "ascii"))
    copy_pushlog_database(remote_dir, repo_dir)

    phab = PhabricatorUploader(repo_dir, rev_remove)
    report = covdir_report({"source_files": []})
    with hgmo.HGMO(repo_dir) as hgmo_server:
        stack = changesets(hgmo_server, rev_remove)

    with hglib.open(repo_dir) as hg_client:
        results = phab.generate(hg_client, report, stack)

    assert results == {
        rev_add: {"revision_id": 1, "paths": {}},
        rev_remove: {"revision_id": None, "paths": {}},
    }
Example #5
0
def test_backout_removed_file(mock_secrets, fake_hg_repo):
    """After backing out a file removal, coverage attaches to the original
    commit and the backed-out commit ends up empty."""
    hg, repo_dir, remote_dir = fake_hg_repo

    add_file(hg, repo_dir, "file", "1\n2\n3\n4\n5\n6\n7\n")
    commit(hg, 1)

    hg.remove(files=[bytes(os.path.join(repo_dir, "file"), "ascii")])
    removal_rev = commit(hg, 2)

    hg.backout(rev=removal_rev, message="backout", user="******")
    tip = hg.log(limit=1)[0][1].decode("ascii")

    hg.push(dest=bytes(remote_dir, "ascii"))
    copy_pushlog_database(remote_dir, repo_dir)

    phab = PhabricatorUploader(repo_dir, tip)
    report = covdir_report(
        {"source_files": [{"name": "file", "coverage": [None, 0, 1, 1, 1, 1, 0]}]}
    )
    results = phab.generate(report, changesets(repo_dir, tip))

    assert results == {
        1: {"file": {"coverage": "NUCCCCU", "lines_added": 7, "lines_covered": 5}},
        2: {},
    }
Example #6
0
def test_upload_two_commits_two_files(mock_secrets, mock_phabricator,
                                      fake_hg_repo):
    """Files are attributed to whichever commit of the stack introduced them."""
    hg, repo_dir, remote_dir = fake_hg_repo

    add_file(hg, repo_dir, 'file1_commit1', '1\n2\n3\n4\n5\n6\n7\n')
    add_file(hg, repo_dir, 'file2_commit1', '1\n2\n3\n')
    commit(hg, 1)

    add_file(hg, repo_dir, 'file3_commit2', '1\n2\n3\n4\n5\n')
    tip = commit(hg, 2)

    hg.push(dest=bytes(remote_dir, 'ascii'))
    copy_pushlog_database(remote_dir, repo_dir)

    phab = PhabricatorUploader(repo_dir, tip)
    results = phab.generate({
        'source_files': [
            {'name': 'file1_commit1', 'coverage': [None, 0, 1, 1, 1, 1, 0]},
            {'name': 'file2_commit1', 'coverage': [1, 1, 0]},
            {'name': 'file3_commit2', 'coverage': [1, 1, 0, 1, None]},
        ]
    })

    assert set(results) == {1, 2}
    assert set(results[1]) == {'file1_commit1', 'file2_commit1'}
    assert set(results[2]) == {'file3_commit2'}
    assert results[1]['file1_commit1'] == 'NUCCCCU'
    assert results[1]['file2_commit1'] == 'CCU'
    assert results[2]['file3_commit2'] == 'CCUCN'
Example #7
0
    def go_from_trigger_try(self):
        """Upload coverage of a try push to Phabricator.

        Bails out early when none of the changesets in the push is linked to
        a Phabricator revision, since there would be nothing to annotate.
        """
        phabricatorUploader = PhabricatorUploader(self.repo_dir, self.revision)

        # Query hg.mozilla.org (try) for the changesets relevant to this revision.
        with hgmo.HGMO(server_address=TRY_REPOSITORY) as hgmo_server:
            changesets = hgmo_server.get_automation_relevance_changesets(
                self.revision)['changesets']

        if not any(
                phabricatorUploader.parse_revision_id(changeset['desc'])
                is not None for changeset in changesets):
            logger.info(
                'None of the commits in the try push are linked to a Phabricator revision'
            )
            return

        self.retrieve_source_and_artifacts()

        # Placeholder metadata values; the report is only parsed below and
        # sent to Phabricator, not to an external coverage service.
        output = grcov.report(
            self.artifactsHandler.get(),
            source_dir=self.repo_dir,
            service_number='SERVICE_NUMBER',
            commit_sha='COMMIT_SHA',
            token='TOKEN',
        )
        logger.info('Report generated successfully')

        logger.info('Upload changeset coverage data to Phabricator')
        phabricatorUploader.upload(json.loads(output), changesets)
def test_backout_removed_file(mock_secrets, fake_hg_repo):
    """Coverage of a backed-out removal lands on commit 1; commit 2 is empty."""
    hg, repo_dir, remote_dir = fake_hg_repo

    add_file(hg, repo_dir, 'file', '1\n2\n3\n4\n5\n6\n7\n')
    commit(hg, 1)

    hg.remove(files=[bytes(os.path.join(repo_dir, 'file'), 'ascii')])
    removal_rev = commit(hg, 2)

    hg.backout(rev=removal_rev, message='backout', user='******')
    tip = hg.log(limit=1)[0][1].decode('ascii')

    hg.push(dest=bytes(remote_dir, 'ascii'))
    copy_pushlog_database(remote_dir, repo_dir)

    phab = PhabricatorUploader(repo_dir, tip)
    results = phab.generate({
        'source_files': [
            {'name': 'file', 'coverage': [None, 0, 1, 1, 1, 1, 0]},
        ]
    })

    assert set(results) == {1, 2}
    assert set(results[1]) == {'file'}
    assert set(results[2]) == set()
    assert results[1]['file'] == 'NUCCCCU'
Example #9
0
def test_upload_changesets_reducing_size(mock_secrets, mock_phabricator,
                                         fake_hg_repo):
    """When a later commit shrinks the file, the removed lines show up as 'X'."""
    hg, repo_dir, remote_dir = fake_hg_repo

    add_file(hg, repo_dir, 'file', '1\n2\n3\n4\n5\n6\n7\n')
    commit(hg, 1)

    add_file(hg, repo_dir, 'file', '1\n2\n3\n4\n5\n')
    tip = commit(hg, 2)

    hg.push(dest=bytes(remote_dir, 'ascii'))
    copy_pushlog_database(remote_dir, repo_dir)

    phab = PhabricatorUploader(repo_dir, tip)
    results = phab.generate({
        'source_files': [
            {'name': 'file', 'coverage': [None, 0, 1, 1, 1]},
        ]
    })

    assert set(results) == {1, 2}
    assert set(results[1]) == {'file'}
    assert set(results[2]) == {'file'}
    assert results[1]['file'] == 'NUCCCXX'
    assert results[2]['file'] == 'NUCCC'
Example #10
0
def test_two_commits_two_files(mock_secrets, fake_hg_repo):
    """Each file's coverage is reported under the commit that added it."""
    hg, repo_dir, remote_dir = fake_hg_repo

    add_file(hg, repo_dir, 'file1_commit1', '1\n2\n3\n4\n5\n6\n7\n')
    add_file(hg, repo_dir, 'file2_commit1', '1\n2\n3\n')
    commit(hg, 1)

    add_file(hg, repo_dir, 'file3_commit2', '1\n2\n3\n4\n5\n')
    tip = commit(hg, 2)

    hg.push(dest=bytes(remote_dir, 'ascii'))
    copy_pushlog_database(remote_dir, repo_dir)

    phab = PhabricatorUploader(repo_dir, tip)
    results = phab.generate({
        'source_files': [
            {'name': 'file1_commit1', 'coverage': [None, 0, 1, 1, 1, 1, 0]},
            {'name': 'file2_commit1', 'coverage': [1, 1, 0]},
            {'name': 'file3_commit2', 'coverage': [1, 1, 0, 1, None]},
        ]
    })

    assert set(results) == {1, 2}
    assert set(results[1]) == {'file1_commit1', 'file2_commit1'}
    assert set(results[2]) == {'file3_commit2'}
    assert results[1]['file1_commit1'] == 'NUCCCCU'
    assert results[1]['file2_commit1'] == 'CCU'
    assert results[2]['file3_commit2'] == 'CCUCN'
Example #11
0
def test_changesets_reducing_size(mock_secrets, fake_hg_repo):
    """When a later commit shrinks the file, the removed lines show up as 'X'."""
    hg, repo_dir, remote_dir = fake_hg_repo

    add_file(hg, repo_dir, 'file', '1\n2\n3\n4\n5\n6\n7\n')
    commit(hg, 1)

    add_file(hg, repo_dir, 'file', '1\n2\n3\n4\n5\n')
    tip = commit(hg, 2)

    hg.push(dest=bytes(remote_dir, 'ascii'))
    copy_pushlog_database(remote_dir, repo_dir)

    phab = PhabricatorUploader(repo_dir, tip)
    results = phab.generate({
        'source_files': [
            {'name': 'file', 'coverage': [None, 0, 1, 1, 1]},
        ]
    })

    assert set(results) == {1, 2}
    assert set(results[1]) == {'file'}
    assert set(results[2]) == {'file'}
    assert results[1]['file'] == 'NUCCCXX'
    assert results[2]['file'] == 'NUCCC'
Example #12
0
def test_upload_changesets_overwriting_one_commit_without_differential(
        mock_secrets, mock_phabricator, fake_hg_repo):
    """The line overwritten by a non-Differential commit is flagged 'X'."""
    hg, repo_dir, remote_dir = fake_hg_repo

    add_file(hg, repo_dir, 'file', '1\n2\n3\n4\n5\n6\n7\n')
    commit(hg, 1)

    add_file(hg, repo_dir, 'file', '1\n2\n3\n42\n5\n6\n7\n')
    tip = commit(hg)

    hg.push(dest=bytes(remote_dir, 'ascii'))
    copy_pushlog_database(remote_dir, repo_dir)

    phab = PhabricatorUploader(repo_dir, tip)
    results = phab.generate({
        'source_files': [
            {'name': 'file', 'coverage': [None, 0, 1, 1, 1, 1, 0]},
        ]
    })

    assert set(results) == {1}
    assert set(results[1]) == {'file'}
    assert results[1]['file'] == 'NUCXCCU'
Example #13
0
def test_backout_removed_file(mock_secrets, mock_phabricator, fake_hg_repo):
    """Coverage of a backed-out removal lands on commit 1; commit 2 is empty."""
    hg, repo_dir, remote_dir = fake_hg_repo

    add_file(hg, repo_dir, 'file', '1\n2\n3\n4\n5\n6\n7\n')
    commit(hg, 1)

    hg.remove(files=[bytes(os.path.join(repo_dir, 'file'), 'ascii')])
    removal_rev = commit(hg, 2)

    hg.backout(rev=removal_rev, message='backout', user='******')
    tip = hg.log(limit=1)[0][1].decode('ascii')

    hg.push(dest=bytes(remote_dir, 'ascii'))
    copy_pushlog_database(remote_dir, repo_dir)

    phab = PhabricatorUploader(repo_dir, tip)
    results = phab.generate({
        'source_files': [
            {'name': 'file', 'coverage': [None, 0, 1, 1, 1, 1, 0]},
        ]
    })

    assert set(results) == {1, 2}
    assert set(results[1]) == {'file'}
    assert set(results[2]) == set()
    assert results[1]['file'] == 'NUCCCCU'
Example #14
0
 def upload_phabricator(self, report, changesets):
     """
     Upload the given coverage report for the given changesets to Phabricator.

     Returns the value returned by PhabricatorUploader.upload.
     """
     phabricatorUploader = PhabricatorUploader(self.repo_dir, self.revision)
     logger.info("Upload changeset coverage data to Phabricator")
     return phabricatorUploader.upload(report, changesets)
def test_notification(
    mock_secrets, mock_taskcluster, mock_phabricator, fake_hg_repo
):
    """End-to-end check of the notification e-mail built from a two-commit stack."""
    hg, repo_dir, remote_dir = fake_hg_repo

    add_file(hg, repo_dir, "file", "1\n2\n3\n4\n")
    commit(hg, 1)

    add_file(hg, repo_dir, "file", "1\n2\n3\n4\n5\n6\n7\n8\n9\n10\n")
    tip = commit(hg, 2)

    hg.push(dest=bytes(remote_dir, "ascii"))
    copy_pushlog_database(remote_dir, repo_dir)

    stack = changesets(repo_dir, tip)
    assert len(stack) == 2
    expected_descs = [
        "Commit [(b'A', b'file')]Differential Revision: https://phabricator.services.mozilla.com/D1",
        "Commit [(b'M', b'file')]Differential Revision: https://phabricator.services.mozilla.com/D2",
    ]
    assert [c["desc"] for c in stack] == expected_descs

    report = covdir_report(
        {
            "source_files": [
                {"name": "file", "coverage": [None, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0]}
            ]
        }
    )
    phab = PhabricatorUploader(repo_dir, tip)
    changesets_coverage = phab.generate(report, stack)

    assert changesets_coverage == {
        1: {"file": {"lines_added": 4, "lines_covered": 2, "coverage": "NUCU"}},
        2: {"file": {"lines_added": 6, "lines_covered": 0, "coverage": "NUCUUUUUUU"}},
    }

    mail = notify_email(tip, stack, changesets_coverage)
    expected_mail = (
        "* [Commit [(b'M', b'file')]Differential Revision: https://phabricator.services.mozilla.com/D2](https://firefox-code-coverage.herokuapp.com/#/changeset/{}): 0 covered out of 6 added.\n"  # noqa
    ).format(tip)
    assert mail == expected_mail
Example #16
0
def test_two_commits_two_files(mock_secrets, fake_hg_repo):
    """Coverage of each file ends up under the commit that introduced it."""
    hg, repo_dir, remote_dir = fake_hg_repo

    add_file(hg, repo_dir, "file1_commit1", "1\n2\n3\n4\n5\n6\n7\n")
    add_file(hg, repo_dir, "file2_commit1", "1\n2\n3\n")
    commit(hg, 1)

    add_file(hg, repo_dir, "file3_commit2", "1\n2\n3\n4\n5\n")
    tip = commit(hg, 2)

    hg.push(dest=bytes(remote_dir, "ascii"))
    copy_pushlog_database(remote_dir, repo_dir)

    phab = PhabricatorUploader(repo_dir, tip)
    report = covdir_report(
        {
            "source_files": [
                {"name": "file1_commit1", "coverage": [None, 0, 1, 1, 1, 1, 0]},
                {"name": "file2_commit1", "coverage": [1, 1, 0]},
                {"name": "file3_commit2", "coverage": [1, 1, 0, 1, None]},
            ]
        }
    )
    results = phab.generate(report, changesets(repo_dir, tip))

    assert results == {
        1: {
            "file1_commit1": {
                "coverage": "NUCCCCU",
                "lines_added": 7,
                "lines_covered": 5,
            },
            "file2_commit1": {
                "coverage": "CCU",
                "lines_added": 3,
                "lines_covered": 2,
            },
        },
        2: {
            "file3_commit2": {
                "coverage": "CCUCN",
                "lines_added": 5,
                "lines_covered": 4,
            }
        },
    }
Example #17
0
    def go_from_trigger_mozilla_central(self):
        """Generate the covdir report for a mozilla-central push, upload it to
        GCP and push per-changeset coverage to Phabricator, then send the
        notification e-mail.
        """
        # Check the covdir report does not already exists
        if uploader.gcp_covdir_exists(self.branch, self.revision):
            logger.warn("Covdir report already on GCP")
            return

        self.retrieve_source_and_artifacts()

        # Check that all JavaScript files present in the coverage artifacts actually exist.
        # If they don't, there might be a bug in the LCOV rewriter.
        for artifact in self.artifactsHandler.get():
            if "jsvm" not in artifact:
                continue

            with zipfile.ZipFile(artifact, "r") as zf:
                for file_name in zf.namelist():
                    with zf.open(file_name, "r") as fl:
                        # LCOV "SF:" lines carry the source file paths.
                        source_files = [
                            line[3:].decode("utf-8").rstrip()
                            for line in fl
                            if line.startswith(b"SF:")
                        ]
                        missing_files = [
                            f
                            for f in source_files
                            if not os.path.exists(os.path.join(self.repo_dir, f))
                        ]
                        if len(missing_files) != 0:
                            # Warn only: a stale path should not abort the upload.
                            logger.warn(
                                f"{missing_files} are present in coverage reports, but missing from the repository"
                            )

        report = self.generate_covdir()

        # Sanity check: the report must contain both JS and C++ files.
        paths = uploader.covdir_paths(report)
        expected_extensions = [".js", ".cpp"]
        for extension in expected_extensions:
            assert any(
                path.endswith(extension) for path in paths
            ), "No {} file in the generated report".format(extension)

        # Get pushlog and ask the backend to generate the coverage by changeset
        # data, which will be cached.
        with hgmo.HGMO(self.repo_dir) as hgmo_server:
            changesets = hgmo_server.get_automation_relevance_changesets(self.revision)

        logger.info("Upload changeset coverage data to Phabricator")
        phabricatorUploader = PhabricatorUploader(self.repo_dir, self.revision)
        changesets_coverage = phabricatorUploader.upload(report, changesets)

        uploader.gcp(self.branch, self.revision, report)

        logger.info("Build uploaded on GCP")
        notify_email(self.revision, changesets, changesets_coverage)
Example #18
0
    def go_from_trigger_mozilla_central(self):
        """Generate the coverage report for a mozilla-central push, upload it
        to Phabricator, Coveralls and Codecov, and notify once Codecov has
        ingested the build.
        """
        commit_sha = self.githubUtils.mercurial_to_git(self.revision)
        try:
            # If Codecov already knows this commit, the build was processed before.
            uploader.get_codecov(commit_sha)
            logger.warn('Build was already injested')
            return
        except requests.exceptions.HTTPError:
            pass

        self.retrieve_source_and_artifacts()

        self.githubUtils.update_geckodev_repo()

        logger.info('GitHub revision', revision=commit_sha)

        self.githubUtils.post_github_status(commit_sha)

        # The pushlog id of the revision is used as the service build number.
        r = requests.get('https://hg.mozilla.org/mozilla-central/json-rev/%s' %
                         self.revision)
        r.raise_for_status()
        push_id = r.json()['pushid']

        output = grcov.report(self.artifactsHandler.get(),
                              source_dir=self.repo_dir,
                              service_number=push_id,
                              commit_sha=commit_sha,
                              token=secrets[secrets.COVERALLS_TOKEN])
        logger.info('Report generated successfully')

        # Sanity check: the report must contain both JS and C++ files.
        report = json.loads(output)
        expected_extensions = ['.js', '.cpp']
        for extension in expected_extensions:
            assert any(
                f['name'].endswith(extension) for f in report['source_files']
            ), 'No {} file in the generated report'.format(extension)

        logger.info('Upload changeset coverage data to Phabricator')
        phabricatorUploader = PhabricatorUploader(self.repo_dir, self.revision)
        phabricatorUploader.upload(report)

        # Upload to both external services in parallel.
        with ThreadPoolExecutorResult(max_workers=2) as executor:
            executor.submit(uploader.coveralls, output)
            executor.submit(uploader.codecov, output, commit_sha)

        logger.info('Waiting for build to be ingested by Codecov...')
        # Wait until the build has been ingested by Codecov.
        if uploader.codecov_wait(commit_sha):
            logger.info('Build ingested by codecov.io')
            notifier = Notifier(self.repo_dir, self.revision, self.client_id,
                                self.access_token)
            notifier.notify()
        else:
            logger.error('codecov.io took too much time to ingest data.')
def test_changesets_overwriting_one_commit_without_differential(
        mock_secrets, fake_hg_repo):
    """The non-Differential second commit gets revision_id None, and the line
    it overwrote in the first commit is counted as unknown ('X') there."""
    hg, repo_dir, remote_dir = fake_hg_repo

    add_file(hg, repo_dir, "file", "1\n2\n3\n4\n5\n6\n7\n")
    rev_first = commit(hg, 1)

    add_file(hg, repo_dir, "file", "1\n2\n3\n42\n5\n6\n7\n")
    rev_second = commit(hg)

    hg.push(dest=bytes(remote_dir, "ascii"))
    copy_pushlog_database(remote_dir, repo_dir)

    phab = PhabricatorUploader(repo_dir, rev_second)
    report = covdir_report(
        {"source_files": [{"name": "file", "coverage": [None, 0, 1, 1, 1, 1, 0]}]}
    )
    with hgmo.HGMO(repo_dir) as hgmo_server:
        stack = changesets(hgmo_server, rev_second)

    with hglib.open(repo_dir) as hg_client:
        results = phab.generate(hg_client, report, stack)

    assert results == {
        rev_first: {
            "revision_id": 1,
            "paths": {
                "file": {
                    "coverage": "NUCXCCU",
                    "lines_added": 6,
                    "lines_covered": 3,
                    "lines_unknown": 1,
                }
            },
        },
        rev_second: {
            "revision_id": None,
            "paths": {
                "file": {
                    "coverage": "NUCCCCU",
                    "lines_added": 1,
                    "lines_covered": 1,
                    "lines_unknown": 0,
                }
            },
        },
    }
Example #20
0
def test_file_with_no_coverage(mock_secrets, fake_hg_repo):
    """An empty covdir report yields an empty result for the commit."""
    hg, repo_dir, remote_dir = fake_hg_repo

    add_file(hg, repo_dir, "file", "1\n2\n3\n4\n5\n6\n7\n")
    tip = commit(hg, 1)

    hg.push(dest=bytes(remote_dir, "ascii"))
    copy_pushlog_database(remote_dir, repo_dir)

    phab = PhabricatorUploader(repo_dir, tip)
    report = covdir_report({"source_files": []})
    results = phab.generate(report, changesets(repo_dir, tip))

    assert results == {1: {}}
def test_backout_removed_file(mock_secrets, fake_hg_repo):
    """After a backout restores a removed file, coverage is attributed to the
    original commit and the removal commit has no paths."""
    hg, repo_dir, remote_dir = fake_hg_repo

    add_file(hg, repo_dir, "file", "1\n2\n3\n4\n5\n6\n7\n")
    rev_add = commit(hg, 1)

    hg.remove(files=[bytes(os.path.join(repo_dir, "file"), "ascii")])
    rev_remove = commit(hg, 2)

    hg.backout(
        rev=rev_remove, message=f"Backout {rev_remove[:12]}", user="******"
    )
    rev_backout = hg.log(limit=1)[0][1].decode("ascii")

    hg.push(dest=bytes(remote_dir, "ascii"))
    copy_pushlog_database(remote_dir, repo_dir)

    phab = PhabricatorUploader(repo_dir, rev_backout)
    report = covdir_report(
        {"source_files": [{"name": "file", "coverage": [None, 0, 1, 1, 1, 1, 0]}]}
    )
    with hgmo.HGMO(repo_dir) as hgmo_server:
        stack = changesets(hgmo_server, rev_backout)

    with hglib.open(repo_dir) as hg_client:
        results = phab.generate(hg_client, report, stack)

    assert results == {
        rev_add: {
            "revision_id": 1,
            "paths": {
                "file": {
                    "coverage": "NUCCCCU",
                    "lines_added": 6,
                    "lines_covered": 4,
                    "lines_unknown": 0,
                }
            },
        },
        rev_remove: {"revision_id": 2, "paths": {}},
    }
Example #22
0
def test_upload_file_with_no_coverage(mock_secrets, mock_phabricator,
                                      fake_hg_repo):
    """An empty report produces an entry for the commit with no files."""
    hg, repo_dir, remote_dir = fake_hg_repo

    add_file(hg, repo_dir, 'file', '1\n2\n3\n4\n5\n6\n7\n')
    tip = commit(hg, 1)

    hg.push(dest=bytes(remote_dir, 'ascii'))
    copy_pushlog_database(remote_dir, repo_dir)

    phab = PhabricatorUploader(repo_dir, tip)
    results = phab.generate({'source_files': []})

    assert set(results) == {1}
    assert set(results[1]) == set()
Example #23
0
def test_file_with_no_coverage(mock_secrets, fake_hg_repo):
    """An empty report produces an entry for the commit with no files."""
    hg, repo_dir, remote_dir = fake_hg_repo

    add_file(hg, repo_dir, 'file', '1\n2\n3\n4\n5\n6\n7\n')
    tip = commit(hg, 1)

    hg.push(dest=bytes(remote_dir, 'ascii'))
    copy_pushlog_database(remote_dir, repo_dir)

    phab = PhabricatorUploader(repo_dir, tip)
    results = phab.generate({'source_files': []})

    assert set(results) == {1}
    assert set(results[1]) == set()
Example #24
0
def test_one_commit_without_differential(mock_secrets, fake_hg_repo):
    """A commit with no Differential link produces no results at all."""
    hg, repo_dir, remote_dir = fake_hg_repo

    add_file(hg, repo_dir, 'file', '1\n2\n3\n4\n5\n6\n7\n')
    tip = commit(hg)

    hg.push(dest=bytes(remote_dir, 'ascii'))
    copy_pushlog_database(remote_dir, repo_dir)

    phab = PhabricatorUploader(repo_dir, tip)
    results = phab.generate({
        'source_files': [
            {'name': 'file_one_commit', 'coverage': [None, 0, 1, 1, 1, 1, 0]},
        ]
    })

    assert set(results) == set()
Example #25
0
    def go_from_trigger_try(self):
        """Upload coverage of a try push to Phabricator.

        Returns early when none of the changesets is linked to a Phabricator
        revision, since there would be nothing to annotate.
        """
        phabricatorUploader = PhabricatorUploader(self.repo_dir, self.revision)

        # Query hg.mozilla.org (try) for the changesets relevant to this revision.
        with hgmo.HGMO(server_address=TRY_REPOSITORY) as hgmo_server:
            changesets = hgmo_server.get_automation_relevance_changesets(self.revision)

        if not any(
            parse_revision_id(changeset["desc"]) is not None for changeset in changesets
        ):
            logger.info(
                "None of the commits in the try push are linked to a Phabricator revision"
            )
            return

        self.retrieve_source_and_artifacts()

        report = self.generate_covdir()

        logger.info("Upload changeset coverage data to Phabricator")
        phabricatorUploader.upload(report, changesets)
Example #26
0
    def analyze_changeset(changeset_to_analyze: str) -> None:
        """Download the covdir report of one changeset and record per-commit
        coverage totals in the shared ``commit_coverage`` mapping.

        NOTE(review): closure over several enclosing-scope names
        (out_dir, bucket, repo_dir, server_address, thread_local,
        commit_coverage) — must stay in sync with the outer function.
        """
        report_name = get_name(
            "mozilla-central", changeset_to_analyze, DEFAULT_FILTER, DEFAULT_FILTER
        )
        # The report is expected to exist in the bucket; fail loudly otherwise.
        assert download_report(
            os.path.join(out_dir, "ccov-reports"), bucket, report_name
        )

        with open(
            os.path.join(out_dir, "ccov-reports", f"{report_name}.json"), "r"
        ) as f:
            report = json.load(f)

        phabricatorUploader = PhabricatorUploader(
            repo_dir, changeset_to_analyze, warnings_enabled=False
        )

        # Use the hg.mozilla.org server to get the automation relevant changesets, since
        # this information is broken in our local repo (which mozilla-unified).
        with hgmo.HGMO(server_address=server_address) as hgmo_remote_server:
            changesets = hgmo_remote_server.get_automation_relevance_changesets(
                changeset_to_analyze
            )

        results = phabricatorUploader.generate(thread_local.hg, report, changesets)

        for changeset in changesets:
            # Lookup changeset coverage from phabricator uploader
            coverage = results.get(changeset["node"])
            if coverage is None:
                logger.info("No coverage found", changeset=changeset)
                commit_coverage[changeset["node"]] = None
                continue

            # Aggregate the per-path counters into per-commit totals.
            commit_coverage[changeset["node"]] = {
                "added": sum(c["lines_added"] for c in coverage["paths"].values()),
                "covered": sum(c["lines_covered"] for c in coverage["paths"].values()),
                "unknown": sum(c["lines_unknown"] for c in coverage["paths"].values()),
            }
Example #27
0
    def go_from_trigger_try(self):
        """Upload coverage of a try push to Phabricator.

        Returns early when none of the changesets is linked to a Phabricator
        revision, since there would be nothing to annotate.
        """
        phabricatorUploader = PhabricatorUploader(self.repo_dir, self.revision)

        # Query hg.mozilla.org (try) for the changesets relevant to this revision.
        with hgmo.HGMO(server_address=TRY_REPOSITORY) as hgmo_server:
            changesets = hgmo_server.get_automation_relevance_changesets(self.revision)

        if not any(phabricatorUploader.parse_revision_id(changeset['desc']) is not None for changeset in changesets):
            logger.info('None of the commits in the try push are linked to a Phabricator revision')
            return

        self.retrieve_source_and_artifacts()

        # Placeholder metadata values; the report is only parsed below and
        # sent to Phabricator, not to an external coverage service.
        output = grcov.report(
            self.artifactsHandler.get(),
            source_dir=self.repo_dir,
            service_number='SERVICE_NUMBER',
            commit_sha='COMMIT_SHA',
            token='TOKEN',
        )
        logger.info('Report generated successfully')

        logger.info('Upload changeset coverage data to Phabricator')
        phabricatorUploader.upload(json.loads(output), changesets)
Example #28
0
def test_third_party(mock_secrets, fake_hg_repo):
    """Third-party detection is driven by the paths listed in ThirdPartyPaths.txt."""
    hg, repo_dir, remote_dir = fake_hg_repo

    add_file(hg, repo_dir, "tools/rewriting/ThirdPartyPaths.txt", "third_party\nsome/path")
    tip = commit(hg, 1)

    phab = PhabricatorUploader(repo_dir, tip)

    assert phab.third_parties == ["third_party", "some/path"]

    # Only files below one of the listed prefixes count as third party.
    cases = [
        ("js/src/xx.cpp", False),
        ("dom/media/yyy.h", False),
        ("third_party/test.cpp", True),
        ("some/test.cpp", False),
        ("some/path/test.cpp", True),
    ]
    for path, expected in cases:
        assert phab.is_third_party(path) is expected
Example #29
0
def test_supported_extensions(mock_secrets, fake_hg_repo):
    """Only source-code file extensions are considered for coverage upload;
    build/config/docs files are excluded."""
    hg, local, remote = fake_hg_repo

    add_file(hg, local, "file", "1\n2\n3\n4\n5\n6\n7\n")
    revision = commit(hg, 1)

    phabricator = PhabricatorUploader(local, revision)

    # Non-source files: never uploaded.
    unsupported = (
        "README",
        "requirements.txt",
        "tools/Cargo.toml",
        "tools/Cargo.lock",
        "dom/feature.idl",
        "dom/feature.webidl",
        "xpcom/moz.build",
        "payload.json",
        "inline.patch",
        "README.mozilla",
        "config.yml",
        "config.yaml",
        "config.ini",
        "tooling.py",
    )
    for path in unsupported:
        assert phabricator.is_supported_extension(path) is False

    # C/C++, JS and Rust sources: always uploaded, regardless of directory.
    supported = (
        "test.cpp",
        "some/path/to/test.cpp",
        "xxxYYY.h",
        "test.c",
        "test.cc",
        "test.cxx",
        "test.hh",
        "test.hpp",
        "test.hxx",
        "test.js",
        "test.jsm",
        "test.xul",
        "test.xml",
        "test.html",
        "test.xhtml",
        "test.rs",
    )
    for path in supported:
        assert phabricator.is_supported_extension(path) is True
Example #30
0
    def go_from_trigger_mozilla_central(self):
        """Full pipeline for a mozilla-central push: sanity-check artifacts,
        generate the grcov report, upload it to Phabricator, Coveralls and
        Codecov, then notify once Codecov has ingested the build.

        Returns early if Codecov already has a build for this commit.
        """
        # Map the Mercurial revision to its git-mirror SHA; Codecov/Coveralls
        # are keyed on git commits.
        commit_sha = self.githubUtils.mercurial_to_git(self.revision)
        try:
            uploader.get_codecov(commit_sha)
            logger.warn('Build was already injested')
            return
        except requests.exceptions.HTTPError:
            # No build on Codecov yet: proceed with the upload.
            pass

        self.retrieve_source_and_artifacts()

        self.githubUtils.update_geckodev_repo()

        logger.info('GitHub revision', revision=commit_sha)

        self.githubUtils.post_github_status(commit_sha)

        # Resolve the push id for this revision; it becomes the Coveralls
        # "service number" below.
        r = requests.get('https://hg.mozilla.org/mozilla-central/json-rev/%s' % self.revision)
        r.raise_for_status()
        push_id = r.json()['pushid']

        # Check that all JavaScript files present in the coverage artifacts actually exist.
        # If they don't, there might be a bug in the LCOV rewriter.
        for artifact in self.artifactsHandler.get():
            if 'jsvm' not in artifact:
                continue

            with zipfile.ZipFile(artifact, 'r') as zf:
                for file_name in zf.namelist():
                    with zf.open(file_name, 'r') as fl:
                        # LCOV source-file records look like "SF:<path>".
                        source_files = [line[3:].decode('utf-8').rstrip() for line in fl if line.startswith(b'SF:')]
                        missing_files = [f for f in source_files if not os.path.exists(os.path.join(self.repo_dir, f))]
                        if len(missing_files) != 0:
                            # Best-effort diagnostic only; the upload continues.
                            logger.warn(f'{missing_files} are present in coverage reports, but missing from the repository')

        output = grcov.report(
            self.artifactsHandler.get(),
            source_dir=self.repo_dir,
            service_number=push_id,
            commit_sha=commit_sha,
            token=secrets[secrets.COVERALLS_TOKEN]
        )
        logger.info('Report generated successfully')

        # Sanity check: a mozilla-central report must cover both JS and C++,
        # otherwise some artifacts were likely lost or malformed.
        report = json.loads(output)
        expected_extensions = ['.js', '.cpp']
        for extension in expected_extensions:
            assert any(f['name'].endswith(extension) for f in
                       report['source_files']), 'No {} file in the generated report'.format(extension)

        logger.info('Upload changeset coverage data to Phabricator')
        phabricatorUploader = PhabricatorUploader(self.repo_dir, self.revision)
        phabricatorUploader.upload(report)

        # Coveralls and Codecov uploads are independent; run them in parallel.
        with ThreadPoolExecutorResult(max_workers=2) as executor:
            executor.submit(uploader.coveralls, output)
            executor.submit(uploader.codecov, output, commit_sha)

        logger.info('Waiting for build to be ingested by Codecov...')
        # Wait until the build has been ingested by Codecov.
        if uploader.codecov_wait(commit_sha):
            logger.info('Build ingested by codecov.io')
            notifier = Notifier(self.repo_dir, self.revision, self.client_id, self.access_token)
            notifier.notify()
        else:
            logger.error('codecov.io took too much time to ingest data.')
def test_simple(mock_secrets, mock_phabricator, fake_hg_repo):
    """End-to-end check of PhabricatorUploader on a single commit: verifies
    the computed coverage string and the exact sequence of Phabricator API
    calls recorded by the `responses` mock.
    """
    hg, local, remote = fake_hg_repo

    add_file(hg, local, 'file', '1\n2\n3\n4\n5\n6\n7\n')
    revision = commit(hg, 1)

    hg.push(dest=bytes(remote, 'ascii'))

    copy_pushlog_database(remote, local)

    phabricator = PhabricatorUploader(local, revision)
    results = phabricator.generate({
        'source_files': [{
            'name': 'file',
            'coverage': [None, 0, 1, 1, 1, 1, 0],
        }]
    })

    assert set(results.keys()) == set([1])
    assert set(results[1].keys()) == set(['file'])
    assert results[1]['file'] == 'NUCCCCU'

    phabricator.upload({
        'source_files': [{
            'name': 'file',
            'coverage': [None, 0, 1, 1, 1, 1, 0],
        }]
    })

    # The checks below index responses.calls[-5] through [-1], so at least
    # five calls must have been recorded. The previous `>= 3` bound was too
    # weak: with 3 or 4 calls it would pass and the [-5] index would raise
    # an IndexError, masking the real failure.
    assert len(responses.calls) >= 5

    call = responses.calls[-5]
    assert call.request.url == 'http://phabricator.test/api/differential.revision.search'
    params = json.loads(urllib.parse.parse_qs(call.request.body)['params'][0])
    assert params['constraints']['ids'] == [1]

    call = responses.calls[-4]
    assert call.request.url == 'http://phabricator.test/api/harbormaster.queryautotargets'
    params = json.loads(urllib.parse.parse_qs(call.request.body)['params'][0])
    assert params['objectPHID'] == 'PHID-DIFF-test'
    assert params['targetKeys'] == ['arcanist.unit']

    call = responses.calls[-3]
    assert call.request.url == 'http://phabricator.test/api/harbormaster.sendmessage'
    params = json.loads(urllib.parse.parse_qs(call.request.body)['params'][0])
    assert params['buildTargetPHID'] == 'PHID-HMBT-test'
    assert params['type'] == 'pass'
    assert params['unit'] == [{'name': 'Aggregate coverage information', 'result': 'pass', 'coverage': {'file': 'NUCCCCU'}}]
    assert params['lint'] == []

    call = responses.calls[-2]
    assert call.request.url == 'http://phabricator.test/api/harbormaster.queryautotargets'
    params = json.loads(urllib.parse.parse_qs(call.request.body)['params'][0])
    assert params['objectPHID'] == 'PHID-DIFF-test'
    assert params['targetKeys'] == ['arcanist.lint']

    call = responses.calls[-1]
    assert call.request.url == 'http://phabricator.test/api/harbormaster.sendmessage'
    params = json.loads(urllib.parse.parse_qs(call.request.body)['params'][0])
    assert params['buildTargetPHID'] == 'PHID-HMBT-test-lint'
    assert params['type'] == 'pass'
    assert params['unit'] == []
    assert params['lint'] == []
def test_two_commits_two_files(mock_secrets, fake_hg_repo):
    """Coverage is attributed per changeset: files added by the first commit
    are reported under revision1, the file added by the second under
    revision2."""
    hg, local, remote = fake_hg_repo

    add_file(hg, local, "file1_commit1", "1\n2\n3\n4\n5\n6\n7\n")
    add_file(hg, local, "file2_commit1", "1\n2\n3\n")
    revision1 = commit(hg, 1)

    add_file(hg, local, "file3_commit2", "1\n2\n3\n4\n5\n")
    revision2 = commit(hg, 2)

    hg.push(dest=bytes(remote, "ascii"))

    copy_pushlog_database(remote, local)

    phabricator = PhabricatorUploader(local, revision2)
    report = covdir_report(
        {
            "source_files": [
                {"name": "file1_commit1", "coverage": [None, 0, 1, 1, 1, 1, 0]},
                {"name": "file2_commit1", "coverage": [1, 1, 0]},
                {"name": "file3_commit2", "coverage": [1, 1, 0, 1, None]},
            ]
        }
    )
    with hgmo.HGMO(local) as hgmo_server:
        stack = changesets(hgmo_server, revision2)

    with hglib.open(local) as hg:
        results = phabricator.generate(hg, report, stack)

    # Expected per-path stats for each of the two changesets.
    first_commit_paths = {
        "file1_commit1": {
            "coverage": "NUCCCCU",
            "lines_added": 6,
            "lines_covered": 4,
            "lines_unknown": 0,
        },
        "file2_commit1": {
            "coverage": "CCU",
            "lines_added": 3,
            "lines_covered": 2,
            "lines_unknown": 0,
        },
    }
    second_commit_paths = {
        "file3_commit2": {
            "coverage": "CCUCN",
            "lines_added": 4,
            "lines_covered": 3,
            "lines_unknown": 0,
        }
    }
    assert results == {
        revision1: {"revision_id": 1, "paths": first_commit_paths},
        revision2: {"revision_id": 2, "paths": second_commit_paths},
    }
Example #33
0
def test_simple(mock_secrets, mock_phabricator, fake_hg_repo):
    """End-to-end check of PhabricatorUploader.generate/upload on a single
    commit: verifies the computed coverage stats and the exact sequence of
    Phabricator API calls recorded by the `responses` mock.
    """
    hg, local, remote = fake_hg_repo

    add_file(hg, local, "file", "1\n2\n3\n4\n5\n6\n7\n")
    revision = commit(hg, 1)

    hg.push(dest=bytes(remote, "ascii"))

    copy_pushlog_database(remote, local)

    phabricator = PhabricatorUploader(local, revision)
    report = covdir_report(
        {"source_files": [{"name": "file", "coverage": [None, 0, 1, 1, 1, 1, 0]}]}
    )
    results = phabricator.generate(report, changesets(local, revision))

    assert results == {
        1: {"file": {"coverage": "NUCCCCU", "lines_added": 7, "lines_covered": 5}}
    }

    phabricator.upload(report, changesets(local, revision))

    # The checks below index responses.calls[-5] through [-1], so at least
    # five calls must have been recorded. The previous `>= 3` bound was too
    # weak: with 3 or 4 calls it would pass and the [-5] index would raise
    # an IndexError, masking the real failure.
    assert len(responses.calls) >= 5

    call = responses.calls[-5]
    assert (
        call.request.url == "http://phabricator.test/api/differential.revision.search"
    )
    params = json.loads(urllib.parse.parse_qs(call.request.body)["params"][0])
    assert params["constraints"]["ids"] == [1]

    call = responses.calls[-4]
    assert (
        call.request.url == "http://phabricator.test/api/harbormaster.queryautotargets"
    )
    params = json.loads(urllib.parse.parse_qs(call.request.body)["params"][0])
    assert params["objectPHID"] == "PHID-DIFF-test"
    assert params["targetKeys"] == ["arcanist.unit"]

    call = responses.calls[-3]
    assert call.request.url == "http://phabricator.test/api/harbormaster.sendmessage"
    params = json.loads(urllib.parse.parse_qs(call.request.body)["params"][0])
    assert params["buildTargetPHID"] == "PHID-HMBT-test"
    assert params["type"] == "pass"
    assert params["unit"] == [
        {
            "name": "Aggregate coverage information",
            "result": "pass",
            "coverage": {"file": "NUCCCCU"},
        }
    ]
    assert params["lint"] == []

    call = responses.calls[-2]
    assert (
        call.request.url == "http://phabricator.test/api/harbormaster.queryautotargets"
    )
    params = json.loads(urllib.parse.parse_qs(call.request.body)["params"][0])
    assert params["objectPHID"] == "PHID-DIFF-test"
    assert params["targetKeys"] == ["arcanist.lint"]

    call = responses.calls[-1]
    assert call.request.url == "http://phabricator.test/api/harbormaster.sendmessage"
    params = json.loads(urllib.parse.parse_qs(call.request.body)["params"][0])
    assert params["buildTargetPHID"] == "PHID-HMBT-test-lint"
    assert params["type"] == "pass"
    assert params["unit"] == []
    assert params["lint"] == []
Example #34
0
    def go(self):
        """Entry point for the coverage bot.

        Pulse-triggered runs (self.from_pulse) upload the coverage report to
        Coveralls/Codecov and notify; otherwise the bot generates static
        reports (suite reports, zero-coverage, chunk mapping) and indexes
        the task in TaskCluster.
        """
        if self.from_pulse:
            # Map the Mercurial revision to its git-mirror SHA; Codecov is
            # keyed on git commits.
            commit_sha = self.githubUtils.mercurial_to_git(self.revision)
            try:
                uploader.get_codecov(commit_sha)
                logger.warn('Build was already injested')
                return
            except requests.exceptions.HTTPError:
                # No build on Codecov yet: continue with the upload path.
                pass

        with ThreadPoolExecutorResult(max_workers=2) as executor:
            # Thread 1 - Download coverage artifacts.
            executor.submit(self.artifactsHandler.download_all)

            # Thread 2 - Clone mozilla-central.
            executor.submit(self.clone_mozilla_central, self.revision)

        if self.from_pulse:
            self.githubUtils.update_geckodev_repo()

            logger.info('GitHub revision', revision=commit_sha)

            self.githubUtils.post_github_status(commit_sha)

            # Resolve the push id for this revision; it becomes the
            # Coveralls "service number" below.
            r = requests.get(
                'https://hg.mozilla.org/mozilla-central/json-rev/%s' %
                self.revision)
            r.raise_for_status()
            push_id = r.json()['pushid']

            output = grcov.report(self.artifactsHandler.get(),
                                  source_dir=self.repo_dir,
                                  service_number=push_id,
                                  commit_sha=commit_sha,
                                  token=secrets[secrets.COVERALLS_TOKEN])
            logger.info('Report generated successfully')

            logger.info('Upload changeset coverage data to Phabricator')
            phabricatorUploader = PhabricatorUploader(self.repo_dir,
                                                      self.revision)
            phabricatorUploader.upload(json.loads(output))

            # Coveralls and Codecov uploads are independent; run in parallel.
            with ThreadPoolExecutorResult(max_workers=2) as executor:
                executor.submit(uploader.coveralls, output)
                executor.submit(uploader.codecov, output, commit_sha)

            logger.info('Waiting for build to be ingested by Codecov...')
            # Wait until the build has been ingested by Codecov.
            if uploader.codecov_wait(commit_sha):
                logger.info('Build ingested by codecov.io')
                self.notifier.notify()
            else:
                logger.error('codecov.io took too much time to ingest data.')
        else:
            logger.info('Generating suite reports')
            os.makedirs(self.ccov_reports_dir, exist_ok=True)
            suite_reports.generate(self.suites, self.artifactsHandler,
                                   self.ccov_reports_dir, self.repo_dir)

            logger.info('Generating zero coverage reports')
            zc = ZeroCov(self.repo_dir)
            zc.generate(self.artifactsHandler.get(), self.revision,
                        self.github_revision)

            logger.info('Generating chunk mapping')
            chunk_mapping.generate(self.repo_dir, self.revision,
                                   self.artifactsHandler)

            # Index the task in the TaskCluster index at the given revision and as "latest".
            # Given that all tasks have the same rank, the latest task that finishes will
            # overwrite the "latest" entry.
            namespaces = [
                'project.releng.services.project.{}.code_coverage_bot.{}'.
                format(secrets[secrets.APP_CHANNEL], self.revision),
                'project.releng.services.project.{}.code_coverage_bot.latest'.
                format(secrets[secrets.APP_CHANNEL]),
            ]

            for namespace in namespaces:
                self.index_service.insertTask(
                    namespace, {
                        'taskId':
                        os.environ['TASK_ID'],
                        'rank':
                        0,
                        'data': {},
                        # Index entries expire 180 days from now.
                        'expires':
                        (datetime.utcnow() +
                         timedelta(180)).strftime('%Y-%m-%dT%H:%M:%S.%fZ'),
                    })

            # Push the generated suite reports to the code-coverage-reports repo.
            os.chdir(self.ccov_reports_dir)
            self.githubUtils.update_codecoveragereports_repo()
Example #35
0
def test_notification(mock_secrets, mock_taskcluster, mock_phabricator, fake_hg_repo):
    """The notification email only mentions changesets whose added lines
    ended up uncovered (revision2 here: 0 of 6 added lines covered)."""
    hg, local, remote = fake_hg_repo

    add_file(hg, local, "file", "1\n2\n3\n4\n")
    revision1 = commit(hg, 1)

    add_file(hg, local, "file", "1\n2\n3\n4\n5\n6\n7\n8\n9\n10\n")
    revision2 = commit(hg, 2)

    hg.push(dest=bytes(remote, "ascii"))

    copy_pushlog_database(remote, local)

    with hgmo.HGMO(local) as hgmo_server:
        stack = changesets(hgmo_server, revision2)
    assert len(stack) == 2
    expected_descs = [
        "Commit [(b'A', b'file')]Differential Revision: https://phabricator.services.mozilla.com/D1",
        "Commit [(b'M', b'file')]Differential Revision: https://phabricator.services.mozilla.com/D2",
    ]
    for changeset, desc in zip(stack, expected_descs):
        assert changeset["desc"] == desc

    report = covdir_report(
        {
            "source_files": [
                {"name": "file", "coverage": [None, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0]}
            ]
        }
    )
    phab = PhabricatorUploader(local, revision2)
    with hglib.open(local) as hg:
        changesets_coverage = phab.generate(hg, report, stack)

    assert changesets_coverage == {
        revision1: {
            "revision_id": 1,
            "paths": {
                "file": {
                    "lines_added": 3,
                    "lines_covered": 2,
                    "lines_unknown": 0,
                    "coverage": "NCCU",
                }
            },
        },
        revision2: {
            "revision_id": 2,
            "paths": {
                "file": {
                    "lines_added": 6,
                    "lines_covered": 0,
                    "lines_unknown": 0,
                    "coverage": "NCCUUUUUUU",
                }
            },
        },
    }

    # Only the badly-covered second changeset shows up in the email body.
    mail = notify_email(revision2, stack, changesets_coverage)
    assert mail == (
        "* [Commit [(b'M', b'file')]Differential Revision: "
        "https://phabricator.services.mozilla.com/D2]"
        "(https://phabricator.services.mozilla.com/D2): 0 covered out of 6 added.\n"
    )
Example #36
0
    def go_from_trigger_mozilla_central(self):
        """Full pipeline for a mozilla-central push: sanity-check artifacts,
        generate the grcov report, upload it to Phabricator, Coveralls and
        Codecov, then notify once Codecov has ingested the build.

        Returns early if Codecov already has a build for this commit.
        """
        # Map the Mercurial revision to its git-mirror SHA; Codecov/Coveralls
        # are keyed on git commits.
        commit_sha = self.githubUtils.mercurial_to_git(self.revision)
        try:
            uploader.get_codecov(commit_sha)
            logger.warn('Build was already injested')
            return
        except requests.exceptions.HTTPError:
            # No build on Codecov yet: proceed with the upload.
            pass

        self.retrieve_source_and_artifacts()

        self.githubUtils.update_geckodev_repo()

        logger.info('GitHub revision', revision=commit_sha)

        self.githubUtils.post_github_status(commit_sha)

        # Resolve the push id for this revision; it becomes the Coveralls
        # "service number" below.
        r = requests.get('https://hg.mozilla.org/mozilla-central/json-rev/%s' %
                         self.revision)
        r.raise_for_status()
        push_id = r.json()['pushid']

        # Check that all JavaScript files present in the coverage artifacts actually exist.
        # If they don't, there might be a bug in the LCOV rewriter.
        for artifact in self.artifactsHandler.get():
            if 'jsvm' not in artifact:
                continue

            with zipfile.ZipFile(artifact, 'r') as zf:
                for file_name in zf.namelist():
                    with zf.open(file_name, 'r') as fl:
                        # LCOV source-file records look like "SF:<path>".
                        source_files = [
                            line[3:].decode('utf-8').rstrip() for line in fl
                            if line.startswith(b'SF:')
                        ]
                        missing_files = [
                            f for f in source_files if
                            not os.path.exists(os.path.join(self.repo_dir, f))
                        ]
                        if len(missing_files) != 0:
                            # Best-effort diagnostic only; the upload continues.
                            logger.warn(
                                f'{missing_files} are present in coverage reports, but missing from the repository'
                            )

        output = grcov.report(self.artifactsHandler.get(),
                              source_dir=self.repo_dir,
                              service_number=push_id,
                              commit_sha=commit_sha,
                              token=secrets[secrets.COVERALLS_TOKEN])
        logger.info('Report generated successfully')

        # Sanity check: a mozilla-central report must cover both JS and C++,
        # otherwise some artifacts were likely lost or malformed.
        report = json.loads(output)
        expected_extensions = ['.js', '.cpp']
        for extension in expected_extensions:
            assert any(
                f['name'].endswith(extension) for f in report['source_files']
            ), 'No {} file in the generated report'.format(extension)

        logger.info('Upload changeset coverage data to Phabricator')
        phabricatorUploader = PhabricatorUploader(self.repo_dir, self.revision)
        phabricatorUploader.upload(report)

        # Coveralls and Codecov uploads are independent; run them in parallel.
        with ThreadPoolExecutorResult(max_workers=2) as executor:
            executor.submit(uploader.coveralls, output)
            executor.submit(uploader.codecov, output, commit_sha)

        logger.info('Waiting for build to be ingested by Codecov...')
        # Wait until the build has been ingested by Codecov.
        if uploader.codecov_wait(commit_sha):
            logger.info('Build ingested by codecov.io')
            notifier = Notifier(self.repo_dir, self.revision, self.client_id,
                                self.access_token)
            notifier.notify()
        else:
            logger.error('codecov.io took too much time to ingest data.')