Example no. 1
0
    def go_from_cron(self):
        """Scheduled (cron) entry point.

        Builds the suite, zero-coverage and chunk-mapping reports, indexes
        this task in the TaskCluster index, then pushes the generated suite
        reports to the code-coverage-reports GitHub repository.
        """
        self.retrieve_source_and_artifacts()

        logger.info('Generating suite reports')
        os.makedirs(self.ccov_reports_dir, exist_ok=True)
        suite_reports.generate(self.suites, self.artifactsHandler, self.ccov_reports_dir, self.repo_dir)

        logger.info('Generating zero coverage reports')
        ZeroCov(self.repo_dir).generate(self.artifactsHandler.get(), self.revision, self.github_revision)

        logger.info('Generating chunk mapping')
        chunk_mapping.generate(self.repo_dir, self.revision, self.artifactsHandler)

        # Index the task in the TaskCluster index at the given revision and as
        # "latest". All tasks share the same rank, so whichever task finishes
        # last overwrites the "latest" entry.
        namespaces = (
            'project.releng.services.project.{}.code_coverage_bot.{}'.format(secrets[secrets.APP_CHANNEL], self.revision),
            'project.releng.services.project.{}.code_coverage_bot.latest'.format(secrets[secrets.APP_CHANNEL]),
        )

        for namespace in namespaces:
            # Each index entry expires 180 days from the moment it is inserted.
            payload = {
                'taskId': os.environ['TASK_ID'],
                'rank': 0,
                'data': {},
                'expires': (datetime.utcnow() + timedelta(180)).strftime('%Y-%m-%dT%H:%M:%S.%fZ'),
            }
            self.index_service.insertTask(namespace, payload)

        os.chdir(self.ccov_reports_dir)
        self.githubUtils.update_codecoveragereports_repo()
Example no. 2
0
    def go_from_cron(self):
        """Scheduled (cron) entry point.

        Generates the suite, zero-coverage and chunk-mapping reports, indexes
        this task in the TaskCluster index (at the revision and as "latest"),
        then pushes the generated suite reports to GitHub.
        """
        self.retrieve_source_and_artifacts()

        logger.info('Generating suite reports')
        os.makedirs(self.ccov_reports_dir, exist_ok=True)
        suite_reports.generate(self.suites, self.artifactsHandler, self.ccov_reports_dir, self.repo_dir)

        logger.info('Generating zero coverage reports')
        zc = ZeroCov(self.repo_dir)
        zc.generate(self.artifactsHandler.get(), self.revision, self.github_revision)

        logger.info('Generating chunk mapping')
        chunk_mapping.generate(self.repo_dir, self.revision, self.artifactsHandler)

        # Index the task in the TaskCluster index at the given revision and as "latest".
        # Given that all tasks have the same rank, the latest task that finishes will
        # overwrite the "latest" entry.
        namespaces = [
            'project.releng.services.project.{}.code_coverage_bot.{}'.format(secrets[secrets.APP_CHANNEL], self.revision),
            'project.releng.services.project.{}.code_coverage_bot.latest'.format(secrets[secrets.APP_CHANNEL]),
        ]

        for namespace in namespaces:
            self.index_service.insertTask(
                namespace,
                {
                    'taskId': os.environ['TASK_ID'],
                    'rank': 0,
                    'data': {},
                    # Index entry expires 180 days from insertion time.
                    'expires': (datetime.utcnow() + timedelta(180)).strftime('%Y-%m-%dT%H:%M:%S.%fZ'),
                }
            )

        # The GitHub helper presumably commits the cwd's contents —
        # TODO confirm; hence the chdir into the reports directory first.
        os.chdir(self.ccov_reports_dir)
        self.githubUtils.update_codecoveragereports_repo()
Example no. 3
0
    def go_from_cron(self):
        """Scheduled (cron) entry point: generate reports and index the task.

        Builds the zero-coverage and chunk-mapping reports, then records this
        task in the TaskCluster index under the revision and as "latest".
        """
        self.retrieve_source_and_artifacts()

        logger.info("Generating zero coverage reports")
        ZeroCov(self.repo_dir).generate(self.artifactsHandler.get(), self.revision)

        logger.info("Generating chunk mapping")
        chunk_mapping.generate(self.repo_dir, self.revision, self.artifactsHandler)

        # Index the task in the TaskCluster index at the given revision and as
        # "latest". All tasks share the same rank, so whichever task finishes
        # last overwrites the "latest" entry.
        namespaces = (
            "project.releng.services.project.{}.code_coverage_bot.{}".format(
                secrets[secrets.APP_CHANNEL], self.revision
            ),
            "project.releng.services.project.{}.code_coverage_bot.latest".format(
                secrets[secrets.APP_CHANNEL]
            ),
        )

        for namespace in namespaces:
            # Each index entry expires 180 days after it is inserted.
            payload = {
                "taskId": os.environ["TASK_ID"],
                "rank": 0,
                "data": {},
                "expires": (datetime.utcnow() + timedelta(180)).strftime(
                    "%Y-%m-%dT%H:%M:%S.%fZ"
                ),
            }
            self.index_service.insertTask(namespace, payload)
Example no. 4
0
    def run(self):
        """Cron task body.

        Generates the zero-coverage report and the chunk mapping, then indexes
        this task in the TaskCluster index at the revision and as "latest".
        """
        self.retrieve_source_and_artifacts()

        logger.info("Generating zero coverage reports")
        ZeroCov(self.repo_dir).generate(self.artifactsHandler.get(), self.revision)

        logger.info("Generating chunk mapping")
        chunk_mapping.generate(self.repo_dir, self.revision, self.artifactsHandler)

        # Index the task in the TaskCluster index at the given revision and as
        # "latest". All tasks share the same rank, so whichever task finishes
        # last overwrites the "latest" entry.
        channel = secrets[secrets.APP_CHANNEL]
        self.index_task([
            "project.relman.code-coverage.{}.cron.{}".format(channel, self.revision),
            "project.relman.code-coverage.{}.cron.latest".format(channel),
        ])
Example no. 5
0
def test_zero_coverage(tmpdir, fake_artifacts_handler,
                       fake_hg_repo_with_contents):
    """End-to-end test of chunk_mapping.generate against a mocked ActiveData.

    NOTE(review): despite the name, this exercises the chunk-mapping
    generator, not the zero-coverage report — consider renaming.
    """
    tmp_path = tmpdir.strpath

    # Fake ActiveData endpoint: inspect each query payload and return the
    # canned data the generator expects; assert on any unexpected query shape.
    def request_callback(request):
        payload = json.loads(request.body.decode('utf-8'))

        print(payload)

        if payload['from'] == 'coverage':
            if 'groupby' in payload:
                if payload['groupby'] == ['test.suite']:
                    # Suites (with hit counts) that have coverage data.
                    data = [
                        ['chrome', 2],
                        ['jsreftest', 1],
                    ]
                elif payload['groupby'] == ['test.name']:
                    # The test-name query must be filtered to the suites above.
                    assert payload['where']['and'][4]['in']['test.suite'] == [
                        'chrome', 'jsreftest'
                    ]
                    data = [
                        ['js/xpconnect/tests/unit/test_lazyproxy.js', 60],
                        [
                            'netwerk/test/unit/test_substituting_protocol_handler.js',
                            55
                        ],
                    ]
                else:
                    assert False, 'Unexpected groupby'
            elif 'select' in payload:
                if payload['select'] == ['source.file.name', 'test.name']:
                    # Source-file -> covering-test pairs (parallel lists).
                    data = {
                        'source.file.name': [
                            'js/src/vm/TraceLogging.cpp',
                            'gfx/skia/skia/src/pathops/SkPathOpsQuad.cpp',
                        ],
                        'test.name': [
                            'js/xpconnect/tests/unit/test_lazyproxy.js',
                            'netwerk/test/unit/test_substituting_protocol_handler.js',
                        ],
                    }
                else:
                    assert False, 'Unexpected select'
            else:
                assert False, 'Unexpected payload'
        elif payload['from'] == 'unittest':
            if 'groupby' in payload:
                if payload['groupby'] == ['run.suite.fullname']:
                    data = [
                        ['marionette', 3590],
                        ['gtest', 2078],
                        ['talos', 3000],
                    ]
                else:
                    assert False, 'Unexpected groupby'
            elif 'select' in payload:
                if payload['select'] == ['result.test', 'run.key']:
                    requested_suite = payload['where']['and'][2]['eq'][
                        'run.suite.fullname']
                    if requested_suite == 'gtest':
                        data = {}
                    elif requested_suite == 'marionette':
                        # Platform is selected via a run.key prefix filter.
                        prefix = payload['where']['and'][3]['prefix'][
                            'run.key']
                        if prefix == 'test-linux64-ccov':
                            data = {
                                'result.test': [
                                    'marionette-test1',
                                ],
                                'run.key': [
                                    'test-linux64-ccov/debug-marionette-headless-e10s',
                                ],
                            }
                        elif prefix == 'test-windows10-64-ccov':
                            data = {
                                'result.test': [
                                    'marionette-test2',
                                ],
                                'run.key': [
                                    'test-windows10-64-ccov/debug-marionette-e10s',
                                ],
                            }
                        else:
                            assert False, 'Unexpected prefix'
                    else:
                        assert False, 'Unexpected suite'
                else:
                    assert False, 'Unexpected select'
            else:
                assert False, 'Unexpected payload'
        else:
            assert False, 'Unexpected from'

        return (200, {}, json.dumps({'data': data}))

    responses.add_callback(
        responses.POST,
        chunk_mapping.ACTIVEDATA_QUERY_URL,
        callback=request_callback,
        content_type='application/json',
    )

    chunk_mapping.generate(
        fake_hg_repo_with_contents,
        '632bb768b1dd4b96a196412e8f7b669ca09d6d91',
        fake_artifacts_handler,
        out_dir=tmp_path,
    )

    # The generator writes a .tar.xz containing a SQLite database.
    with tarfile.open(os.path.join(tmp_path, 'chunk_mapping.tar.xz')) as t:
        t.extract('chunk_mapping.sqlite', tmp_path)

    with sqlite3.connect(os.path.join(tmp_path,
                                      'chunk_mapping.sqlite')) as conn:
        c = conn.cursor()

        assert_file_to_test(c, 'js/src/vm/TraceLogging.cpp',
                            'js/xpconnect/tests/unit/test_lazyproxy.js')
        assert_file_to_test(
            c, 'gfx/skia/skia/src/pathops/SkPathOpsQuad.cpp',
            'netwerk/test/unit/test_substituting_protocol_handler.js')

        assert_file_to_chunk(c, 'js/src/jit/BitSet.cpp', 'linux', 'chunk1')
        assert_file_to_chunk(c, 'toolkit/components/osfile/osfile.jsm',
                             'linux', 'chunk2')
        assert_file_to_chunk(c, 'code_coverage_bot/cli.py', 'windows',
                             'chunk1')
        assert_file_to_chunk(c, 'js/src/jit/JIT.cpp', 'windows', 'chunk2')

        assert_chunk_to_test(c, 'linux', 'marionette-headless',
                             ['marionette-test1'])
        assert_chunk_to_test(c, 'windows', 'marionette', ['marionette-test2'])
def test_zero_coverage(tmpdir, fake_artifacts_handler, fake_hg_repo_with_contents):
    """End-to-end test of chunk_mapping.generate against a mocked ActiveData.

    NOTE(review): despite the name, this exercises the chunk-mapping
    generator, not the zero-coverage report — consider renaming.
    """
    tmp_path = tmpdir.strpath

    # Fake ActiveData endpoint: inspect each query payload and return the
    # canned data the generator expects; assert on any unexpected query shape.
    def request_callback(request):
        payload = json.loads(request.body.decode('utf-8'))

        print(payload)

        if payload['from'] == 'coverage':
            if 'groupby' in payload:
                if payload['groupby'] == ['test.suite']:
                    # Suites (with hit counts) that have coverage data.
                    data = [
                        ['chrome', 2],
                        ['jsreftest', 1],
                    ]
                elif payload['groupby'] == ['test.name']:
                    # The test-name query must be filtered to the suites above.
                    assert payload['where']['and'][4]['in']['test.suite'] == ['chrome', 'jsreftest']
                    data = [
                        ['js/xpconnect/tests/unit/test_lazyproxy.js', 60],
                        ['netwerk/test/unit/test_substituting_protocol_handler.js', 55],
                    ]
                else:
                    assert False, 'Unexpected groupby'
            elif 'select' in payload:
                if payload['select'] == ['source.file.name', 'test.name']:
                    # Source-file -> covering-test pairs (parallel lists).
                    data = {
                        'source.file.name': [
                            'js/src/vm/TraceLogging.cpp',
                            'gfx/skia/skia/src/pathops/SkPathOpsQuad.cpp',
                        ],
                        'test.name': [
                            'js/xpconnect/tests/unit/test_lazyproxy.js',
                            'netwerk/test/unit/test_substituting_protocol_handler.js',
                        ],
                    }
                else:
                    assert False, 'Unexpected select'
            else:
                assert False, 'Unexpected payload'
        elif payload['from'] == 'unittest':
            if 'groupby' in payload:
                if payload['groupby'] == ['run.suite.fullname']:
                    data = [
                        ['marionette', 3590],
                        ['gtest', 2078],
                        ['talos', 3000],
                    ]
                else:
                    assert False, 'Unexpected groupby'
            elif 'select' in payload:
                if payload['select'] == ['result.test', 'run.key']:
                    requested_suite = payload['where']['and'][2]['eq']['run.suite.fullname']
                    if requested_suite == 'gtest':
                        data = {}
                    elif requested_suite == 'marionette':
                        # Platform is selected via a run.key prefix filter.
                        prefix = payload['where']['and'][3]['prefix']['run.key']
                        if prefix == 'test-linux64-ccov':
                            data = {
                                'result.test': [
                                    'marionette-test1',
                                ],
                                'run.key': [
                                    'test-linux64-ccov/debug-marionette-headless-e10s',
                                ],
                            }
                        elif prefix == 'test-windows10-64-ccov':
                            data = {
                                'result.test': [
                                    'marionette-test2',
                                ],
                                'run.key': [
                                    'test-windows10-64-ccov/debug-marionette-e10s',
                                ],
                            }
                        else:
                            assert False, 'Unexpected prefix'
                    else:
                        assert False, 'Unexpected suite'
                else:
                    assert False, 'Unexpected select'
            else:
                assert False, 'Unexpected payload'
        else:
            assert False, 'Unexpected from'

        return (200, {}, json.dumps({'data': data}))

    responses.add_callback(
        responses.POST, chunk_mapping.ACTIVEDATA_QUERY_URL,
        callback=request_callback,
        content_type='application/json',
    )

    chunk_mapping.generate(
        fake_hg_repo_with_contents,
        '632bb768b1dd4b96a196412e8f7b669ca09d6d91',
        fake_artifacts_handler,
        out_dir=tmp_path,
    )

    # The generator writes a .tar.xz containing a SQLite database.
    with tarfile.open(os.path.join(tmp_path, 'chunk_mapping.tar.xz')) as t:
        t.extract('chunk_mapping.sqlite', tmp_path)

    with sqlite3.connect(os.path.join(tmp_path, 'chunk_mapping.sqlite')) as conn:
        c = conn.cursor()

        assert_file_to_test(c, 'js/src/vm/TraceLogging.cpp', 'js/xpconnect/tests/unit/test_lazyproxy.js')
        assert_file_to_test(c, 'gfx/skia/skia/src/pathops/SkPathOpsQuad.cpp', 'netwerk/test/unit/test_substituting_protocol_handler.js')

        assert_file_to_chunk(c, 'js/src/jit/BitSet.cpp', 'linux', 'chunk1')
        assert_file_to_chunk(c, 'toolkit/components/osfile/osfile.jsm', 'linux', 'chunk2')
        assert_file_to_chunk(c, 'code_coverage_bot/cli.py', 'windows', 'chunk1')
        assert_file_to_chunk(c, 'js/src/jit/JIT.cpp', 'windows', 'chunk2')

        assert_chunk_to_test(c, 'linux', 'marionette-headless', ['marionette-test1'])
        assert_chunk_to_test(c, 'windows', 'marionette', ['marionette-test2'])
def test_zero_coverage(tmpdir, fake_artifacts_handler, fake_hg_repo_with_contents):
    """End-to-end test of chunk_mapping.generate against a mocked ActiveData.

    This variant also checks the per-chunk mapping archive. NOTE(review):
    despite the name, this exercises the chunk-mapping generator, not the
    zero-coverage report — consider renaming.
    """
    tmp_path = tmpdir.strpath

    # Fake ActiveData endpoint: inspect each query payload and return the
    # canned data the generator expects; assert on any unexpected query shape.
    def request_callback(request):
        payload = json.loads(request.body.decode("utf-8"))

        from pprint import pprint

        pprint(payload)

        if payload["from"] == "coverage":
            if "groupby" in payload:
                if payload["groupby"] == ["test.suite"]:
                    # Suites (with hit counts) that have coverage data.
                    data = [["chrome", 2], ["jsreftest", 1]]
                elif payload["groupby"] == ["test.name"]:
                    # The test-name query must be filtered to the suites above.
                    assert payload["where"]["and"][4]["in"]["test.suite"] == [
                        "chrome",
                        "jsreftest",
                    ]
                    data = [
                        ["js/xpconnect/tests/unit/test_lazyproxy.js", 60],
                        ["netwerk/test/unit/test_substituting_protocol_handler.js", 55],
                    ]
                else:
                    assert False, "Unexpected groupby"
            elif "select" in payload:
                if payload["select"] == ["source.file.name", "test.name"]:
                    # Source-file -> covering-test pairs (parallel lists).
                    data = {
                        "source.file.name": [
                            "js/src/vm/TraceLogging.cpp",
                            "gfx/skia/skia/src/pathops/SkPathOpsQuad.cpp",
                        ],
                        "test.name": [
                            "js/xpconnect/tests/unit/test_lazyproxy.js",
                            "netwerk/test/unit/test_substituting_protocol_handler.js",
                        ],
                    }
                else:
                    assert False, "Unexpected select"
            else:
                assert False, "Unexpected payload"
        elif payload["from"] == "unittest":
            if "groupby" in payload:
                if payload["groupby"] == ["run.suite.fullname"]:
                    data = [["marionette", 3590], ["gtest", 2078], ["talos", 3000]]
                else:
                    assert False, "Unexpected groupby"
            elif "select" in payload:
                if payload["select"] == ["result.test", "run.key"]:
                    requested_suite = payload["where"]["and"][2]["eq"][
                        "run.suite.fullname"
                    ]
                    if requested_suite in ["gtest", "talos"]:
                        data = {}
                    elif requested_suite == "marionette":
                        # Platform is selected via a run.key regexp filter here
                        # (the older versions used a prefix filter).
                        regexp = payload["where"]["and"][3]["regexp"]["run.key"]
                        if regexp == ".*-linux.*-ccov.*/.*":
                            data = {
                                "result.test": ["marionette-test1"],
                                "run.key": [
                                    "test-linux64-ccov/opt-marionette-headless-e10s"
                                ],
                            }
                        elif regexp == ".*-windows.*-ccov.*/.*":
                            data = {
                                "result.test": ["marionette-test2"],
                                "run.key": [
                                    "test-windows10-64-ccov/debug-marionette-e10s"
                                ],
                            }
                        else:
                            assert False, "Unexpected regexp"
                    else:
                        assert False, "Unexpected suite"
                else:
                    assert False, "Unexpected select"
            else:
                assert False, "Unexpected payload"
        else:
            assert False, "Unexpected from"

        return (200, {}, json.dumps({"data": data}))

    responses.add_callback(
        responses.POST,
        chunk_mapping.ACTIVEDATA_QUERY_URL,
        callback=request_callback,
        content_type="application/json",
    )

    chunk_mapping.generate(
        fake_hg_repo_with_contents,
        "632bb768b1dd4b96a196412e8f7b669ca09d6d91",
        fake_artifacts_handler,
        out_dir=tmp_path,
    )

    # First archive: the aggregated chunk mapping database.
    with tarfile.open(os.path.join(tmp_path, "chunk_mapping.tar.xz")) as t:
        t.extract("chunk_mapping.sqlite", tmp_path)

    with sqlite3.connect(os.path.join(tmp_path, "chunk_mapping.sqlite")) as conn:
        c = conn.cursor()

        assert_file_to_test(
            c, "js/src/vm/TraceLogging.cpp", "js/xpconnect/tests/unit/test_lazyproxy.js"
        )
        assert_file_to_test(
            c,
            "gfx/skia/skia/src/pathops/SkPathOpsQuad.cpp",
            "netwerk/test/unit/test_substituting_protocol_handler.js",
        )

        assert_file_to_chunk(c, "js/src/jit/BitSet.cpp", [("linux", "chunk1")])
        assert_file_to_chunk(
            c, "toolkit/components/osfile/osfile.jsm", [("linux", "chunk2")]
        )
        assert_file_to_chunk(c, "code_coverage_bot/cli.py", [("windows", "chunk1")])
        assert_file_to_chunk(c, "js/src/jit/JIT.cpp", [("windows", "chunk2")])

        assert_chunk_to_test(c, "linux", "marionette-headless", ["marionette-test1"])
        assert_chunk_to_test(c, "windows", "marionette", ["marionette-test2"])

    # Second archive: the per-chunk mapping database, which may associate a
    # file with several (platform, chunk) pairs.
    with tarfile.open(os.path.join(tmp_path, "per_chunk_mapping.tar.xz")) as t:
        t.extract("per_chunk_mapping.sqlite", tmp_path)

    with sqlite3.connect(os.path.join(tmp_path, "per_chunk_mapping.sqlite")) as conn:
        c = conn.cursor()

        assert_file_to_chunk(
            c,
            "js/src/jit/BitSet.cpp",
            [("linux", "chunk1"), ("linux", "mochitest"), ("windows", "mochitest")],
        )
        assert_file_to_chunk(
            c, "toolkit/components/osfile/osfile.jsm", [("linux", "chunk2")]
        )
        assert_file_to_chunk(c, "code_coverage_bot/cli.py", [("windows", "chunk1")])
        assert_file_to_chunk(c, "js/src/jit/JIT.cpp", [("windows", "chunk2")])

        assert_chunk_to_test(c, "linux", "marionette-headless", ["marionette-test1"])
        assert_chunk_to_test(c, "windows", "marionette", ["marionette-test2"])
Example no. 8
0
    def go(self):
        """Main entry point, with two modes selected by ``self.from_pulse``.

        Pulse mode (per-push): upload coverage to Coveralls/Codecov and
        Phabricator, then notify once Codecov has ingested the build.
        Cron mode: generate suite/zero-coverage/chunk-mapping reports, index
        the task in TaskCluster and push the reports to GitHub.
        """
        if self.from_pulse:
            commit_sha = self.githubUtils.mercurial_to_git(self.revision)
            try:
                # If Codecov already knows this commit, the build was already
                # processed — nothing to do.
                # NOTE(review): 'injested' (sic) is a typo in a runtime log
                # message and logger.warn is the deprecated spelling; both
                # should be fixed in a code change, not here.
                uploader.get_codecov(commit_sha)
                logger.warn('Build was already injested')
                return
            except requests.exceptions.HTTPError:
                pass

        # Run the two independent setup steps concurrently.
        with ThreadPoolExecutorResult(max_workers=2) as executor:
            # Thread 1 - Download coverage artifacts.
            executor.submit(self.artifactsHandler.download_all)

            # Thread 2 - Clone mozilla-central.
            executor.submit(self.clone_mozilla_central, self.revision)

        if self.from_pulse:
            self.githubUtils.update_geckodev_repo()

            logger.info('GitHub revision', revision=commit_sha)

            self.githubUtils.post_github_status(commit_sha)

            # Look up the push id for this revision on hg.mozilla.org; it is
            # passed to grcov as the coverage service build number.
            r = requests.get(
                'https://hg.mozilla.org/mozilla-central/json-rev/%s' %
                self.revision)
            r.raise_for_status()
            push_id = r.json()['pushid']

            output = grcov.report(self.artifactsHandler.get(),
                                  source_dir=self.repo_dir,
                                  service_number=push_id,
                                  commit_sha=commit_sha,
                                  token=secrets[secrets.COVERALLS_TOKEN])
            logger.info('Report generated successfully')

            logger.info('Upload changeset coverage data to Phabricator')
            phabricatorUploader = PhabricatorUploader(self.repo_dir,
                                                      self.revision)
            phabricatorUploader.upload(json.loads(output))

            # Upload to Coveralls and Codecov in parallel.
            with ThreadPoolExecutorResult(max_workers=2) as executor:
                executor.submit(uploader.coveralls, output)
                executor.submit(uploader.codecov, output, commit_sha)

            logger.info('Waiting for build to be ingested by Codecov...')
            # Wait until the build has been ingested by Codecov.
            if uploader.codecov_wait(commit_sha):
                logger.info('Build ingested by codecov.io')
                self.notifier.notify()
            else:
                logger.error('codecov.io took too much time to ingest data.')
        else:
            logger.info('Generating suite reports')
            os.makedirs(self.ccov_reports_dir, exist_ok=True)
            suite_reports.generate(self.suites, self.artifactsHandler,
                                   self.ccov_reports_dir, self.repo_dir)

            logger.info('Generating zero coverage reports')
            zc = ZeroCov(self.repo_dir)
            zc.generate(self.artifactsHandler.get(), self.revision,
                        self.github_revision)

            logger.info('Generating chunk mapping')
            chunk_mapping.generate(self.repo_dir, self.revision,
                                   self.artifactsHandler)

            # Index the task in the TaskCluster index at the given revision and as "latest".
            # Given that all tasks have the same rank, the latest task that finishes will
            # overwrite the "latest" entry.
            namespaces = [
                'project.releng.services.project.{}.code_coverage_bot.{}'.
                format(secrets[secrets.APP_CHANNEL], self.revision),
                'project.releng.services.project.{}.code_coverage_bot.latest'.
                format(secrets[secrets.APP_CHANNEL]),
            ]

            for namespace in namespaces:
                self.index_service.insertTask(
                    namespace, {
                        'taskId':
                        os.environ['TASK_ID'],
                        'rank':
                        0,
                        'data': {},
                        # Index entry expires 180 days from insertion time.
                        'expires':
                        (datetime.utcnow() +
                         timedelta(180)).strftime('%Y-%m-%dT%H:%M:%S.%fZ'),
                    })

            # The GitHub helper presumably commits the cwd's contents —
            # TODO confirm; hence the chdir into the reports directory first.
            os.chdir(self.ccov_reports_dir)
            self.githubUtils.update_codecoveragereports_repo()