Example #1
    def go(self):
        with ThreadPoolExecutorResult(max_workers=2) as executor:
            # Thread 1 - Download coverage artifacts.
            executor.submit(
                lambda: self.download_coverage_artifacts(self.task_id))

            # Thread 2 - Clone and build mozilla-central
            clone_future = executor.submit(
                lambda: self.clone_mozilla_central(self.revision))
            clone_future.add_done_callback(lambda f: self.build_files())

        if self.gecko_dev_user is not None and self.gecko_dev_pwd is not None:
            self.update_github_repo()

        commit_sha = self.get_github_commit(self.revision)
        logger.info('GitHub revision', revision=commit_sha)

        # TODO: Process suites in parallel.
        # While we are uploading results for a suite, we can start to process the next one.
        # TODO: Reenable when Coveralls and/or Codecov will be able to properly handle the load.
        '''for suite in self.suites:
            output = self.generate_info(commit_sha, self.coveralls_token, suite)

            logger.info('Suite report generated', suite=suite)

            uploader.coveralls(output)
            uploader.codecov(output, commit_sha, self.codecov_token, [suite.replace('-', '_')])'''

        output = self.generate_info(commit_sha, self.coveralls_token)
        logger.info('Report generated successfully')

        with ThreadPoolExecutorResult(max_workers=2) as executor:
            executor.submit(lambda: uploader.coveralls(output))
            executor.submit(
                lambda: uploader.codecov(output, commit_sha, self.codecov_token))

        try:
            logger.info('Waiting for build to be ingested by Codecov...')
            # Wait until the build has been ingested by Codecov.
            if uploader.codecov_wait(commit_sha):
                logger.info('Build ingested by codecov.io')
            else:
                logger.info('codecov.io took too much time to ingest data.')
                return

            # Get pushlog and ask the backend to generate the coverage by changeset
            # data, which will be cached.
            r = requests.get(
                'https://hg.mozilla.org/mozilla-central/json-pushes?changeset=%s&version=2'
                % self.revision)
            data = r.json()
            changesets = data['pushes'][data['lastpushid']]['changesets']

            for changeset in changesets:
                requests.get(
                    'https://uplift.shipit.staging.mozilla-releng.net/coverage/changeset/%s'
                    % changeset)
        except Exception as e:
            logger.warn('Error while requesting coverage data: ' + str(e))
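Each variant opens a ThreadPoolExecutorResult context manager, which is not part of the standard library. A minimal sketch of such a helper, assuming its job is simply to collect the submitted futures and re-raise the first worker exception when the with-block exits (an assumption, not the project's actual implementation):

import concurrent.futures


class ThreadPoolExecutorResult(concurrent.futures.ThreadPoolExecutor):
    # Sketch: remember submitted futures and surface worker exceptions on exit,
    # so a failed download or clone aborts go() instead of being silently dropped.

    def __init__(self, *args, **kwargs):
        self.futures = []
        super().__init__(*args, **kwargs)

    def submit(self, *args, **kwargs):
        future = super().submit(*args, **kwargs)
        self.futures.append(future)
        return future

    def __exit__(self, *exc_info):
        try:
            for future in concurrent.futures.as_completed(self.futures):
                future.result()  # re-raises any exception from a worker thread
        finally:
            super().__exit__(*exc_info)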
Example #2
    def go(self):
        with ThreadPoolExecutorResult(max_workers=2) as executor:
            # Thread 1 - Download coverage artifacts.
            executor.submit(self.artifactsHandler.download_all)

            # Thread 2 - Clone mozilla-central.
            executor.submit(self.clone_mozilla_central, self.revision)

        if self.from_pulse:
            self.githubUtils.update_geckodev_repo()

            commit_sha = self.githubUtils.get_commit(self.revision)
            logger.info('GitHub revision', revision=commit_sha)

            self.githubUtils.post_github_status(commit_sha)

            r = requests.get(
                'https://hg.mozilla.org/mozilla-central/json-rev/%s' %
                self.revision)
            r.raise_for_status()
            push_id = r.json()['pushid']

            output = grcov.report(self.artifactsHandler.get(),
                                  source_dir=self.repo_dir,
                                  service_number=push_id,
                                  commit_sha=commit_sha,
                                  token=secrets[secrets.COVERALLS_TOKEN])
            logger.info('Report generated successfully')

            with ThreadPoolExecutorResult(max_workers=2) as executor:
                executor.submit(uploader.coveralls, output)
                executor.submit(uploader.codecov, output, commit_sha)

            logger.info('Waiting for build to be ingested by Codecov...')
            # Wait until the build has been ingested by Codecov.
            if uploader.codecov_wait(commit_sha):
                logger.info('Build ingested by codecov.io')
                self.notifier.notify()
            else:
                logger.info('codecov.io took too much time to ingest data.')
        else:
            mkdir('code-coverage-reports')

            # XXX: Disabled as it is unused for now.
            # self.generate_suite_reports()

            report_generators.zero_coverage(self.artifactsHandler.get())

            self.generate_chunk_mapping()

            os.chdir('code-coverage-reports')
            self.githubUtils.update_codecoveragereports_repo()
Example #3
    def go(self):
        with ThreadPoolExecutorResult(max_workers=2) as executor:
            # Thread 1 - Download coverage artifacts.
            executor.submit(self.artifactsHandler.download_all)

            # Thread 2 - Clone mozilla-central.
            executor.submit(self.clone_mozilla_central, self.revision)

        if self.from_pulse:
            self.githubUtils.update_geckodev_repo()

            commit_sha = self.githubUtils.get_commit(self.revision)
            logger.info('GitHub revision', revision=commit_sha)

            self.githubUtils.post_github_status(commit_sha)

            output = self.generate_info(commit_sha)
            logger.info('Report generated successfully')

            with ThreadPoolExecutorResult(max_workers=2) as executor:
                executor.submit(uploader.coveralls, output)
                executor.submit(uploader.codecov, output, commit_sha)

            logger.info('Waiting for build to be ingested by Codecov...')
            # Wait until the build has been ingested by Codecov.
            if uploader.codecov_wait(commit_sha):
                logger.info('Build ingested by codecov.io')
                self.notifier.notify()
            else:
                logger.info('codecov.io took too much time to ingest data.')
        else:
            mkdir('code-coverage-reports')

            self.generate_per_suite_reports()

            self.generate_zero_coverage_report()

            self.generate_chunk_mapping()

            os.chdir('code-coverage-reports')
            self.githubUtils.update_codecoveragereports_repo()
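The bare mkdir() call in Examples #2 and #3 is not os.mkdir; Example #5 replaces it with os.makedirs('code-coverage-reports', exist_ok=True) in the same spot, so it is presumably a small idempotent wrapper along these lines (an assumption, not the project's actual helper):

import os


def mkdir(path):
    # Assumed helper: create the directory, tolerating the case where it already exists.
    os.makedirs(path, exist_ok=True)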
Example #4
    def prepopulate_cache(self, commit_sha):
        try:
            logger.info('Waiting for build to be ingested by Codecov...')
            # Wait until the build has been ingested by Codecov.
            if uploader.codecov_wait(commit_sha):
                logger.info('Build ingested by codecov.io')
            else:
                logger.info('codecov.io took too much time to ingest data.')
                return

            # Get pushlog and ask the backend to generate the coverage by changeset
            # data, which will be cached.
            r = requests.get('https://hg.mozilla.org/mozilla-central/json-pushes?changeset=%s&version=2&full' % self.revision)
            r.raise_for_status()
            data = r.json()
            changesets = data['pushes'][data['lastpushid']]['changesets']

            for changeset in changesets:
                if any(text in changeset['desc'] for text in ['r=merge', 'a=merge']):
                    continue

                requests.get('https://uplift.shipit.staging.mozilla-releng.net/coverage/changeset/%s' % changeset['node'])
        except Exception as e:
            logger.warn('Error while requesting coverage data', error=str(e))
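uploader.codecov_wait(commit_sha) appears in every variant as a blocking check that Codecov has ingested the uploaded report. Its body is not shown here; conceptually it is a poll-with-timeout loop, roughly like this generic sketch (the name wait_until, the predicate, and the timeout/interval values are illustrative assumptions, not the project's real implementation or API calls):

import time


def wait_until(is_ingested, timeout=3600, interval=60):
    # Poll is_ingested() until it returns a truthy value or the timeout expires.
    # A codecov_wait-style helper would pass a predicate that queries the
    # Codecov API for the given commit.
    deadline = time.time() + timeout
    while time.time() < deadline:
        if is_ingested():
            return True
        time.sleep(interval)
    return False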
Example #5
    def go(self):
        with ThreadPoolExecutorResult(max_workers=2) as executor:
            # Thread 1 - Download coverage artifacts.
            executor.submit(self.artifactsHandler.download_all)

            # Thread 2 - Clone mozilla-central.
            executor.submit(self.clone_mozilla_central, self.revision)

        if self.from_pulse:
            self.githubUtils.update_geckodev_repo()

            commit_sha = self.githubUtils.get_commit(self.revision)
            logger.info('GitHub revision', revision=commit_sha)

            self.githubUtils.post_github_status(commit_sha)

            r = requests.get('https://hg.mozilla.org/mozilla-central/json-rev/%s' % self.revision)
            r.raise_for_status()
            push_id = r.json()['pushid']

            output = grcov.report(
                self.artifactsHandler.get(),
                source_dir=self.repo_dir,
                service_number=push_id,
                commit_sha=commit_sha,
                token=secrets[secrets.COVERALLS_TOKEN]
            )
            logger.info('Report generated successfully')

            with ThreadPoolExecutorResult(max_workers=2) as executor:
                executor.submit(uploader.coveralls, output)
                executor.submit(uploader.codecov, output, commit_sha)

            logger.info('Waiting for build to be ingested by Codecov...')
            # Wait until the build has been ingested by Codecov.
            if uploader.codecov_wait(commit_sha):
                logger.info('Build ingested by codecov.io')
                self.notifier.notify()
            else:
                logger.error('codecov.io took too much time to ingest data.')
        else:
            os.makedirs('code-coverage-reports', exist_ok=True)

            self.generate_suite_reports()

            report_generators.zero_coverage(self.artifactsHandler.get())

            self.generate_chunk_mapping()

            # Index the task in the TaskCluster index.
            self.index_service.insertTask(
                'project.releng.services.project.{}.shipit_code_coverage.{}'.format(secrets[secrets.APP_CHANNEL], self.revision),
                {
                    'taskId': os.environ['TASK_ID'],
                    'rank': 0,
                    'data': {},
                    'expires': (datetime.utcnow() + timedelta(180)).strftime('%Y-%m-%dT%H:%M:%S.%fZ'),
                }
            )

            os.chdir('code-coverage-reports')
            self.githubUtils.update_codecoveragereports_repo()
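The insertTask call in Example #5 registers the task in the Taskcluster index under a per-revision route, which lets later consumers locate the report again. A hedged lookup sketch using the taskcluster Python client (the root URL, channel, and revision below are placeholders; the client options the project actually uses are not shown in the example):

import taskcluster

# Placeholder options: substitute the real Taskcluster root URL for your deployment.
index = taskcluster.Index({'rootUrl': 'https://taskcluster.example.com'})

namespace = 'project.releng.services.project.{}.shipit_code_coverage.{}'.format(
    'production', 'abcdef123456')  # placeholder channel and revision
indexed = index.findTask(namespace)
print(indexed['taskId'])  # the taskId stored by insertTask above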