Example No. 1
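A test that resolves the latest Linux and Windows build tasks, reads GECKO_HEAD_REV from each task's payload, and checks that get_task for mozilla-central at that revision returns the same task as get_last_task.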
def test_get_task():
    task_id = taskcluster.get_last_task('linux')
    task_data = taskcluster.get_task_details(task_id)
    revision = task_data['payload']['env']['GECKO_HEAD_REV']
    assert taskcluster.get_task('mozilla-central', revision, 'linux') == taskcluster.get_last_task('linux')

    task_id = taskcluster.get_last_task('win')
    task_data = taskcluster.get_task_details(task_id)
    revision = task_data['payload']['env']['GECKO_HEAD_REV']
    assert taskcluster.get_task('mozilla-central', revision, 'win') == taskcluster.get_last_task('win')
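
All of these snippets go through a project-level taskcluster helper module rather than the official Taskcluster client. As a point of reference, here is a minimal sketch of what get_task_details could look like, assuming it simply fetches the task definition from the public queue endpoint that Example No. 14 mocks (the helper's real implementation may differ):

import requests

def get_task_details(task_id):
    # Hypothetical sketch: fetch the task definition from the Taskcluster queue,
    # using the same endpoint that Example No. 14 mocks with the responses library.
    r = requests.get('https://queue.taskcluster.net/v1/task/{}'.format(task_id))
    r.raise_for_status()
    return r.json()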
Example No. 2
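A method that downloads the target.code-coverage-gcno.zip artifact of the given build task, then walks the coverage test tasks in the same task group and downloads their code-coverage-gcda.zip and code-coverage-jsvm.zip artifacts, collecting the suite names into self.suites.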
    def download_coverage_artifacts(self, build_task_id):
        try:
            os.mkdir('ccov-artifacts')
        except OSError as e:
            if e.errno != errno.EEXIST:
                raise e

        task_data = taskcluster.get_task_details(build_task_id)

        artifacts = taskcluster.get_task_artifacts(build_task_id)
        for artifact in artifacts:
            if 'target.code-coverage-gcno.zip' in artifact['name']:
                taskcluster.download_artifact(build_task_id, '', artifact)

        all_suites = set()

        tasks = taskcluster.get_tasks_in_group(task_data['taskGroupId'])
        test_tasks = [t for t in tasks if taskcluster.is_coverage_task(t)]
        for test_task in test_tasks:
            suite_name = taskcluster.get_suite_name(test_task)
            all_suites.add(suite_name)
            test_task_id = test_task['status']['taskId']
            artifacts = taskcluster.get_task_artifacts(test_task_id)
            for artifact in artifacts:
                if any(n in artifact['name'] for n in
                       ['code-coverage-gcda.zip', 'code-coverage-jsvm.zip']):
                    taskcluster.download_artifact(test_task_id, suite_name,
                                                  artifact)

        self.suites = list(all_suites)
        self.suites.sort()
Example No. 3
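A constructor that stores the cache root, the upload tokens and the gecko-dev credentials, and either resolves the latest build task and reads its GECKO_HEAD_REV or looks up the task for the given revision.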
    def __init__(self, revision, cache_root, coveralls_token, codecov_token,
                 gecko_dev_user, gecko_dev_pwd):
        # List of test suites, sorted alphabetically.
        # This way, the index of a suite in the array should be stable enough.
        self.suites = []

        self.cache_root = cache_root

        assert os.path.isdir(cache_root), 'Cache root {} is not a dir.'.format(
            cache_root)
        self.repo_dir = os.path.join(cache_root, 'mozilla-central')

        self.coveralls_token = coveralls_token
        self.codecov_token = codecov_token
        self.gecko_dev_user = gecko_dev_user
        self.gecko_dev_pwd = gecko_dev_pwd

        if revision is None:
            self.task_id = taskcluster.get_last_task()

            task_data = taskcluster.get_task_details(self.task_id)
            self.revision = task_data['payload']['env']['GECKO_HEAD_REV']
        else:
            self.task_id = taskcluster.get_task('mozilla-central', revision)
            self.revision = revision

        self.build_finished = False
        self.build_finished_cv = Condition()

        logger.info('Mercurial revision', revision=self.revision)
Example No. 4
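The main driver: it resolves the latest build task and revision, downloads the coverage artifacts, clones and builds mozilla-central, rewrites the JSVM LCOV files, maps the Mercurial revision to a GitHub commit, generates the report, uploads it to Coveralls and Codecov, and waits for Coveralls to ingest the build before generating per-directory coverage.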
    def go(self):
        task_id = taskcluster.get_last_task()

        task_data = taskcluster.get_task_details(task_id)
        revision = task_data['payload']['env']['GECKO_HEAD_REV']
        logger.info('Mercurial revision', revision=revision)

        self.download_coverage_artifacts(task_id)
        logger.info('Code coverage artifacts downloaded')

        self.clone_mozilla_central(revision)
        logger.info('mozilla-central cloned')
        self.build_files()
        logger.info('Build successful')

        self.rewrite_jsvm_lcov()
        logger.info('JSVM LCOV files rewritten')

        commit_sha = self.get_github_commit(revision)
        logger.info('GitHub revision', revision=commit_sha)

        coveralls_jobs = []

        # TODO: Process suites in parallel.
        # While we are uploading results for a suite, we can start to process the next one.
        # TODO: Re-enable when Coveralls and/or Codecov can properly handle the load.
        '''for suite in self.suites:
            output = self.generate_info(commit_sha, self.coveralls_token, suite)

            logger.info('Suite report generated', suite=suite)

            coveralls_jobs.append(uploader.coveralls(output))
            uploader.codecov(output, commit_sha, self.codecov_token, [suite.replace('-', '_')])'''

        output = self.generate_info(commit_sha, self.coveralls_token)
        logger.info('Report generated successfully')

        coveralls_jobs.append(uploader.coveralls(output))
        uploader.codecov(output, commit_sha, self.codecov_token)

        logger.info('Waiting for the build to be ingested by Coveralls...')
        # Wait until the build has been ingested by Coveralls.
        if all(uploader.coveralls_wait(job_url) for job_url in coveralls_jobs):
            logger.info('Build ingested by Coveralls')
        else:
            logger.info('Coveralls took too long to ingest the data.')

        coverage_by_dir.generate(self.repo_dir)
Example No. 5
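A constructor handling both a Linux and a Windows build task; without an explicit revision it falls back to the latest tasks with placeholder tokens and from_pulse set to False, otherwise it looks up the tasks for the given revision and keeps the real tokens with from_pulse set to True.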
    def __init__(self, revision, cache_root, coveralls_token, codecov_token,
                 gecko_dev_user, gecko_dev_pwd, client_id, access_token):
        # List of test suites, sorted alphabetically.
        # This way, the index of a suite in the array should be stable enough.
        self.suites = [
            'cppunit', 'gtest', 'web-platform-tests', 'talos',
        ]

        self.cache_root = cache_root

        assert os.path.isdir(cache_root), 'Cache root {} is not a dir.'.format(cache_root)
        self.repo_dir = os.path.join(cache_root, 'mozilla-central')

        self.gecko_dev_user = gecko_dev_user
        self.gecko_dev_pwd = gecko_dev_pwd
        self.client_id = client_id
        self.access_token = access_token

        if revision is None:
            self.task_ids = [
                taskcluster.get_last_task('linux'),
                taskcluster.get_last_task('win'),
            ]

            task_data = taskcluster.get_task_details(self.task_ids[0])
            self.revision = task_data['payload']['env']['GECKO_HEAD_REV']
            self.coveralls_token = 'NONE'
            self.codecov_token = 'NONE'
            self.from_pulse = False
        else:
            self.task_ids = [
                taskcluster.get_task('mozilla-central', revision, 'linux'),
                taskcluster.get_task('mozilla-central', revision, 'win'),
            ]
            self.revision = revision
            self.coveralls_token = coveralls_token
            self.codecov_token = codecov_token
            self.from_pulse = True

        self.build_finished = False
        self.build_finished_cv = Condition()

        if self.from_pulse:
            self.suites_to_ignore = ['awsy', 'talos']
        else:
            self.suites_to_ignore = []

        logger.info('Mercurial revision', revision=self.revision)
Example No. 6
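Collects the coverage test tasks of every build task group, keeps the task with the best status for each (chunk, platform) pair, and downloads the artifacts in parallel.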
    def download_all(self):
        os.makedirs(self.parent_dir, exist_ok=True)

        # The test tasks for the Linux and Windows builds are in the same group,
        # but the following code is generic and supports build tasks split into
        # separate groups.
        groups = set([
            taskcluster.get_task_details(build_task_id)['taskGroupId']
            for build_task_id in self.task_ids.values()
        ])
        test_tasks = [
            task for group in groups
            for task in taskcluster.get_tasks_in_group(group)
            if taskcluster.is_coverage_task(task)
        ]

        # Choose best tasks to download (e.g. 'completed' is better than 'failed')
        download_tasks = {}
        for test_task in test_tasks:
            status = test_task['status']['state']
            assert status in ALL_STATUSES

            chunk_name = taskcluster.get_chunk(
                test_task['task']['metadata']['name'])
            platform_name = taskcluster.get_platform(
                test_task['task']['metadata']['name'])
            # Ignore awsy and talos as they aren't actually suites of tests.
            if any(to_ignore in chunk_name
                   for to_ignore in self.suites_to_ignore):
                continue

            if (chunk_name, platform_name) not in download_tasks:
                # If the chunk hasn't been downloaded before, this is obviously the best task
                # to download it from.
                download_tasks[(chunk_name, platform_name)] = test_task
            else:
                # Otherwise, compare the status of this task with the previously selected task.
                prev_task = download_tasks[(chunk_name, platform_name)]

                prev_status = prev_task['status']['state']
                if STATUS_VALUE[status] > STATUS_VALUE[prev_status]:
                    download_tasks[(chunk_name, platform_name)] = test_task

        with ThreadPoolExecutorResult() as executor:
            # Only download the best task selected for each (chunk, platform) pair.
            for test_task in download_tasks.values():
                executor.submit(self.download, test_task)

        logger.info('Code coverage artifacts downloaded')
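
download_all relies on two module-level constants that are not part of the snippet, ALL_STATUSES and STATUS_VALUE. Below is a plausible sketch of them; the status names are taken from Example No. 12, and the ranking (higher is better, with 'completed' best, per the comment in the method) is an assumption:

# Hypothetical constants assumed by download_all above; the status names match
# Example No. 12, while the exact numeric ranking is an assumption.
FINISHED_STATUSES = ['completed', 'failed', 'exception']
ALL_STATUSES = FINISHED_STATUSES + ['unscheduled', 'pending', 'running']
STATUS_VALUE = {
    'unscheduled': 0,
    'pending': 1,
    'running': 2,
    'exception': 3,
    'failed': 4,
    'completed': 5,
}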
Example No. 7
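Downloads the grcov and JSVM coverage artifacts of every suite in the build's task group, skipping awsy and talos, and submits the JSVM LCOV rewrites to a thread pool.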
    def download_coverage_artifacts(self, build_task_id):
        try:
            os.mkdir('ccov-artifacts')
        except OSError as e:
            if e.errno != errno.EEXIST:
                raise e

        task_data = taskcluster.get_task_details(build_task_id)

        all_suites = set()

        def rewriting_task(path):
            return lambda: self.rewrite_jsvm_lcov(path)

        tasks = taskcluster.get_tasks_in_group(task_data['taskGroupId'])
        test_tasks = [t for t in tasks if taskcluster.is_coverage_task(t)]
        with ThreadPoolExecutorResult() as executor:
            for test_task in test_tasks:
                suite_name = taskcluster.get_suite_name(test_task)
                # Ignore awsy and talos as they aren't actually suites of tests.
                if any(to_ignore in suite_name
                       for to_ignore in ['awsy', 'talos']):
                    continue

                all_suites.add(suite_name)

                test_task_id = test_task['status']['taskId']
                for artifact in taskcluster.get_task_artifacts(test_task_id):
                    if not any(n in artifact['name'] for n in
                               ['code-coverage-grcov.zip', 'code-coverage-jsvm.zip']):
                        continue

                    artifact_path = taskcluster.download_artifact(
                        test_task_id, suite_name, artifact)
                    if 'code-coverage-jsvm.zip' in artifact['name']:
                        executor.submit(rewriting_task(artifact_path))

        self.suites = list(all_suites)
        self.suites.sort()

        logger.info('Code coverage artifacts downloaded')
Example No. 8
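An early module-level variant: it downloads the gcno artifact of the build task and the gcda artifact of every coverage test task in the same group.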
def download_coverage_artifacts(build_task_id):
    # Tolerate an already-existing directory without swallowing other errors.
    os.makedirs('ccov-artifacts', exist_ok=True)

    task_data = taskcluster.get_task_details(build_task_id)

    artifacts = taskcluster.get_task_artifacts(build_task_id)
    for artifact in artifacts:
        if 'target.code-coverage-gcno.zip' in artifact['name']:
            taskcluster.download_artifact(build_task_id, artifact)

    tasks = taskcluster.get_tasks_in_group(task_data['taskGroupId'])
    test_tasks = [t for t in tasks if is_coverage_task(t)]
    for test_task in test_tasks:
        test_task_id = test_task['status']['taskId']
        artifacts = taskcluster.get_task_artifacts(test_task_id)
        for artifact in artifacts:
            if 'code-coverage-gcda.zip' in artifact['name']:
                taskcluster.download_artifact(test_task_id, artifact)
Example No. 9
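Reads the Coveralls token from the Taskcluster secrets, resolves the latest build task and its revision, downloads the coverage artifacts, clones and builds mozilla-central, generates the report and uploads it to Coveralls.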
def go(secrets, client_id=None, client_token=None):
    tc_client = TaskclusterClient(client_id, client_token)

    secrets = tc_client.get_secrets(secrets, [TOKEN_FIELD])

    coveralls_token = secrets[TOKEN_FIELD]

    task_id = taskcluster.get_last_task()

    task_data = taskcluster.get_task_details(task_id)
    revision = task_data['payload']['env']['GECKO_HEAD_REV']
    logger.info('Revision %s' % revision)

    download_coverage_artifacts(task_id)

    clone_mozilla_central(revision)
    build_files()

    output = generate_info(revision, coveralls_token)

    coveralls.upload(output)
Example No. 10
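A (disabled) test checking that the task group of the latest Linux build task contains at least one task.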
def disable_test_get_tasks_in_group():
    task_id = taskcluster.get_last_task('linux')
    task_data = taskcluster.get_task_details(task_id)
    tasks = taskcluster.get_tasks_in_group(task_data['taskGroupId'])
    assert len(tasks) > 0
Example No. 11
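A (disabled) test checking that the task definition of the latest Linux build task contains a payload.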
def disable_test_get_task_details():
    task_id = taskcluster.get_last_task('linux')
    task_data = taskcluster.get_task_details(task_id)
    assert task_data is not None
    assert 'payload' in task_data
Example No. 12
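Downloads the coverage artifacts of the Linux and Windows builds in parallel: it waits for unfinished test tasks, picks the best status per (chunk, platform) pair under a lock, and rewrites the JSVM LCOV files as the artifacts arrive.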
    def download_coverage_artifacts(self):
        mkdir('ccov-artifacts')

        # The test tasks for the Linux and Windows builds are in the same group,
        # but the following code is generic and supports build tasks split into
        # separate groups.
        groups = set([
            taskcluster.get_task_details(build_task_id)['taskGroupId']
            for build_task_id in self.task_ids
        ])
        test_tasks = [
            task
            for group in groups
            for task in taskcluster.get_tasks_in_group(group)
            if taskcluster.is_coverage_task(task)
        ]

        FINISHED_STATUSES = ['completed', 'failed', 'exception']
        ALL_STATUSES = FINISHED_STATUSES + ['unscheduled', 'pending', 'running']

        downloaded_tasks = {}
        downloaded_tasks_lock = Lock()

        def should_download(status, chunk_name, platform_name):
            with downloaded_tasks_lock:
                if (chunk_name, platform_name) not in downloaded_tasks:
                    return True

                other_status = downloaded_tasks[(chunk_name, platform_name)]

                if (status == 'failed' and other_status == 'exception') or (status == 'completed' and other_status != 'completed'):
                    downloaded_tasks[(chunk_name, platform_name)] = status
                    return True
                else:
                    return False

        def download_artifact(test_task):
            status = test_task['status']['state']
            assert status in ALL_STATUSES
            while status not in FINISHED_STATUSES:
                time.sleep(60)
                status = taskcluster.get_task_status(test_task['status']['taskId'])['status']['state']
                assert status in ALL_STATUSES

            chunk_name = taskcluster.get_chunk_name(test_task)
            platform_name = taskcluster.get_platform_name(test_task)
            # Ignore awsy and talos as they aren't actually suites of tests.
            if any(to_ignore in chunk_name for to_ignore in self.suites_to_ignore):
                return

            # If we have already downloaded this chunk from another task, check if the
            # other task has a better status than this one.
            if not should_download(status, chunk_name, platform_name):
                return

            test_task_id = test_task['status']['taskId']
            for artifact in taskcluster.get_task_artifacts(test_task_id):
                if not any(n in artifact['name'] for n in ['code-coverage-grcov.zip', 'code-coverage-jsvm.zip']):
                    continue

                artifact_path = taskcluster.download_artifact(test_task_id, chunk_name, platform_name, artifact)
                logger.info('%s artifact downloaded' % artifact_path)
                if 'code-coverage-jsvm.zip' in artifact['name']:
                    self.rewrite_jsvm_lcov(artifact_path)
                    logger.info('%s artifact rewritten' % artifact_path)

        def download_artifact_task(test_task):
            return lambda: download_artifact(test_task)

        with ThreadPoolExecutorResult() as executor:
            for test_task in test_tasks:
                executor.submit(download_artifact_task(test_task))

        logger.info('Code coverage artifacts downloaded')
Example No. 13
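A test checking that the task definition of the latest build task contains a payload.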
def test_get_task_details():
    task_id = taskcluster.get_last_task()
    task_data = taskcluster.get_task_details(task_id)
    assert task_data is not None
    assert 'payload' in task_data
Example No. 14
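A test that mocks the Taskcluster queue endpoint with the responses library and asserts that get_task_details returns the mocked task definition.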
@responses.activate
def test_get_task_details(LINUX_TASK_ID, LINUX_TASK):
    responses.add(responses.GET, 'https://queue.taskcluster.net/v1/task/{}'.format(LINUX_TASK_ID), json=LINUX_TASK, status=200)
    assert taskcluster.get_task_details(LINUX_TASK_ID) == LINUX_TASK