def test_get_task():
    """get_task('mozilla-central', rev, platform) must resolve to the latest
    indexed coverage task for that platform.

    Fix: fetch the latest task exactly once per platform and compare against
    that id. The original called get_last_task twice, so a concurrent index
    update between the two calls could make the assertion flake; it also
    duplicated the whole linux/win body.
    """
    for platform in ['linux', 'win']:
        task_id = taskcluster.get_last_task(platform)
        task_data = taskcluster.get_task_details(task_id)
        # The task's environment records which mozilla-central revision it built.
        revision = task_data['payload']['env']['GECKO_HEAD_REV']
        assert taskcluster.get_task('mozilla-central', revision, platform) == task_id
def __init__(self, revision, cache_root, coveralls_token, codecov_token,
             gecko_dev_user, gecko_dev_pwd, client_id, access_token):
    """Prepare a coverage run.

    When *revision* is None, the latest indexed linux/win ccov tasks are used
    and upload tokens are disabled; otherwise the run targets that specific
    mozilla-central revision (pulse-triggered mode).
    """
    # List of test-suite, sorted alphabetically.
    # This way, the index of a suite in the array should be stable enough.
    self.suites = ['cppunit', 'gtest', 'web-platform-tests', 'talos']

    assert os.path.isdir(cache_root), 'Cache root {} is not a dir.'.format(cache_root)
    self.cache_root = cache_root
    self.repo_dir = os.path.join(cache_root, 'mozilla-central')

    self.gecko_dev_user = gecko_dev_user
    self.gecko_dev_pwd = gecko_dev_pwd
    self.client_id = client_id
    self.access_token = access_token

    if revision is None:
        # No explicit revision: run against the most recent indexed builds
        # and read the revision out of the linux task's environment.
        self.task_ids = [
            taskcluster.get_last_task('linux'),
            taskcluster.get_last_task('win'),
        ]
        task_data = taskcluster.get_task_details(self.task_ids[0])
        self.revision = task_data['payload']['env']['GECKO_HEAD_REV']
        self.coveralls_token = 'NONE'
        self.codecov_token = 'NONE'
        self.from_pulse = False
    else:
        # Pulse-triggered run for a specific mozilla-central revision.
        self.task_ids = [
            taskcluster.get_task('mozilla-central', revision, 'linux'),
            taskcluster.get_task('mozilla-central', revision, 'win'),
        ]
        self.revision = revision
        self.coveralls_token = coveralls_token
        self.codecov_token = codecov_token
        self.from_pulse = True

    self.build_finished = False
    self.build_finished_cv = Condition()

    # Pulse runs skip suites whose results are not useful for upload.
    self.suites_to_ignore = ['awsy', 'talos'] if self.from_pulse else []

    logger.info('Mercurial revision', revision=self.revision)
def __init__(self, revision, cache_root, coveralls_token, codecov_token,
             gecko_dev_user, gecko_dev_pwd):
    """Prepare a coverage run for *revision*, or for the latest indexed
    coverage build when *revision* is None."""
    # List of test-suite, sorted alphabetically.
    # This way, the index of a suite in the array should be stable enough.
    self.suites = []

    assert os.path.isdir(cache_root), 'Cache root {} is not a dir.'.format(cache_root)
    self.cache_root = cache_root
    self.repo_dir = os.path.join(cache_root, 'mozilla-central')

    self.coveralls_token = coveralls_token
    self.codecov_token = codecov_token
    self.gecko_dev_user = gecko_dev_user
    self.gecko_dev_pwd = gecko_dev_pwd

    if revision is not None:
        self.task_id = taskcluster.get_task('mozilla-central', revision)
        self.revision = revision
    else:
        # Fall back to the most recent indexed build and read its revision
        # from the task's environment.
        self.task_id = taskcluster.get_last_task()
        details = taskcluster.get_task_details(self.task_id)
        self.revision = details['payload']['env']['GECKO_HEAD_REV']

    self.build_finished = False
    self.build_finished_cv = Condition()

    logger.info('Mercurial revision', revision=self.revision)
def go(self):
    """Run the full coverage pipeline.

    Downloads the latest coverage artifacts, clones/builds mozilla-central,
    rewrites JSVM LCOV output, uploads the report to Coveralls and Codecov,
    waits for Coveralls ingestion, then generates per-directory coverage.

    Fix: the log messages misspelled "ingested" as "injested" (three
    occurrences); also use a generator expression instead of a list in all().
    """
    task_id = taskcluster.get_last_task()
    task_data = taskcluster.get_task_details(task_id)
    revision = task_data['payload']['env']['GECKO_HEAD_REV']
    logger.info('Mercurial revision', revision=revision)

    self.download_coverage_artifacts(task_id)
    logger.info('Code coverage artifacts downloaded')

    self.clone_mozilla_central(revision)
    logger.info('mozilla-central cloned')

    self.build_files()
    logger.info('Build successful')

    self.rewrite_jsvm_lcov()
    logger.info('JSVM LCOV files rewritten')

    # Map the Mercurial revision to the mirrored GitHub commit for upload.
    commit_sha = self.get_github_commit(revision)
    logger.info('GitHub revision', revision=commit_sha)

    coveralls_jobs = []

    # TODO: Process suites in parallel.
    # While we are uploading results for a suite, we can start to process the next one.
    # TODO: Reenable when Coveralls and/or Codecov will be able to properly handle the load.
    '''for suite in self.suites:
        output = self.generate_info(commit_sha, self.coveralls_token, suite)
        logger.info('Suite report generated', suite=suite)
        coveralls_jobs.append(uploader.coveralls(output))
        uploader.codecov(output, commit_sha, self.codecov_token, [suite.replace('-', '_')])'''

    output = self.generate_info(commit_sha, self.coveralls_token)
    logger.info('Report generated successfully')
    coveralls_jobs.append(uploader.coveralls(output))
    uploader.codecov(output, commit_sha, self.codecov_token)

    logger.info('Waiting for build to be ingested by Coveralls...')

    # Wait until the build has been ingested by Coveralls.
    if all(uploader.coveralls_wait(job_url) for job_url in coveralls_jobs):
        logger.info('Build ingested by coveralls')
    else:
        logger.info('Coveralls took too much time to ingest data.')

    coverage_by_dir.generate(self.repo_dir)
def go(secrets, client_id=None, client_token=None):
    """Fetch the Coveralls token from Taskcluster secrets, then download,
    build and upload coverage for the latest indexed coverage task."""
    client = TaskclusterClient(client_id, client_token)
    # get_secrets validates that TOKEN_FIELD is present in the response.
    coveralls_token = client.get_secrets(secrets, [TOKEN_FIELD])[TOKEN_FIELD]

    task_id = taskcluster.get_last_task()
    details = taskcluster.get_task_details(task_id)
    revision = details['payload']['env']['GECKO_HEAD_REV']
    logger.info('Revision %s' % revision)

    download_coverage_artifacts(task_id)
    clone_mozilla_central(revision)
    build_files()

    report = generate_info(revision, coveralls_token)
    coveralls.upload(report)
def disable_test_last_task():
    """A latest coverage task should be indexed for each platform (disabled)."""
    for platform in ('linux', 'win'):
        assert taskcluster.get_last_task(platform) is not None
def disable_test_get_tasks_in_group():
    """The latest linux task's group should contain tasks (disabled)."""
    latest = taskcluster.get_last_task('linux')
    details = taskcluster.get_task_details(latest)
    group_tasks = taskcluster.get_tasks_in_group(details['taskGroupId'])
    assert len(group_tasks) > 0
def disable_test_get_task_artifacts():
    """The latest linux task should expose at least one artifact (disabled)."""
    latest = taskcluster.get_last_task('linux')
    assert len(taskcluster.get_task_artifacts(latest)) > 0
def disable_test_get_task_details():
    """Details for the latest linux task should include a payload (disabled)."""
    details = taskcluster.get_task_details(taskcluster.get_last_task('linux'))
    assert details is not None
    assert 'payload' in details
def disable_test_get_task_status():
    """Status for the latest linux task should carry a state field (disabled)."""
    latest = taskcluster.get_last_task('linux')
    status = taskcluster.get_task_status(latest)
    assert status is not None
    assert 'status' in status
    assert 'state' in status['status']
def test_get_task_details():
    """Details for the latest coverage task should include a payload."""
    details = taskcluster.get_task_details(taskcluster.get_last_task())
    assert details is not None
    assert 'payload' in details
def test_last_task():
    """The latest coverage task should be indexed."""
    latest = taskcluster.get_last_task()
    assert latest is not None
def test_get_task_artifacts():
    """The latest coverage task should expose at least one artifact."""
    latest = taskcluster.get_last_task()
    assert len(taskcluster.get_task_artifacts(latest)) > 0
def test_last_task_failure(TASK_NOT_FOUND):
    """get_last_task should raise HTTPError when the index lookup 404s."""
    url = 'https://index.taskcluster.net/v1/task/gecko.v2.mozilla-central.latest.firefox.linux64-ccov-opt'  # noqa
    responses.add(responses.GET, url, json=TASK_NOT_FOUND, status=404)
    with pytest.raises(requests.exceptions.HTTPError):
        taskcluster.get_last_task('linux')
def test_last_task_windows(WIN_TASK_ID, LATEST_WIN):
    """get_last_task('win') should return the id from the windows index entry."""
    url = 'https://index.taskcluster.net/v1/task/gecko.v2.mozilla-central.latest.firefox.win64-ccov-debug'  # noqa
    responses.add(responses.GET, url, json=LATEST_WIN, status=200)
    assert taskcluster.get_last_task('win') == WIN_TASK_ID
def test_last_task_linux(LINUX_TASK_ID, LATEST_LINUX):
    """get_last_task('linux') should return the id from the linux index entry."""
    url = 'https://index.taskcluster.net/v1/task/gecko.v2.mozilla-central.latest.firefox.linux64-ccov-opt'  # noqa
    responses.add(responses.GET, url, json=LATEST_LINUX, status=200)
    assert taskcluster.get_last_task('linux') == LINUX_TASK_ID