def gcp(repository, revision, data):
    '''
    Upload a grcov raw report on Google Cloud Storage
    * Compress with zstandard
    * Upload on bucket using revision in name
    * Trigger ingestion on channel's backend
    '''
    assert isinstance(data, bytes)
    bucket = get_bucket(secrets[secrets.GOOGLE_CLOUD_STORAGE])

    # Compress report
    compressor = zstd.ZstdCompressor()
    archive = compressor.compress(data)

    # Upload archive
    path = GCP_COVDIR_PATH.format(repository=repository, revision=revision)
    blob = bucket.blob(path)
    blob.upload_from_string(archive)

    # Update headers
    blob.content_type = 'application/json'
    blob.content_encoding = 'zstd'
    blob.patch()

    logger.info('Uploaded {} on {}'.format(path, bucket))

    # Trigger ingestion on backend
    utils.retry(lambda: gcp_ingest(repository, revision), retries=5)

    return blob

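# Hedged sketch (not part of the original snippets): the read side of gcp()
# above. It assumes the same get_bucket, GCP_COVDIR_PATH and secrets helpers,
# and that the stored report is zstd-compressed JSON, as the blob headers set
# above suggest.
import json

import zstandard as zstd


def gcp_download(repository, revision):
    '''
    Download and decompress a covdir report previously uploaded by gcp()
    '''
    bucket = get_bucket(secrets[secrets.GOOGLE_CLOUD_STORAGE])
    path = GCP_COVDIR_PATH.format(repository=repository, revision=revision)
    blob = bucket.blob(path)

    # The upload stores a zstd frame; decompress it back into the raw JSON report
    archive = blob.download_as_string()
    data = zstd.ZstdDecompressor().decompress(archive)
    return json.loads(data.decode('utf-8'))
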
def update_codecoveragereports_repo(self):
    if self.gecko_dev_user is None or self.gecko_dev_pwd is None:
        return

    run_check(['git', 'config', '--global', 'http.postBuffer', '12M'])
    run_check(['git', 'config', '--global', 'user.email', '*****@*****.**'])
    run_check(['git', 'config', '--global', 'user.name', 'Report Uploader'])

    repo_url = 'https://%s:%s@github.com/marco-c/code-coverage-reports' % (self.gecko_dev_user, self.gecko_dev_pwd)

    run_check(['git', 'init'])
    run_check(['git', 'add', '*'])
    run_check(['git', 'commit', '-m', 'Coverage reports upload'])
    retry(lambda: run_check(['git', 'push', repo_url, 'master', '--force']))

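# Hedged sketch (assumption, not the project's code): a minimal run_check
# helper matching how the git snippets here call it, i.e. a command list plus
# an optional cwd, failing loudly on a non-zero exit code. The real helper may
# capture or log output differently.
import subprocess


def run_check(cmd, cwd=None):
    # Raises CalledProcessError when the command exits with a non-zero status,
    # which lets the surrounding retry() wrappers re-run it.
    return subprocess.run(cmd, cwd=cwd, check=True)
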
def download_artifact(artifact_path, task_id, artifact_name):
    if os.path.exists(artifact_path):
        return artifact_path

    def perform_download():
        r = requests.get(queue_base + 'task/%s/artifacts/%s' % (task_id, artifact_name), stream=True)

        with open(artifact_path, 'wb') as f:
            r.raw.decode_content = True
            shutil.copyfileobj(r.raw, f)

        if artifact_path.endswith('.zip') and not is_zipfile(artifact_path):
            raise BadZipFile('File is not a zip file')

    retry(perform_download)

def test_retry():
    assert utils.retry(lambda: True) is True
    assert utils.retry(lambda: False) is False
    with pytest.raises(Exception):
        utils.retry(do_raise, wait_between_retries=0)

    i = {}

    def try_twice():
        if 'tried' in i:
            return
        else:
            i['tried'] = True
            raise Exception('Please try again.')

    assert utils.retry(try_twice, wait_between_retries=0) is None

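# Hedged sketch (assumption): a minimal retry/utils.retry consistent with the
# test above and with the call sites in these snippets (an operation callable,
# a retries count and a wait_between_retries delay). The real implementation
# may differ in defaults and logging.
import time


def retry(operation, retries=5, wait_between_retries=30):
    for i in range(retries):
        try:
            # Return whatever the operation returns on the first success
            return operation()
        except Exception:
            # Out of attempts: let the last exception propagate to the caller
            if i == retries - 1:
                raise
            time.sleep(wait_between_retries)
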
def update_codecoveragereports_repo(self):
    if self.gecko_dev_user is None or self.gecko_dev_pwd is None:
        return

    run_check(['git', 'config', '--global', 'http.postBuffer', '12M'])
    run_check(
        ['git', 'config', '--global', 'user.email', '*****@*****.**'])
    run_check(
        ['git', 'config', '--global', 'user.name', 'Report Uploader'])

    repo_url = 'https://%s:%s@github.com/marco-c/code-coverage-reports' % (
        self.gecko_dev_user, self.gecko_dev_pwd)

    run_check(['git', 'init'])
    run_check(['git', 'add', '*'])
    run_check(['git', 'commit', '-m', 'Coverage reports upload'])
    retry(
        lambda: run_check(['git', 'push', repo_url, 'master', '--force']))

def get_latest_codecov():
    def get_latest_codecov_int():
        r = requests.get('https://codecov.io/api/gh/{}?access_token={}'.format(secrets[secrets.CODECOV_REPO], secrets[secrets.CODECOV_ACCESS_TOKEN]))
        r.raise_for_status()
        return r.json()['commit']['commitid']

    return utils.retry(get_latest_codecov_int)

def download_artifact(artifact_path, task_id, artifact_name):
    if os.path.exists(artifact_path):
        return artifact_path

    def perform_download():
        r = requests.get(queue_base + 'task/{}/artifacts/{}'.format(task_id, artifact_name), stream=True)
        r.raise_for_status()

        with open(artifact_path, 'wb') as f:
            r.raw.decode_content = True
            shutil.copyfileobj(r.raw, f)

        if artifact_path.endswith('.zip') and not is_zipfile(artifact_path):
            raise BadZipFile('File is not a zip file')

    retry(perform_download)

def notify(self):
    content = ''

    # Get pushlog and ask the backend to generate the coverage by changeset
    # data, which will be cached.
    with hgmo.HGMO(self.repo_dir) as url:
        url += '/json-pushes'
        r = requests.get(url, params={
            'changeset': self.revision,
            'version': 2,
            'full': 1
        })
        r.raise_for_status()
        push_data = r.json()

    changesets = sum(
        (data['changesets'] for data in push_data['pushes'].values()), [])

    for changeset in changesets:
        desc = changeset['desc'].split('\n')[0]
        if any(text in desc for text in ['r=merge', 'a=merge']):
            continue

        rev = changeset['node']

        try:
            coverage = retry(lambda: self.get_coverage_summary(rev))
        except (requests.exceptions.HTTPError, ResultNotReadyException):
            logger.warn('Failure to retrieve coverage summary')
            continue

        if coverage['commit_covered'] < 0.2 * coverage['commit_added']:
            content += '* [{}](https://firefox-code-coverage.herokuapp.com/#/changeset/{}): {} covered out of {} added.\n'.format(
                desc, rev, coverage['commit_covered'], coverage['commit_added'])  # noqa

    if content == '':
        return
    elif len(content) > 102400:
        # Content is 102400 chars max
        content = content[:102000] + '\n\n... Content max limit reached!'

    for email in secrets[secrets.EMAIL_ADDRESSES]:
        self.notify_service.email({
            'address': email,
            'subject': 'Coverage patches for {}'.format(self.revision),
            'content': content,
            'template': 'fullscreen',
        })

def git_to_mercurial(self, github_commit):
    def mercurial_to_git():
        r = requests.get('{}/gecko-dev/rev/git/{}'.format(self.hg_git_mapper, github_commit))
        if not r.ok:
            raise Exception('Failed mapping git commit to mercurial commit.')

        return r.text.split(' ')[1]

    return retry(mercurial_to_git, retries=30)

def git_to_mercurial(self, github_commit):
    def mercurial_to_git():
        r = requests.get('{}/gecko-dev/rev/git/{}'.format(
            self.hg_git_mapper, github_commit))
        if not r.ok:
            raise Exception(
                'Failed mapping git commit to mercurial commit.')

        return r.text.split(' ')[1]

    return retry(mercurial_to_git, retries=30)

def get_mercurial(self, github_commit):
    def get_commit():
        r = requests.get(
            'https://api.pub.build.mozilla.org/mapper/gecko-dev/rev/git/%s' % github_commit)
        if not r.ok:
            raise Exception(
                'Failed mapping git commit to mercurial commit.')

        return r.text.split(' ')[1]

    return retry(get_commit, retries=30)

def mercurial_to_git(self, mercurial_commit):
    def mercurial_to_git():
        r = requests.get('{}/gecko-dev/rev/hg/{}'.format(
            self.hg_git_mapper, mercurial_commit))
        if not r.ok:
            raise Exception(
                'Mercurial commit is not available yet on mozilla/gecko-dev.'
            )

        return r.text.split(' ')[0]

    return retry(mercurial_to_git, retries=30)

def get_commit(self, mercurial_commit):
    def get_commit():
        r = requests.get(
            'https://api.pub.build.mozilla.org/mapper/gecko-dev/rev/hg/%s' % mercurial_commit)
        if not r.ok:
            raise Exception(
                'Mercurial commit is not available yet on mozilla/gecko-dev.'
            )

        return r.text.split(' ')[0]

    return retry(get_commit, retries=30)

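# Hedged sketch (assumption): the mapper helpers above only differ in the
# direction segment of the URL ('git' or 'hg') and in which half of the
# "<git sha> <hg sha>" reply they return. A single parameterised version could
# look like this; the mapper_url default mirrors the hardcoded endpoint above.
import requests


def map_commit(commit, direction,
               mapper_url='https://api.pub.build.mozilla.org/mapper'):
    assert direction in ('git', 'hg')
    r = requests.get('{}/gecko-dev/rev/{}/{}'.format(mapper_url, direction, commit))
    if not r.ok:
        raise Exception('Failed mapping {} commit {}.'.format(direction, commit))

    # The service answers with "<git sha> <hg sha>" on a single line
    git_sha, hg_sha = r.text.split(' ')[:2]
    return hg_sha if direction == 'git' else git_sha
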
def notify(self):
    content = ''

    # Get pushlog and ask the backend to generate the coverage by changeset
    # data, which will be cached.
    r = requests.get(
        'https://hg.mozilla.org/mozilla-central/json-pushes?changeset=%s&version=2&full' % self.revision)
    r.raise_for_status()
    push_data = r.json()

    changesets = sum(
        (data['changesets'] for data in push_data['pushes'].values()), [])

    for changeset in changesets:
        desc = changeset['desc'].split('\n')[0]
        if any(text in desc for text in ['r=merge', 'a=merge']):
            continue

        try:
            rev = changeset['node']
            coverage = retry(lambda: self.get_coverage_summary(rev), retries=10)
            if coverage is None:
                continue

            if coverage['commit_covered'] < 0.2 * coverage['commit_added']:
                content += '* [%s](https://firefox-code-coverage.herokuapp.com/#/changeset/%s): %d covered out of %d added.\n' % (
                    desc, rev, coverage['commit_covered'], coverage['commit_added'])  # noqa
        except HTTPError as e:
            continue

    if content == '':
        return
    elif len(content) > 102400:
        # Content is 102400 chars max
        content = content[:102000] + '\n\n... Content max limit reached!'

    for email in secrets[secrets.EMAIL_ADDRESSES]:
        self.notify_service.email({
            'address': email,
            'subject': 'Coverage patches for %s' % self.revision,
            'content': content,
            'template': 'fullscreen',
        })

def clone_mozilla_central(self, revision):
    shared_dir = self.repo_dir + '-shared'
    cmd = hglib.util.cmdbuilder('robustcheckout',
                                'https://hg.mozilla.org/mozilla-central',
                                self.repo_dir,
                                purge=True,
                                sharebase=shared_dir,
                                branch=b'tip')

    cmd.insert(0, hglib.HGPATH)

    def do_clone():
        proc = hglib.util.popen(cmd)
        out, err = proc.communicate()
        if proc.returncode:
            raise hglib.error.CommandError(cmd, proc.returncode, out, err)

        hg = hglib.open(self.repo_dir)
        hg.update(rev=revision, clean=True)

    retry(do_clone)

    logger.info('mozilla-central cloned')

def codecov_wait(commit):
    class TotalsNoneError(Exception):
        pass

    def check_codecov_job():
        data = get_codecov(commit)
        totals = data['commit']['totals']
        if totals is None:
            raise TotalsNoneError()
        return True

    try:
        return utils.retry(check_codecov_job, retries=30)
    except TotalsNoneError:
        return False

def codecov_wait(commit):
    class TotalsNoneError(Exception):
        pass

    def check_codecov_job():
        r = requests.get('https://codecov.io/api/gh/{}/commit/{}?access_token={}'.format(secrets[secrets.CODECOV_REPO], commit, secrets[secrets.CODECOV_ACCESS_TOKEN]))  # noqa
        r.raise_for_status()
        totals = r.json()['commit']['totals']
        if totals is None:
            raise TotalsNoneError()
        return True

    try:
        return utils.retry(check_codecov_job, retries=30)
    except TotalsNoneError:
        return False

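# Hedged sketch (assumption): the get_codecov helper used by the first
# codecov_wait variant above, reconstructed from the inline request in the
# second variant; the real helper may differ.
import requests


def get_codecov(commit):
    r = requests.get('https://codecov.io/api/gh/{}/commit/{}?access_token={}'.format(
        secrets[secrets.CODECOV_REPO], commit, secrets[secrets.CODECOV_ACCESS_TOKEN]))
    r.raise_for_status()
    return r.json()
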
def mercurial_to_git(self, mercurial_commit):
    def mercurial_to_git():
        r = requests.get('{}/gecko-dev/rev/hg/{}'.format(self.hg_git_mapper, mercurial_commit))
        if not r.ok:
            for email in secrets[secrets.REPO_MAPPER_EMAIL_ADDRESSES]:
                self.notify_service.email({
                    'address': email,
                    'subject': 'Missing commit in the mapper service',
                    'content': f'Mercurial commit {mercurial_commit} is missing on the mapper service.',
                    'template': 'fullscreen',
                })

            raise Exception('Mercurial commit is not available yet on mozilla/gecko-dev.')

        return r.text.split(' ')[0]

    return retry(mercurial_to_git, retries=30)

def update_geckodev_repo(self):
    if self.gecko_dev_user is None or self.gecko_dev_pwd is None:
        return

    run_check(['git', 'config', '--global', 'http.postBuffer', '12M'])

    repo_url = 'https://%s:%s@github.com/marco-c/gecko-dev' % (self.gecko_dev_user, self.gecko_dev_pwd)
    repo_path = os.path.join(self.cache_root, 'gecko-dev')

    if not os.path.isdir(repo_path):
        retry(lambda: run_check(['git', 'clone', repo_url], cwd=self.cache_root))

    retry(lambda: run_check(['git', 'pull', 'https://github.com/mozilla/gecko-dev', 'master'], cwd=repo_path))
    retry(lambda: run_check(['git', 'push', repo_url, 'master'], cwd=repo_path))

def get_build_task_in_group(self, group_id):
    if group_id in self.triggered_groups:
        logger.info('Received duplicated groupResolved notification', group=group_id)
        return None

    def maybe_trigger(tasks):
        for task in tasks:
            if self.is_coverage_task(task):
                self.triggered_groups.add(group_id)
                return task

        return None

    list_url = 'https://queue.taskcluster.net/v1/task-group/{}/list'.format(
        group_id)

    def retrieve_coverage_task():
        r = requests.get(list_url, params={'limit': 200})
        r.raise_for_status()
        reply = r.json()
        task = maybe_trigger(reply['tasks'])

        while task is None and 'continuationToken' in reply:
            r = requests.get(list_url, params={
                'limit': 200,
                'continuationToken': reply['continuationToken']
            })
            r.raise_for_status()
            reply = r.json()
            task = maybe_trigger(reply['tasks'])

        return task

    try:
        return retry(retrieve_coverage_task)
    except requests.exceptions.HTTPError:
        return None

def mercurial_to_git(self, mercurial_commit):
    def mercurial_to_git():
        r = requests.get('{}/gecko-dev/rev/hg/{}'.format(
            self.hg_git_mapper, mercurial_commit))
        if not r.ok:
            for email in secrets[secrets.REPO_MAPPER_EMAIL_ADDRESSES]:
                self.notify_service.email({
                    'address': email,
                    'subject': 'Missing commit in the mapper service',
                    'content': f'Mercurial commit {mercurial_commit} is missing on the mapper service.',
                    'template': 'fullscreen',
                })

            raise Exception(
                'Mercurial commit is not available yet on mozilla/gecko-dev.'
            )

        return r.text.split(' ')[0]

    return retry(mercurial_to_git, retries=30)

def update_geckodev_repo(self):
    if self.gecko_dev_user is None or self.gecko_dev_pwd is None:
        return

    run_check(['git', 'config', '--global', 'http.postBuffer', '12M'])

    repo_url = 'https://%s:%s@github.com/marco-c/gecko-dev' % (
        self.gecko_dev_user, self.gecko_dev_pwd)
    repo_path = os.path.join(self.cache_root, 'gecko-dev')

    if not os.path.isdir(repo_path):
        retry(lambda: run_check(['git', 'clone', repo_url], cwd=self.cache_root))

    retry(lambda: run_check(
        ['git', 'pull', 'https://github.com/mozilla/gecko-dev', 'master'],
        cwd=repo_path))
    retry(lambda: run_check(['git', 'push', repo_url, 'master'], cwd=repo_path))

def notify(self):
    content = ''

    # Get pushlog and ask the backend to generate the coverage by changeset
    # data, which will be cached.
    with hgmo.HGMO(self.repo_dir) as hgmo_server:
        changesets = hgmo_server.get_automation_relevance_changesets(self.revision)

    for changeset in changesets:
        desc = changeset['desc'].split('\n')[0]
        if any(text in desc for text in ['r=merge', 'a=merge']):
            continue

        rev = changeset['node']

        try:
            coverage = retry(lambda: self.get_coverage_summary(rev))
        except (requests.exceptions.HTTPError, ResultNotReadyException):
            logger.warn('Failure to retrieve coverage summary')
            continue

        if coverage['commit_covered'] < 0.2 * coverage['commit_added']:
            content += '* [{}](https://firefox-code-coverage.herokuapp.com/#/changeset/{}): {} covered out of {} added.\n'.format(desc, rev, coverage['commit_covered'], coverage['commit_added'])  # noqa

    if content == '':
        return
    elif len(content) > 102400:
        # Content is 102400 chars max
        content = content[:102000] + '\n\n... Content max limit reached!'

    for email in secrets[secrets.EMAIL_ADDRESSES]:
        self.notify_service.email({
            'address': email,
            'subject': 'Coverage patches for {}'.format(self.revision),
            'content': content,
            'template': 'fullscreen',
        })

def get_build_task_in_group(self, group_id):
    if group_id in self.triggered_groups:
        logger.info('Received duplicated groupResolved notification', group=group_id)
        return None

    def maybe_trigger(tasks):
        for task in tasks:
            if self.is_coverage_task(task):
                self.triggered_groups.add(group_id)
                return task

        return None

    list_url = 'https://queue.taskcluster.net/v1/task-group/{}/list'.format(group_id)

    def retrieve_coverage_task():
        r = requests.get(list_url, params={
            'limit': 200
        })
        r.raise_for_status()
        reply = r.json()
        task = maybe_trigger(reply['tasks'])

        while task is None and 'continuationToken' in reply:
            r = requests.get(list_url, params={
                'limit': 200,
                'continuationToken': reply['continuationToken']
            })
            r.raise_for_status()
            reply = r.json()
            task = maybe_trigger(reply['tasks'])

        return task

    try:
        return retry(retrieve_coverage_task)
    except requests.exceptions.HTTPError:
        return None

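# Hedged sketch (assumption): a generator form of the pagination loop above,
# iterating over every task in a Taskcluster task group by following
# continuationToken until the listing is exhausted.
import requests


def iter_group_tasks(group_id, limit=200):
    list_url = 'https://queue.taskcluster.net/v1/task-group/{}/list'.format(group_id)
    params = {'limit': limit}

    while True:
        r = requests.get(list_url, params=params)
        r.raise_for_status()
        reply = r.json()

        for task in reply['tasks']:
            yield task

        if 'continuationToken' not in reply:
            break

        # Carry the token over to fetch the next page
        params = {'limit': limit, 'continuationToken': reply['continuationToken']}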