def to_PR(self, start_build=False):
    # Resolve the target SHA: use the recorded one if present, otherwise look
    # up the latest SHA for the target ref.
    if self.target_sha is None:
        target_sha = latest_sha_for_ref(self.target_ref)
    else:
        target_sha = self.target_sha
    target = FQSHA(self.target_ref, target_sha)
    # Build a fresh internal PR from the GitHub-side data.
    pr = PR.fresh(self.source, target, self.number, self.title)
    if start_build:
        return pr.build_it()
    else:
        return pr
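# Illustrative sketch (not part of the original file): converting a GitHubPR
# built from the API payload into an internal PR and kicking off its build.
# `pull_json` is a hypothetical stand-in for one element of open_pulls(...);
# with start_build=True, to_PR returns the result of PR.build_it() rather than
# the fresh PR itself.
#
#     gh_pr = GitHubPR.from_gh_json(pull_json)
#     pr = gh_pr.to_PR(start_build=True)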
def refresh_github_state():
    for target_repo in prs.watched_repos():
        try:
            pulls = open_pulls(target_repo)
            pulls_by_target = collections.defaultdict(list)
            # Cache the latest SHA per target ref so each ref is fetched from
            # GitHub only once, not once per open pull request.
            latest_target_shas = {}
            for pull in pulls:
                gh_pr = GitHubPR.from_gh_json(pull)
                if gh_pr.target_ref not in latest_target_shas:
                    latest_target_shas[gh_pr.target_ref] = latest_sha_for_ref(
                        gh_pr.target_ref)
                sha = latest_target_shas[gh_pr.target_ref]
                gh_pr.target_sha = sha
                pulls_by_target[gh_pr.target_ref].append(gh_pr)
            refresh_pulls(target_repo, pulls_by_target)
            refresh_reviews(pulls_by_target)
        except Exception as e:
            log.exception(
                f'could not refresh state for {target_repo.short_str()} due to {e}')
    return '', 200
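# Illustrative sketch (assumption, not from the original file): the grouping
# built above maps each target ref to the open GitHubPRs that target it, with
# every entry stamped with the same freshly fetched target SHA, roughly:
#
#     pulls_by_target = {
#         some_target_ref: [gh_pr_1, gh_pr_2],  # each with target_sha set
#     }
#
# refresh_pulls and refresh_reviews then consume that grouping per target ref.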
def try_deploy(self, target_ref):
    assert isinstance(target_ref, FQRef)
    assert self.is_deployable_target_ref(target_ref), \
        f'{target_ref} is non-deployable {[(ref.short_str(), deployable) for ref, deployable in self._watched_targets.items()]}'
    # Never run two deploy jobs for the same target ref at once.
    old_job = self.deploy_jobs.get(target_ref, None)
    if old_job is not None:
        log.info(f'will not deploy while deploy job {old_job.id} is running')
        return
    # Skip the deploy if the latest SHA on the target is already deployed.
    latest_sha = latest_sha_for_ref(target_ref)
    if latest_sha == self.latest_deployed[target_ref]:
        log.info(f'already deployed {latest_sha}')
        return
    try:
        img = get_image_for_target(target_ref)
        attributes = {
            'target': json.dumps(FQSHA(target_ref, latest_sha).to_json()),
            'image': img,
            'type': DEPLOY_JOB_TYPE
        }
        env = {
            'DEPLOY_REPO_URL': target_ref.repo.url,
            'DEPLOY_BRANCH': target_ref.name,
            'DEPLOY_SHA': latest_sha
        }
        # Mount the host Docker socket so the deploy script can build and push
        # images from inside the job.
        volumes = [{
            'volume': {
                'name': 'docker-sock-volume',
                'hostPath': {
                    'path': '/var/run/docker.sock',
                    'type': 'File'
                }
            },
            'volume_mount': {
                'mountPath': '/var/run/docker.sock',
                'name': 'docker-sock-volume'
            }
        }]
        if target_ref.repo.owner == "hail-ci-test":
            # special case for test repos
            deploy_secret = f'ci-deploy-{VERSION}--hail-is-ci-test-service-account-key'
        else:
            deploy_secret = PRS._deploy_secrets.get(target_ref.repo, None)
        # If the repo has a deploy secret, mount it read-only at /secrets.
        if deploy_secret:
            volumes.append({
                'volume': {
                    'name': deploy_secret,
                    'secret': {
                        'optional': False,
                        'secretName': deploy_secret
                    }
                },
                'volume_mount': {
                    'mountPath': '/secrets',
                    'name': deploy_secret,
                    'readOnly': True
                }
            })
        job = batch_client.create_job(
            img,
            command=['/bin/bash', '-c', PR_DEPLOY_SCRIPT],
            env=env,
            resources={'requests': {
                'cpu': '3.7',
                'memory': '4G'
            }},
            volumes=volumes,
            tolerations=[{
                'key': 'preemptible',
                'value': 'true'
            }],
            security_context={
                'fsGroup': 412,
            },
            service_account_name='deploy-svc',
            attributes=attributes,
            # Batch calls back to this endpoint when the deploy job finishes.
            callback=SELF_HOSTNAME + '/deploy_build_done')
        log.info(f'deploying {target_ref.short_str()}:{latest_sha} in job {job.id}')
        self.deploy_jobs[target_ref] = job
    except Exception as e:
        log.exception(f'could not start deploy job due to {e}')
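# Illustrative sketch (assumption, not from the original file): one plausible
# way try_deploy could be driven, attempting a deploy for every watched target
# that is marked deployable. _watched_targets (ref -> deployable flag) is
# referenced in the assert above; the `prs` object and this loop are assumed.
#
#     for ref, deployable in prs._watched_targets.items():
#         if deployable:
#             prs.try_deploy(ref)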