Example #1
0
def refresh_ci_build_jobs(jobs):
    """Reconcile PR state with the given CI build jobs.

    Keeps only jobs whose (source, target) pair still corresponds to a
    known PR, cancels all but the most-preferred job per pair (per
    ``job_ordering``), and hands the survivors to ``prs``.
    """
    parsed = [(FQSHA.from_json(json.loads(j.attributes['source'])),
               FQSHA.from_json(json.loads(j.attributes['target'])),
               j)
              for j in jobs]
    relevant = [(s, t, j) for (s, t, j) in parsed if prs.exists(s, t)]
    best = {}
    for source, target, job in relevant:
        pair = (source, target)
        incumbent = best.get(pair)
        if incumbent is None:
            best[pair] = job
        elif job_ordering(job, incumbent) > 0:
            log.info(f'cancelling {incumbent.id}, preferring {job.id}')
            try_to_cancel_job(incumbent)
            best[pair] = job
        else:
            log.info(f'cancelling {job.id}, preferring {incumbent.id}')
            try_to_cancel_job(job)
    prs.refresh_from_ci_jobs(best)
Example #2
0
    def update_from_completed_batch_job(self, job):
        """Fold a finished batch job into this PR's build state.

        Jobs referring to an outdated source or target sha leave the PR
        unchanged; otherwise the build state advances to Mergeable on
        exit code 0 and to Failure otherwise.  The batch job is deleted
        in every case.  Returns the resulting PR.
        """
        assert isinstance(job, Job)
        exit_code = job.cached_status()['exit_code']
        job_source = FQSHA.from_json(json.loads(job.attributes['source']))
        job_target = FQSHA.from_json(json.loads(job.attributes['target']))
        assert job_source.ref == self.source.ref
        assert job_target.ref == self.target.ref

        if job_target.sha != self.target.sha:
            # attributes/short_str intentionally omitted from the log: too noisy
            log.info(f'notified of job for old target {job.id}')
            result = self
        elif job_source.sha != self.source.sha:
            log.info(f'notified of job for old source {job.id}')
            result = self
        elif exit_code == 0:
            log.info(f'job finished success {short_str_build_job(job)} {self.short_str()}')
            result = self._new_build(Mergeable(self.target.sha))
        else:
            log.info(f'job finished failure {short_str_build_job(job)} {self.short_str()}')
            result = self._new_build(
                Failure(exit_code, job.attributes['image'], self.target.sha))
        job.delete()
        return result
Example #3
0
    def update_from_completed_batch_job(self, job):
        """Consume a completed batch job and return the updated PR.

        Stale jobs (old source or target sha) are logged and ignored;
        fresh jobs move the build state to Mergeable or Failure based on
        the exit code.  The job itself is always deleted.
        """
        assert isinstance(job, Job)
        status = job.cached_status()
        exit_code = status['exit_code']
        job_source = FQSHA.from_json(json.loads(job.attributes['source']))
        job_target = FQSHA.from_json(json.loads(job.attributes['target']))
        assert job_source.ref == self.source.ref
        assert job_target.ref == self.target.ref

        if job_target.sha != self.target.sha:
            # attributes/short_str intentionally left out of the log: too noisy
            log.info(f'notified of job for old target {job.id}')
            job.delete()
            return self
        if job_source.sha != self.source.sha:
            log.info(f'notified of job for old source {job.id}')
            job.delete()
            return self
        if exit_code == 0:
            log.info(f'job finished success {short_str_build_job(job)} {self.short_str()}')
            new_state = Mergeable(self.target.sha)
        else:
            log.info(f'job finished failure {short_str_build_job(job)} {self.short_str()}')
            new_state = Failure(exit_code, job.attributes['image'], self.target.sha)
        updated = self._new_build(new_state)
        job.delete()
        return updated
Example #4
0
def ci_build_done():
    """Webhook endpoint: a CI build job finished; route it to the PR table."""
    payload = request.json
    attrs = payload['attributes']
    receive_ci_job(
        FQSHA.from_json(json.loads(attrs['source'])),
        FQSHA.from_json(json.loads(attrs['target'])),
        Job(batch_client, payload['id'], attributes=attrs, _status=payload))
    return '', 200
Example #5
0
def ci_build_done():
    """Batch callback invoked when a CI build job completes."""
    payload = request.json
    attrs = payload['attributes']
    src = FQSHA.from_json(json.loads(attrs['source']))
    tgt = FQSHA.from_json(json.loads(attrs['target']))
    job = Job(batch_client, payload['id'], attributes=attrs, _status=payload)
    receive_ci_job(src, tgt, job)
    return '', 200
Example #6
0
 def from_gh_json(d, target_sha=None):
     """Build a GitHubPR from GitHub's pull_request JSON payload."""
     for field in ('state', 'number', 'title', 'head', 'base'):
         assert field in d, d
     return GitHubPR(d['state'], str(d['number']), str(d['title']),
                     FQSHA.from_gh_json(d['head']),
                     FQSHA.from_gh_json(d['base']).ref, target_sha)
Example #7
0
 def from_gh_json(d, target_sha=None):
     """Construct a GitHubPR from a GitHub pull_request JSON object."""
     assert 'state' in d, d
     assert 'number' in d, d
     assert 'title' in d, d
     assert 'head' in d, d
     assert 'base' in d, d
     head = FQSHA.from_gh_json(d['head'])
     base_ref = FQSHA.from_gh_json(d['base']).ref
     return GitHubPR(d['state'], str(d['number']), str(d['title']),
                     head, base_ref, target_sha)
Example #8
0
 def from_json(d):
     """Deserialize a PR from its JSON dict representation."""
     for key in ('target', 'source', 'review', 'build', 'number', 'title'):
         assert key in d, d
     return PR(FQSHA.from_json(d['source']),
               FQSHA.from_json(d['target']),
               d['review'],
               build_state_from_json(d['build']),
               d['number'],
               d['title'])
Example #9
0
 def from_json(d):
     """Rehydrate a PR from its JSON dict form."""
     assert 'target' in d, d
     assert 'source' in d, d
     assert 'review' in d, d
     assert 'build' in d, d
     assert 'number' in d, d
     assert 'title' in d, d
     source = FQSHA.from_json(d['source'])
     target = FQSHA.from_json(d['target'])
     build = build_state_from_json(d['build'])
     return PR(source, target, d['review'], build, d['number'], d['title'])
Example #10
0
def refresh_deploy_jobs(jobs):
    """Reconcile deploy-job bookkeeping with the given batch jobs.

    Drops jobs without a target or whose target ref is not being
    tracked, cancels all but the most-preferred job per target (per
    ``job_ordering``), and hands the survivors to ``prs``.
    """
    tagged = [(FQSHA.from_json(json.loads(j.attributes['target'])), j)
              for j in jobs
              if 'target' in j.attributes]
    tracked = [(t, j) for (t, j) in tagged if t.ref in prs.deploy_jobs]
    newest = {}
    for target, job in tracked:
        incumbent = newest.get(target)
        if incumbent is None:
            newest[target] = job
        elif job_ordering(job, incumbent) > 0:
            log.info(f'cancelling {incumbent.id}, preferring {job.id}')
            try_to_cancel_job(incumbent)
            newest[target] = job
        else:
            log.info(f'cancelling {job.id}, preferring {incumbent.id}')
            try_to_cancel_job(job)
    prs.refresh_from_deploy_jobs(newest)
Example #11
0
def deploy_build_done():
    """Webhook endpoint: a deploy job finished; route it to the deploy tracker."""
    body = request.json
    attrs = body['attributes']
    deploy_target = FQSHA.from_json(json.loads(attrs['target']))
    receive_deploy_job(
        deploy_target,
        Job(batch_client, body['id'], attributes=attrs, _status=body))
    return '', 200
Example #12
0
def short_str_build_job(job):
    """Return a one-line human-readable summary of a build job for logging."""
    state = job.cached_status()['state']
    attr = job.attributes
    for field in ('target', 'source', 'type', 'image'):
        assert field in attr, f'{attr} {job.id}'
    target = FQSHA.from_json(json.loads(attr['target']))
    source = FQSHA.from_json(json.loads(attr['source']))
    return (f'[buildjob {job.id}]{state};'
            f'{target.short_str()}'
            f'..'
            f'{source.short_str()};'
            f'{attr["type"]};{attr["image"]};')
Example #13
0
def deploy_build_done():
    """Batch callback invoked when a deploy build job completes."""
    payload = request.json
    attributes = payload['attributes']
    target = FQSHA.from_json(json.loads(attributes['target']))
    receive_deploy_job(
        target,
        Job(batch_client, payload['id'], attributes=attributes, _status=payload))
    return '', 200
Example #14
0
def github_push():
    """Webhook endpoint: a push to a watched repository.

    Only branch pushes (refs/heads/...) are acted upon; tags and other
    refs are logged and ignored.
    """
    d = request.json
    ref = d['ref']
    # The slice ref[11:] strips 'refs/heads/' (11 chars), so the guard must
    # include the trailing slash; the bare prefix 'refs/heads' would also
    # accept refs like 'refs/headstrong' and mangle the branch name.  The
    # log message below already claimed the slash was required.
    if ref.startswith('refs/heads/'):
        target_ref = FQRef(Repo.from_gh_json(d['repository']), ref[11:])
        target = FQSHA(target_ref, d['after'])
        prs.push(target)
    else:
        log.info(f'ignoring ref push {ref} because it does not start with '
                 '"refs/heads/"')
    return '', 200
Example #15
0
def refresh_ci_build_jobs(jobs):
    """Cancel superseded CI build jobs and report the freshest per PR."""
    freshest = {}
    for job in jobs:
        source = FQSHA.from_json(json.loads(job.attributes['source']))
        target = FQSHA.from_json(json.loads(job.attributes['target']))
        if not prs.exists(source, target):
            continue
        key = (source, target)
        held = freshest.get(key)
        if held is None:
            freshest[key] = job
        elif job_ordering(job, held) > 0:
            log.info(f'cancelling {held.id}, preferring {job.id}')
            try_to_cancel_job(held)
            freshest[key] = job
        else:
            log.info(f'cancelling {job.id}, preferring {held.id}')
            try_to_cancel_job(job)
    prs.refresh_from_ci_jobs(freshest)
Example #16
0
 def to_PR(self, start_build=False):
     """Materialize a PR from this GitHubPR.

     Resolves the target sha (querying for the latest when unknown) and
     optionally kicks off a build.
     """
     sha = (latest_sha_for_ref(self.target_ref)
            if self.target_sha is None
            else self.target_sha)
     pr = PR.fresh(self.source, FQSHA(self.target_ref, sha),
                   self.number, self.title)
     return pr.build_it() if start_build else pr
Example #17
0
def github_pull_request():
    """Webhook endpoint for GitHub pull_request (and ping) events.

    Handles 'opened'/'synchronize' (record/refresh the PR) and 'closed'
    (forget it); other actions are logged and ignored.
    """
    d = request.json
    # GitHub's webhook "ping" event carries a 'zen' field and no
    # pull_request payload; acknowledge it instead of tripping the
    # asserts below.  (The other pull_request handler in this file
    # already does this.)
    if 'zen' in d:
        log.info(f'received zen: {d["zen"]}')
        return '', 200

    assert 'action' in d, d
    assert 'pull_request' in d, d
    action = d['action']
    if action in ('opened', 'synchronize'):
        target_sha = FQSHA.from_gh_json(d['pull_request']['base']).sha
        gh_pr = GitHubPR.from_gh_json(d['pull_request'], target_sha)
        prs.pr_push(gh_pr)
    elif action == 'closed':
        gh_pr = GitHubPR.from_gh_json(d['pull_request'])
        log.info(f'forgetting closed pr {gh_pr.short_str()}')
        prs.forget(gh_pr.source.ref, gh_pr.target_ref)
    else:
        log.info(f'ignoring pull_request with action {action}')
    return '', 200
Example #18
0
 def update_from_github_pr(self, gh_pr):
     """Merge fresh GitHub state (shas, title, number) into this PR.

     Returns a (possibly new) PR object reflecting any new shas, title,
     or number seen on GitHub; the PR is never mutated in place.
     """
     assert isinstance(gh_pr, GitHubPR)
     assert self.target.ref == gh_pr.target_ref
     assert self.source.ref == gh_pr.source.ref
     # this will build new PRs when the server restarts
     if gh_pr.target_sha:
         result = self._maybe_new_shas(
             new_source=gh_pr.source,
             new_target=FQSHA(gh_pr.target_ref, gh_pr.target_sha))
     else:
         result = self._maybe_new_shas(new_source=gh_pr.source)
     if self.title != gh_pr.title:
         log.info(f'found new title from github {gh_pr.title} {self.short_str()}')
         result = result.copy(title=gh_pr.title)
     if self.number != gh_pr.number:
         # bug fix: this message previously interpolated the title, not the number
         log.info(f'found new PR number from github {gh_pr.number} {self.short_str()}')
         result = result.copy(number=gh_pr.number)
     return result
Example #19
0
def refresh_deploy_jobs(jobs):
    """Cancel superseded deploy jobs and report the freshest per target."""
    winners = {}
    for job in jobs:
        if 'target' not in job.attributes:
            continue
        target = FQSHA.from_json(json.loads(job.attributes['target']))
        if target.ref not in prs.deploy_jobs:
            continue
        current = winners.get(target)
        if current is None:
            winners[target] = job
        elif job_ordering(job, current) > 0:
            log.info(f'cancelling {current.id}, preferring {job.id}')
            try_to_cancel_job(current)
            winners[target] = job
        else:
            log.info(f'cancelling {job.id}, preferring {current.id}')
            try_to_cancel_job(job)
    prs.refresh_from_deploy_jobs(winners)
Example #20
0
def github_pull_request():
    """Webhook endpoint for GitHub pull_request (and ping) events."""
    payload = request.json
    # GitHub's webhook ping event carries only a 'zen' field; acknowledge it.
    if 'zen' in payload:
        log.info(f'received zen: {payload["zen"]}')
        return '', 200

    assert 'action' in payload, payload
    assert 'pull_request' in payload, payload
    action = payload['action']
    if action == 'closed':
        gh_pr = GitHubPR.from_gh_json(payload['pull_request'])
        log.info(f'forgetting closed pr {gh_pr.short_str()}')
        prs.forget(gh_pr.source.ref, gh_pr.target_ref)
    elif action in ('opened', 'synchronize'):
        base_sha = FQSHA.from_gh_json(payload['pull_request']['base']).sha
        gh_pr = GitHubPR.from_gh_json(payload['pull_request'], base_sha)
        prs.pr_push(gh_pr)
    else:
        log.info(f'ignoring pull_request with action {action}')
    return '', 200
Example #21
0
 def refresh_from_batch_job(self, job):
     """Update this PR's build state from a batch job's current status.

     Completed jobs are folded in; cancelled jobs trigger a rebuild;
     still-created jobs either attach as the running build or, when
     their target is stale, are deleted.
     """
     state = job.cached_status()['state']
     if state == 'Complete':
         return self.update_from_completed_batch_job(job)
     if state == 'Cancelled':
         log.error(
             f'a job for me was cancelled {short_str_build_job(job)} {self.short_str()}')
         job.delete()
         return self._new_build(try_new_build(self.source, self.target))
     assert state == 'Created', f'{state} {job.id} {job.attributes} {self.short_str()}'
     assert 'target' in job.attributes, job.attributes
     assert 'image' in job.attributes, job.attributes
     job_target = FQSHA.from_json(json.loads(job.attributes['target']))
     image = job.attributes['image']
     if job_target != self.target:
         # NOTE(review): message says 'deploy job' but this path handles build
         # jobs — possibly a copy-paste; confirm before changing the wording.
         log.info(f'found deploy job {job.id} for wrong target {job_target}, should be {self.target}')
         job.delete()
         return self
     return self._new_build(Building(job, image, job_target.sha))
Example #22
0
 def try_deploy(self, target_ref):
     """Start a deploy batch job for the latest sha of ``target_ref``.

     No-op when a deploy job is already running for the ref or when the
     latest sha has already been deployed.  Any failure to create the
     job is logged and swallowed (a later call can retry).
     """
     assert isinstance(target_ref, FQRef)
     assert self.is_deployable_target_ref(target_ref), \
         f'{target_ref} is non-deployable {[(ref.short_str(), deployable) for ref, deployable in self._watched_targets.items()]}'
     # at most one in-flight deploy job per target ref
     old_job = self.deploy_jobs.get(target_ref, None)
     if old_job is not None:
         log.info(
             f'will not deploy while deploy job {old_job.id} is running')
         return
     latest_sha = latest_sha_for_ref(target_ref)
     if latest_sha == self.latest_deployed[target_ref]:
         log.info(f'already deployed {latest_sha}')
         return
     try:
         img = get_image_for_target(target_ref)
         # attributes let the job be matched back to its target when the
         # completion callback or a refresh sees it later
         attributes = {
             'target': json.dumps(FQSHA(target_ref, latest_sha).to_json()),
             'image': img,
             'type': DEPLOY_JOB_TYPE
         }
         env = {
             'DEPLOY_REPO_URL': target_ref.repo.url,
             'DEPLOY_BRANCH': target_ref.name,
             'DEPLOY_SHA': latest_sha
         }
         # mount the host docker socket so the deploy script can drive docker
         volumes = [{
             'volume': {
                 'name': 'docker-sock-volume',
                 'hostPath': {
                     'path': '/var/run/docker.sock',
                     'type': 'File'
                 }
             },
             'volume_mount': {
                 'mountPath': '/var/run/docker.sock',
                 'name': 'docker-sock-volume'
             }
         }]
         if target_ref.repo.owner == "hail-ci-test":
             # special case for test repos
             deploy_secret = f'ci-deploy-{VERSION}--hail-is-ci-test-service-account-key'
         else:
             deploy_secret = PRS._deploy_secrets.get(target_ref.repo, None)
         if deploy_secret:
             # expose the per-repo service-account key to the job at /secrets
             volumes.append({
                 'volume': {
                     'name': f'{deploy_secret}',
                     'secret': {
                         'optional': False,
                         'secretName': f'{deploy_secret}'
                     }
                 },
                 'volume_mount': {
                     'mountPath': '/secrets',
                     'name': f'{deploy_secret}',
                     'readOnly': True
                 }
             })
         job = batch_client.create_job(
             img,
             command=['/bin/bash', '-c', PR_DEPLOY_SCRIPT],
             env=env,
             resources={'requests': {
                 'cpu': '3.7',
                 'memory': '4G'
             }},
             volumes=volumes,
             tolerations=[{
                 'key': 'preemptible',
                 'value': 'true'
             }],
             security_context={
                 'fsGroup': 412,
             },
             attributes=attributes,
             # batch calls back here when the job finishes
             callback=SELF_HOSTNAME + '/deploy_build_done')
         log.info(
             f'deploying {target_ref.short_str()}:{latest_sha} in job {job.id}'
         )
         # remember the job so a subsequent try_deploy won't start a duplicate
         self.deploy_jobs[target_ref] = job
     except Exception as e:
         log.exception(f'could not start deploy job due to {e}')