Example #1
    def rebuild(self, run_id, failed_only=False):
        if failed_only:
            api_version = '6.0-preview.1'
            data = '{"state":"retry"}'
            stages = [
                s['identifier'] for s in self.stages
                if s['result'] != 'succeeded'
            ]
        else:
            api_version = '6.1-preview.1'
            data = '{"state":"retry","forceRetryAllJobs":true}'
            stages = [s['identifier'] for s in self.stages]

        for stage in stages:
            url = 'https://dev.azure.com/' + C.DEFAULT_AZP_ORG + '/' + C.DEFAULT_AZP_PROJECT + '/_apis/build/builds/%s/stages/%s?api-version=%s' % (
                run_id, stage, api_version)

            resp = fetch(
                url,
                verb='patch',
                headers=HEADERS,
                data=data,
                timeout=TIMEOUT,
                auth=(C.DEFAULT_AZP_USER, C.DEFAULT_AZP_TOKEN),
            )
            # on a 404 from the stage retry endpoint, fall back to queueing a
            # brand new build for the pull request
            if resp is not None and resp.status_code == 404:
                data = '{"definition":{"id":20},"reason":"pullRequest","sourceBranch":"refs/pull/%s/merge","repository":{"type":"github"},"triggerInfo":{"pr.sourceBranch":"%s","pr.sourceSha":"%s","pr.id":"%s","pr.title":"%s","pr.number":"%s","pr.isFork":"%s","pr.draft":"%s","pr.sender.name":"%s","pr.sender.avatarUrl":"%s","pr.providerId":"github","pr.autoCancel":"true"},"parameters":"{\\"system.pullRequest.pullRequestId\\":\\"%s\\",\\"system.pullRequest.pullRequestNumber\\":\\"%s\\",\\"system.pullRequest.mergedAt\\":\\"\\",\\"system.pullRequest.sourceBranch\\":\\"%s\\",\\"system.pullRequest.targetBranch\\":\\"%s\\",\\"system.pullRequest.sourceRepositoryUri\\":\\"https://github.com/ansible/ansible\\",\\"system.pullRequest.sourceCommitId\\":\\"%s\\"}"}' % (
                    self._iw.number,
                    self._iw._pr.head.ref,
                    self._iw._pr.head.sha,
                    self._iw._pr.id,
                    self._iw._pr.title,
                    self._iw._pr.number,
                    self._iw.from_fork,
                    self._iw._pr.draft,
                    self._iw._pr.user.login,
                    self._iw._pr.user.avatar_url,
                    self._iw._pr.id,
                    self._iw._pr.number,
                    self._iw._pr.head.ref,
                    self._iw._pr.base.ref,
                    self._iw._pr.head.sha,
                )

                url = 'https://dev.azure.com/' + C.DEFAULT_AZP_ORG + '/' + C.DEFAULT_AZP_PROJECT + '/_apis/build/builds?api-version=6.0'
                resp = fetch(
                    url,
                    verb='post',
                    headers=HEADERS,
                    data=data,
                    timeout=30,
                    auth=(C.DEFAULT_AZP_USER, C.DEFAULT_AZP_TOKEN),
                )
                if not resp:
                    raise Exception("Unable to POST %r to %r" % (data, url))
                break
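
All of these examples go through a shared fetch() helper plus HEADERS and TIMEOUT constants that are not shown on this page. Below is a minimal sketch of what such a wrapper could look like, assuming it simply wraps requests and returns None on transport errors (which is how the callers treat it); it is an illustration, not the project's actual implementation.

# Hedged sketch of fetch(); inferred from the call sites above, not the real code.
import logging

import requests

HEADERS = {'Content-Type': 'application/json'}  # assumed default headers
TIMEOUT = 30                                    # assumed default timeout, in seconds


def fetch(url, verb='get', headers=None, data=None, timeout=TIMEOUT, auth=None, stream=False):
    """Thin wrapper around requests that swallows transport errors."""
    try:
        resp = requests.request(
            verb.upper(),
            url,
            headers=headers,
            data=data,
            timeout=timeout,
            auth=auth,
            stream=stream,
        )
    except requests.RequestException as e:
        logging.error('%s %s failed: %s', verb.upper(), url, e)
        return None
    return resp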
Example #2
    def rebuild(self, run_id, failed_only=False):
        data = {'state': 'retry'}
        if failed_only:
            api_version = '6.0-preview.1'
            stages = [
                s['identifier'] for s in self.stages
                if s['result'] != 'succeeded'
            ]
        else:
            api_version = '6.1-preview.1'
            data['forceRetryAllJobs'] = True
            stages = [s['identifier'] for s in self.stages]

        for stage in stages:
            url = 'https://dev.azure.com/' + C.DEFAULT_AZP_ORG + '/' + C.DEFAULT_AZP_PROJECT + '/_apis/build/builds/%s/stages/%s?api-version=%s' % (
                run_id, stage, api_version)

            resp = fetch(
                url,
                verb='patch',
                headers=HEADERS,
                data=json.dumps(data),
                timeout=TIMEOUT,
                auth=(C.DEFAULT_AZP_USER, C.DEFAULT_AZP_TOKEN),
            )
            if resp is not None and resp.status_code == 404:
                # stage retry endpoint returned 404; trigger a full new build
                # through the legacy endpoint instead
                self._rebuild_old()
                break
Example #3
    def jobs(self):
        if not self.build_id:
            return []

        if self._jobs is None:
            self._jobs = []
            self._updated_at = strip_time_safely('1970-01-01')
            self._stages = []

            if not os.path.isdir(self._cachedir):
                os.makedirs(self._cachedir)
            cache_file = os.path.join(self._cachedir,
                                      u'timeline_%s.pickle' % self.build_id)

            url = TIMELINE_URL_FMT % self.build_id
            resp = fetch(url, timeout=TIMEOUT)
            if resp is None:
                raise Exception('Unable to GET %s' % url)

            if resp.status_code == 404:
                data = None
                if os.path.isfile(cache_file):
                    logging.info(
                        u'timeline was probably removed, load it from cache')
                    with open(cache_file, 'rb') as f:
                        data = pickle.load(f)
            else:
                data = resp.json()
                data = (strip_time_safely(data['lastChangedOn']), data)
                logging.info(u'writing %s' % cache_file)
                with open(cache_file, 'wb') as f:
                    pickle.dump(data, f)

            if data is not None:
                data = data[1]
                self._jobs = [r for r in data['records'] if r['type'] == 'Job']
                self._updated_at = strip_time_safely(data['lastChangedOn'])
                self._stages = [
                    r for r in data['records'] if r['type'] == 'Stage'
                ]

                state = list({j['state'] for j in self.jobs})  # pending, completed, inProgress
                result = list({j['result'] for j in self.jobs})  # succeeded, failed, None
                if 'canceled' in result or 'cancelled' in result:
                    self._state = 'failure'
                elif len(state) == 1 and 'completed' in state:
                    if len(result) == 1 and 'succeeded' in result:
                        self._state = 'success'
                    elif 'failed' in result:
                        self._state = 'failure'
                elif 'pending' in state or 'inProgress' in state:
                    self._state = 'pending'
                else:
                    raise ValueError(
                        'Unknown state for buildId: %s, state: %s' %
                        (self.build_id, state))
        return self._jobs
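
The timeline, artifact, stage and new-build URLs are referenced through module-level constants (TIMELINE_URL_FMT, ARTIFACTS_URL_FMT, STAGE_URL_FMT, NEW_BUILD) in these examples. The definitions below are plausible reconstructions, inferred from the inline URLs built in the other examples and the public Azure DevOps Build API; treat the exact API versions and the constants import as assumptions.

# Assumed definitions of the URL constants used throughout this listing.
import ansibullbot.constants as C  # assumed import path for the C constants module

AZP_BASE = 'https://dev.azure.com/%s/%s' % (C.DEFAULT_AZP_ORG, C.DEFAULT_AZP_PROJECT)
TIMELINE_URL_FMT = AZP_BASE + '/_apis/build/builds/%s/timeline/?api-version=6.0'
ARTIFACTS_URL_FMT = AZP_BASE + '/_apis/build/builds/%s/artifacts?api-version=6.0'
STAGE_URL_FMT = AZP_BASE + '/_apis/build/builds/%s/stages/%s?api-version=6.0-preview.1'
NEW_BUILD = AZP_BASE + '/_apis/build/builds?api-version=6.0'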
Example #4
    def rebuild(self, run_id, failed_only=False):
        if failed_only:
            api_version = u'6.0-preview.1'
            data = '{"state":"retry"}'
            stages = [
                s['identifier'] for s in self.stages
                if s['result'] != 'succeeded'
            ]
        else:
            api_version = u'6.1-preview.1'
            data = '{"state":"retry","forceRetryAllJobs":true}'
            stages = [s['identifier'] for s in self.stages]

        for stage in stages:
            if stage == 'Summary':
                continue
            url = u'https://dev.azure.com/' + C.DEFAULT_AZP_ORG + '/' + C.DEFAULT_AZP_PROJECT + '/_apis/build/builds/%s/stages/%s?api-version=%s' % (
                run_id, stage, api_version)

            resp = fetch(
                url,
                verb='patch',
                headers=HEADERS,
                data=data,
                timeout=TIMEOUT,
                auth=(C.DEFAULT_AZP_USER, C.DEFAULT_AZP_TOKEN),
            )

            if not resp:
                raise Exception("Unable to PATCH %r to %r" % (data, url))
            check_response(resp)
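
Several examples finish with check_response(resp). The snippets only reveal that it accepts a requests.Response (or None, when the result came from cache) and is expected to raise on error responses; the sketch below is a guess consistent with that usage, not the project's implementation.

def check_response(response):
    # assumed behaviour: tolerate None (cache hits) and raise on HTTP errors
    if response is not None and response.status_code >= 400:
        raise Exception(
            'unexpected response code: %s for %s' % (response.status_code, response.url)
        )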
Example #5
    def artifacts(self):
        if self._artifacts is None and self._jobs:
            if not os.path.isdir(self._cachedir):
                os.makedirs(self._cachedir)

            data = None
            cache_file = os.path.join(self._cachedir, 'artifacts_%s.pickle' % self.build_id)
            if os.path.isfile(cache_file):
                logging.info('load artifacts cache')
                with open(cache_file, 'rb') as f:
                    data = pickle.load(f)

            if data is None or (data and data[0] < self.updated_at) or not data[1]:
                if data:
                    logging.info('fetching artifacts: stale, previous from %s' % data[0])
                else:
                    logging.info('fetching artifacts: stale, no previous data')

                url = ARTIFACTS_URL_FMT % self.build_id
                resp = fetch(url)
                if resp is None:
                    raise Exception('Unable to GET %s' % url)

                if resp.status_code != 404:
                    data = [a for a in resp.json()['value'] if a['name'].startswith('Bot')]
                    data = (self.updated_at, data)

                    logging.info('writing %s' % cache_file)
                    with open(cache_file, 'wb') as f:
                        pickle.dump(data, f)
            if data:
                self._artifacts = data[1]

        return self._artifacts
Example #6
    def _get_url(self, url, usecache=False, timeout=TIMEOUT):
        cdir = os.path.join(self.cachedir, u'.raw')
        if not os.path.isdir(cdir):
            os.makedirs(cdir)
        cfile = url.replace(SHIPPABLE_URL + '/', u'')
        cfile = cfile.replace(u'/', u'_')
        cfile = os.path.join(cdir, cfile + u'.json')
        gzfile = cfile + u'.gz'

        # transparently compress old logs
        if os.path.isfile(cfile) and not os.path.isfile(gzfile):
            compress_gzip_file(cfile, gzfile)

        rc = None
        jdata = None
        if os.path.isfile(gzfile):
            try:
                fdata = read_gzip_json_file(gzfile)
                rc = fdata[0]
                jdata = fdata[1]
            except ValueError:
                pass

            if rc == 400:
                return None

        # always use cache for finished jobs...
        is_finished = False
        if isinstance(jdata, list):
            ts = [x.get('endedAt') for x in jdata]
            if None not in ts:
                is_finished = True
        elif isinstance(jdata, dict) and jdata.get(u'endedAt'):
            is_finished = True

        resp = None
        if not os.path.isfile(gzfile) or not jdata or (not usecache and not is_finished):
            if os.path.isfile(gzfile):
                logging.error(gzfile)

            resp = fetch(url, headers=HEADERS, timeout=timeout)
            if not resp:
                return None

            if resp.status_code != 400:
                jdata = resp.json()
                write_gzip_json_file(gzfile, [resp.status_code, jdata])
            else:
                write_gzip_json_file(gzfile, [resp.status_code, {}])
                return None

        check_response(resp)

        if not jdata:
            raise ShippableNoData

        return jdata
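
_get_url() leans on three gzip-cache helpers (compress_gzip_file, read_gzip_json_file, write_gzip_json_file). The sketches below are inferred purely from the call sites above, where the cached payload is a [status_code, json_data] pair; they are not copied from the project.

# Hedged sketches of the gzip cache helpers assumed by _get_url().
import gzip
import json


def compress_gzip_file(src, dst):
    # gzip an existing plain-text cache file (whether the real helper also
    # removes the original is unknown, so it is left in place here)
    with open(src, 'rb') as f_in, gzip.open(dst, 'wb') as f_out:
        f_out.write(f_in.read())


def read_gzip_json_file(path):
    # returns whatever was dumped, e.g. a [status_code, json_data] pair
    with gzip.open(path, 'rt') as f:
        return json.load(f)


def write_gzip_json_file(path, data):
    with gzip.open(path, 'wt') as f:
        json.dump(data, f)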
Example #7
    def _get_run_id(self, run_number):
        run_url = u"%s&runNumbers=%s" % (ANSIBLE_RUNS_URL, run_number)
        response = fetch(run_url, headers=HEADERS, timeout=TIMEOUT)
        if not response:
            raise Exception("Unable to fetch %r" % run_url)
        check_response(response)
        run_id = response.json()[0][u'id']
        logging.debug(run_id)
        return run_id
Example #8
    def rebuild(self, run_id, failed_only=False):
        if failed_only:
            api_version = '6.0-preview.1'
            data = '{"state":"retry"}'
            stages = [
                s['identifier'] for s in self.stages
                if s['result'] != 'succeeded'
            ]
        else:
            api_version = '6.1-preview.1'
            data = '{"state":"retry","forceRetryAllJobs":true}'
            stages = [s['identifier'] for s in self.stages]

        for stage in stages:
            if stage == 'Summary':
                continue
            url = 'https://dev.azure.com/' + C.DEFAULT_AZP_ORG + '/' + C.DEFAULT_AZP_PROJECT + '/_apis/build/builds/%s/stages/%s?api-version=%s' % (
                run_id, stage, api_version)

            resp = fetch(
                url,
                verb='patch',
                headers=HEADERS,
                data=data,
                timeout=TIMEOUT,
                auth=(C.DEFAULT_AZP_USER, C.DEFAULT_AZP_TOKEN),
            )

            if not resp:
                # the stage retry request failed; queue a fresh pipeline run
                # against the PR head instead
                data = '{"resources":{"repositories":{"self":{"refName": "refs/pull/%s/head"}}}}' % self._iw.number
                url = 'https://dev.azure.com/' + C.DEFAULT_AZP_ORG + '/' + C.DEFAULT_AZP_PROJECT + '/_apis/pipelines/20/runs?api-version=6.0-preview.1'
                resp = fetch(
                    url,
                    verb='post',
                    headers=HEADERS,
                    data=data,
                    timeout=30,
                    auth=(C.DEFAULT_AZP_USER, C.DEFAULT_AZP_TOKEN),
                )
                if not resp:
                    raise Exception("Unable to POST %r to %r" % (data, url))
                break

            check_response(resp)
Example #9
    def _rebuild_old(self):
        data = json.dumps({
            'definition': {
                'id': C.DEFAULT_AZP_DEFINITION,
            },
            'reason': 'pullRequest',
            'sourceBranch': 'refs/pull/%s/merge' % self._iw.number,
            'repository': {
                'type': 'github',
            },
            'triggerInfo': {
                'pr.sourceBranch': self._iw._pr.head.ref,
                'pr.sourceSha': self._iw._pr.head.sha,
                'pr.id': self._iw._pr.id,
                'pr.title': self._iw._pr.title,
                'pr.number': self._iw._pr.number,
                'pr.isFork': self._iw.from_fork,
                'pr.draft': self._iw._pr.draft,
                'pr.sender.name': self._iw._pr.user.login,
                'pr.sender.avatarUrl': self._iw._pr.user.avatar_url,
                'pr.providerId': 'github',
                'pr.autoCancel': 'true',
            },
            'parameters': json.dumps({
                'system.pullRequest.pullRequestId': self._iw._pr.id,
                'system.pullRequest.pullRequestNumber': self._iw._pr.number,
                'system.pullRequest.mergedAt': '',
                'system.pullRequest.sourceBranch': self._iw._pr.head.ref,
                'system.pullRequest.targetBranch': self._iw._pr.base.ref,
                'system.pullRequest.sourceRepositoryUri': 'https://github.com/' + self._iw.repo.repo.full_name,
                'system.pullRequest.sourceCommitId': self._iw._pr.head.sha,
            }),
        })

        resp = fetch(
            NEW_BUILD,
            verb='post',
            headers=HEADERS,
            data=data,
            timeout=30,
            auth=(C.DEFAULT_AZP_USER, C.DEFAULT_AZP_TOKEN),
        )
        if not resp:
            raise Exception('Unable to POST %r to %r' % (data, NEW_BUILD))
Example #10
    def cancel(self, run_number):
        """cancel existing run"""
        # always pass the runId in a dict() to requests
        run_id = self._get_run_id(run_number)
        data = {'runId': run_id}

        cancel_url = "%s/runs/%s/cancel" % (SHIPPABLE_URL, run_id)
        response = fetch(cancel_url, verb='post', headers=HEADERS, data=data, timeout=TIMEOUT)
        if not response:
            raise Exception("Unable to POST %r to %r" % (data, cancel_url))
        check_response(response)
        return response
Example #11
    def rebuild(self, run_number, failed_only=False):
        """trigger a new run"""
        # always pass the runId in a dict() to requests
        run_id = self._get_run_id(run_number)
        data = {'runId': run_id}

        if failed_only:
            data['rerunFailedOnly'] = True

        response = fetch(NEW_BUILD_URL, verb='post', headers=HEADERS, data=data, timeout=TIMEOUT)
        if not response:
            raise Exception("Unable to POST %r to %r" % (data, NEW_BUILD_URL))
        check_response(response)
        return response
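
Taken together, the rebuild() and cancel() methods form a small CI-control surface. Below is a hypothetical usage sketch; the class names, constructor arguments and run identifiers are assumptions made up for illustration and do not come from this listing.

# Hypothetical usage sketch (all names below are assumed, not from the source).
azp = AzurePipelinesCI(cachedir='/tmp/azp-cache', iw=issue_wrapper)   # assumed constructor
azp.rebuild(run_id='20210401.42', failed_only=True)   # retry only failed stages

shippable = ShippableRuns(cachedir='/tmp/shippable-cache')            # assumed constructor
shippable.cancel(run_number=12345)                    # cancel the stale legacy run
shippable.rebuild(run_number=12345, failed_only=True) # then re-run its failed jobs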
Example #12
    def jobs(self):
        if self._jobs is None:
            if self.build_id:
                if not os.path.isdir(self._cachedir):
                    os.makedirs(self._cachedir)
                cache_file = os.path.join(
                    self._cachedir, u'timeline_%s.pickle' % self.build_id)

                url = TIMELINE_URL_FMT % self.build_id
                resp = fetch(url)
                if resp is None:
                    raise Exception("Unable to GET %s" % url)

                if resp.status_code == 404:
                    data = None
                    if os.path.isfile(cache_file):
                        logging.info(
                            u'timeline was probably removed, load it from cache'
                        )
                        with open(cache_file, 'rb') as f:
                            data = pickle.load(f)
                else:
                    data = resp.json()
                    data = (strip_time_safely(data['lastChangedOn']), data)
                    logging.info(u'writing %s' % cache_file)
                    with open(cache_file, 'wb') as f:
                        pickle.dump(data, f)

                if data is not None:
                    data = data[1]
                    self._jobs = [
                        r for r in data['records'] if r['type'] == 'Job'
                    ]
                    self._updated_at = strip_time_safely(
                        data['lastChangedOn'])  # FIXME
                    self._stages = [
                        r for r in data['records'] if r['type'] == 'Stage'
                    ]  # FIXME
                else:
                    self._jobs = []
                    self._updated_at = strip_time_safely('1970-01-01')
                    self._stages = []
            else:
                self._jobs = []
        return self._jobs
Example #13
    def get_artifact(self, name, url):
        if not os.path.isdir(self._cachedir):
            os.makedirs(self._cachedir)

        data = None
        cache_file = os.path.join(
            self._cachedir,
            '%s_%s.pickle' % (name.replace(' ', '-'), self.build_id))
        if os.path.isfile(cache_file):
            logging.info('loading %s' % cache_file)
            with open(cache_file, 'rb') as f:
                data = pickle.load(f)

        if data is None or (data and data[0] < self.updated_at) or not data[1]:
            if data:
                logging.info('fetching artifacts: stale, previous from %s' %
                             data[0])
            else:
                logging.info('fetching artifacts: stale, no previous data')

            resp = fetch(url, stream=True)
            if resp is None:
                raise Exception("Unable to GET %s" % url)

            if resp.status_code != 404:
                with BytesIO() as data:
                    for chunk in resp.iter_content(chunk_size=128):
                        data.write(chunk)
                    artifact_zip = ZipFile(data)

                    artifact_data = []
                    for fn in artifact_zip.namelist():
                        if 'ansible-test-' not in fn:
                            continue
                        with artifact_zip.open(fn) as f:
                            artifact_data.append(json.load(f))

                    data = (self.updated_at, artifact_data)
                    logging.info('writing %s' % cache_file)
                    with open(cache_file, 'wb') as f:
                        pickle.dump(data, f)
        if data:
            return data[1]
Example #14
    def cancel(self, run_id):
        stages_in_progress = (s['identifier'] for s in self.stages
                              if s['state'] != 'completed')
        for stage in stages_in_progress:
            if stage == 'Summary':
                continue

            url = STAGE_URL_FMT % (run_id, stage)
            data = json.dumps({'state': 'cancel'})
            resp = fetch(
                url,
                verb='patch',
                headers=HEADERS,
                data=data,
                timeout=TIMEOUT,
                auth=(C.DEFAULT_AZP_USER, C.DEFAULT_AZP_TOKEN),
            )

            if not resp:
                raise Exception('Unable to PATCH %r to %r' % (data, url))
Example #15
    def cancel(self, run_id):
        data = '{"state":"cancel"}'
        for stage in [
                s['identifier'] for s in self.stages
                if s['state'] != 'completed'
        ]:
            if stage == 'Summary':
                continue
            url = 'https://dev.azure.com/' + C.DEFAULT_AZP_ORG + '/' + C.DEFAULT_AZP_PROJECT + '/_apis/build/builds/%s/stages/%s?api-version=6.0-preview.1' % (
                run_id, stage)

            resp = fetch(
                url,
                verb='patch',
                headers=HEADERS,
                data=data,
                timeout=TIMEOUT,
                auth=(C.DEFAULT_AZP_USER, C.DEFAULT_AZP_TOKEN),
            )

            if not resp:
                raise Exception("Unable to PATCH %r to %r" % (data, url))
Example #16
    def artifacts(self):
        if self._artifacts is None:
            # FIXME deduplicate code
            if not os.path.isdir(self._cachedir):
                os.makedirs(self._cachedir)

            data = None
            cache_file = os.path.join(self._cachedir,
                                      u'artifacts_%s.pickle' % self.build_id)
            if os.path.isfile(cache_file):
                logging.info(u'load artifacts cache')
                with open(cache_file, 'rb') as f:
                    data = pickle_load(f)

            if data is None or (data and data[0] < self.updated_at) or not data[1]:
                if data:
                    logging.info(
                        u'fetching artifacts: stale, previous from %s' %
                        data[0])
                else:
                    logging.info(
                        u'fetching artifacts: stale, no previous data')

                resp = fetch(ARTIFACTS_URL_FMT % self.build_id)
                if resp is not None:
                    data = [
                        a for a in resp.json()['value']
                        if a['name'].startswith('Bot')
                    ]
                    data = (self.updated_at, data)

                    logging.info(u'writing %s' % cache_file)
                    with open(cache_file, 'wb') as f:
                        pickle_dump(data, f)
            if data:
                self._artifacts = data[1]

        return self._artifacts