def rebuild(self, run_id, failed_only=False):
    """Retry stages of an Azure Pipelines build.

    Sends one PATCH per stage to the AZP build-stages endpoint. When
    ``failed_only`` is True only stages whose result is not
    'succeeded' are retried (older 6.0 preview API); otherwise every
    stage is retried with ``forceRetryAllJobs`` (requires the 6.1
    preview API).

    :param run_id: AZP build id whose stages should be retried
    :param failed_only: retry only non-succeeded stages when True
    :raises Exception: if a PATCH request yields no response
    """
    if failed_only:
        # Plain per-stage retry works with the older preview API.
        api_version = '6.0-preview.1'
        data = '{"state":"retry"}'
        stages = [
            s['identifier'] for s in self.stages
            if s['result'] != 'succeeded'
        ]
    else:
        # forceRetryAllJobs is only honored by the newer preview API.
        api_version = '6.1-preview.1'
        data = '{"state":"retry","forceRetryAllJobs":true}'
        stages = [s['identifier'] for s in self.stages]

    for stage in stages:
        # 'Summary' is a synthetic stage and cannot be retried.
        if stage == 'Summary':
            continue
        # Build the URL with a single format expression; the original
        # mixed '+' concatenation with '%', which only worked because
        # all placeholders happened to sit in the final literal.
        url = (
            'https://dev.azure.com/%s/%s/_apis/build/builds/%s/stages/%s?api-version=%s'
            % (C.DEFAULT_AZP_ORG, C.DEFAULT_AZP_PROJECT, run_id, stage, api_version)
        )
        resp = fetch(
            url,
            verb='patch',
            headers=HEADERS,
            data=data,
            timeout=TIMEOUT,
            auth=(C.DEFAULT_AZP_USER, C.DEFAULT_AZP_TOKEN),
        )
        if not resp:
            raise Exception("Unable to PATCH %r to %r" % (data, url))
        check_response(resp)
def _get_url(self, url, usecache=False, timeout=TIMEOUT):
    """Fetch a JSON API url through a gzip-backed on-disk cache.

    Cached entries live under ``<cachedir>/.raw`` as gzipped
    ``[status_code, json_body]`` pairs keyed by the url path. A cached
    400 short-circuits to None; cached data for finished jobs is
    always reused; otherwise the url is re-fetched unless
    ``usecache`` forces reuse of whatever is cached.

    :param url: API url to fetch
    :param usecache: when True, prefer cached data even for
        unfinished jobs
    :param timeout: per-request timeout passed to fetch()
    :raises ShippableNoData: when neither cache nor fetch produced data
    """
    # Ensure the raw-cache directory exists.
    cdir = os.path.join(self.cachedir, u'.raw')
    if not os.path.isdir(cdir):
        os.makedirs(cdir)
    # Derive a flat cache filename from the url path.
    cfile = url.replace(SHIPPABLE_URL + '/', u'')
    cfile = cfile.replace(u'/', u'_')
    cfile = os.path.join(cdir, cfile + u'.json')
    gzfile = cfile + u'.gz'

    # transparently compress old logs (migrate legacy .json caches)
    if os.path.isfile(cfile) and not os.path.isfile(gzfile):
        compress_gzip_file(cfile, gzfile)

    rc = None
    jdata = None
    if os.path.isfile(gzfile):
        try:
            fdata = read_gzip_json_file(gzfile)
            rc = fdata[0]
            jdata = fdata[1]
        except ValueError:
            # Corrupt/partial cache entry: fall through and re-fetch.
            pass

    # A cached 400 means the resource is known-bad; don't re-fetch.
    if rc == 400:
        return None

    # always use cache for finished jobs...
    # A list payload is finished when every item has endedAt; a dict
    # payload is finished when its endedAt is set.
    is_finished = False
    if isinstance(jdata, list):
        ts = [x.get('endedAt') for x in jdata]
        if None not in ts:
            is_finished = True
    elif isinstance(jdata, dict) and jdata.get(u'endedAt'):
        is_finished = True

    resp = None
    # Re-fetch when there's no cache, empty cached data, or the job is
    # unfinished and the caller didn't force cache usage.
    if not os.path.isfile(gzfile) or not jdata or (not usecache and not is_finished):
        if os.path.isfile(gzfile):
            # NOTE(review): logs the cache path at ERROR level —
            # presumably a cache-miss trace; confirm intended severity.
            logging.error(gzfile)
        resp = fetch(url, headers=HEADERS, timeout=timeout)
        if not resp:
            return None

        if resp.status_code != 400:
            jdata = resp.json()
            write_gzip_json_file(gzfile, [resp.status_code, jdata])
        else:
            # Remember the 400 so future calls short-circuit above.
            write_gzip_json_file(gzfile, [resp.status_code, {}])
            return None
        check_response(resp)

    if not jdata:
        raise ShippableNoData

    return jdata
def _get_run_id(self, run_number):
    """Resolve a human-facing run number to its internal run id."""
    url = u"%s&runNumbers=%s" % (ANSIBLE_RUNS_URL, run_number)
    resp = fetch(url, headers=HEADERS, timeout=TIMEOUT)
    if not resp:
        raise Exception("Unable to fetch %r" % url)
    check_response(resp)
    # The runs endpoint returns a list; the first entry is our run.
    rid = resp.json()[0][u'id']
    logging.debug(rid)
    return rid
def cancel(self, run_number):
    """cancel existing run"""
    # always pass the runId in a dict() to requests
    run_id = self._get_run_id(run_number)
    payload = {'runId': run_id}
    url = "%s/runs/%s/cancel" % (SHIPPABLE_URL, run_id)
    resp = fetch(url, verb='post', headers=HEADERS, data=payload, timeout=TIMEOUT)
    if resp:
        check_response(resp)
        return resp
    raise Exception("Unable to POST %r to %r" % (payload, url))
def rebuild(self, run_number, failed_only=False):
    """trigger a new run"""
    # always pass the runId in a dict() to requests
    payload = {'runId': self._get_run_id(run_number)}
    if failed_only:
        payload['rerunFailedOnly'] = True
    resp = fetch(NEW_BUILD_URL, verb='post', headers=HEADERS, data=payload, timeout=TIMEOUT)
    if resp:
        check_response(resp)
        return resp
    raise Exception("Unable to POST %r to %r" % (payload, NEW_BUILD_URL))
def rebuild(self, run_id, failed_only=False):
    """Retry stages of an AZP build, falling back to a fresh pipeline run.

    Sends a retry PATCH per stage. If a stage PATCH yields no
    response, the method instead queues a brand-new pipeline run for
    this PR's head ref and stops iterating.

    :param run_id: AZP build id whose stages should be retried
    :param failed_only: retry only non-succeeded stages when True
    :raises Exception: if the fallback POST yields no response
    """
    if failed_only:
        # Plain per-stage retry works with the older preview API.
        api_version = '6.0-preview.1'
        data = '{"state":"retry"}'
        stages = [
            s['identifier'] for s in self.stages
            if s['result'] != 'succeeded'
        ]
    else:
        # forceRetryAllJobs is only honored by the newer preview API.
        api_version = '6.1-preview.1'
        data = '{"state":"retry","forceRetryAllJobs":true}'
        stages = [s['identifier'] for s in self.stages]

    for stage in stages:
        # 'Summary' is a synthetic stage and cannot be retried.
        if stage == 'Summary':
            continue
        url = 'https://dev.azure.com/' + C.DEFAULT_AZP_ORG + '/' + C.DEFAULT_AZP_PROJECT + '/_apis/build/builds/%s/stages/%s?api-version=%s' % (
            run_id, stage, api_version)
        resp = fetch(
            url,
            verb='patch',
            headers=HEADERS,
            data=data,
            timeout=TIMEOUT,
            auth=(C.DEFAULT_AZP_USER, C.DEFAULT_AZP_TOKEN),
        )
        if not resp:
            # Stage retry failed: queue an entirely new pipeline run
            # against this PR's head ref instead.
            data = '{"resources":{"repositories":{"self":{"refName": "refs/pull/%s/head"}}}}' % self._iw.number
            url = 'https://dev.azure.com/' + C.DEFAULT_AZP_ORG + '/' + C.DEFAULT_AZP_PROJECT + '/_apis/pipelines/20/runs?api-version=6.0-preview.1'
            resp = fetch(
                url,
                verb='post',
                headers=HEADERS,
                data=data,
                timeout=30,  # NOTE(review): hard-coded; siblings use TIMEOUT — confirm intent
                auth=(C.DEFAULT_AZP_USER, C.DEFAULT_AZP_TOKEN),
            )
            if not resp:
                raise Exception("Unable to POST %r to %r" % (data, url))
            # Bug fix: validate the fallback response before leaving the
            # loop; the original broke out before check_response() ran,
            # so an HTTP error from the fallback POST went undetected.
            check_response(resp)
            break
        check_response(resp)
def cancel(self, run_id):
    """Issue a cancel request for every stage that has not completed."""
    data = '{"state":"cancel"}'
    pending = [
        s['identifier'] for s in self.stages
        if s['state'] != 'completed'
    ]
    for stage in pending:
        # 'Summary' is a synthetic stage and cannot be cancelled.
        if stage == 'Summary':
            continue
        url = 'https://dev.azure.com/' + C.DEFAULT_AZP_ORG + '/' + C.DEFAULT_AZP_PROJECT + '/_apis/build/builds/%s/stages/%s?api-version=6.0-preview.1' % (
            run_id, stage)
        response = fetch(
            url,
            verb='patch',
            headers=HEADERS,
            data=data,
            timeout=TIMEOUT,
            auth=(C.DEFAULT_AZP_USER, C.DEFAULT_AZP_TOKEN),
        )
        if not response:
            raise Exception("Unable to PATCH %r to %r" % (data, url))
        check_response(response)