def get_node_jobs(self, count=20, page=None):
    # Ask paddles for one page of this node's jobs and prettify each one.
    page = page or 1
    url = urlparse.urljoin(
        base_url,
        '/nodes/{0}/jobs/?count={1}&page={2}'.format(
            self.name, count, page)
    )
    resp = requests.get(url)
    jobs = resp.json()
    for job in jobs:
        prettify_job(job)
    self.node['jobs'] = jobs
    return self.node
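For reference, a standalone sketch of the URL the method above builds. The base address, node name, and paging values are invented for illustration, and Python 3's urllib.parse.urljoin stands in for the urlparse.urljoin call used above:

from urllib.parse import urljoin

base_url = 'http://paddles.example.com/'   # hypothetical paddles address
name, count, page = 'mira001', 20, 1       # hypothetical node and paging values

url = urljoin(base_url, '/nodes/{0}/jobs/?count={1}&page={2}'.format(name, count, page))
print(url)  # http://paddles.example.com/nodes/mira001/jobs/?count=20&page=1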
def __init__(self, run_name, job_id):
    self.run_name = run_name
    self.job_id = job_id
    url = urljoin(
        base_url,
        "/runs/{0}/jobs/{1}/".format(run_name, job_id)
    )
    resp = requests.get(url)
    if resp.status_code == 400:
        error('/errors/invalid/')
    elif resp.status_code == 404:
        error('/errors/not_found/')
    else:
        self.job = resp.json()
        prettify_job(self.job)
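The error() helper used here and below is not part of this excerpt. A minimal sketch of what it might look like, assuming a pecan-style app where error pages live under /errors/ (both the framework and the helper's exact behavior are assumptions):

from pecan import redirect

def error(url, msg=''):
    """Hypothetical helper: send the client to an error page such as
    /errors/not_found/; the real implementation may also record ``msg``
    for display on that page."""
    redirect(url)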
def get_run(self):
    url = urljoin(base_url, '/runs/%s/' % self.name)
    resp = requests.get(url)
    if resp.status_code == 404:
        error('/errors/not_found/', 'requested run does not exist')
    else:
        run = resp.json()
        if 'scheduled' in run:
            run['scheduled_day'] = run['scheduled'].split()[0]
        if 'jobs' in run:
            for job in run['jobs']:
                prettify_job(job)
        prettify_run(run)
        self.run = run
    return self.run
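The scheduled_day value is just the date portion of the run's scheduled timestamp; a quick illustration with a made-up timestamp:

run = {'scheduled': '2014-02-06 16:12:34'}   # hypothetical paddles timestamp
run['scheduled_day'] = run['scheduled'].split()[0]
print(run['scheduled_day'])                  # 2014-02-06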
def index(self, suite, branch, since=None, count=3):
    """
    Ask paddles for a list of runs of ``suite`` on ``branch``, then build
    a dict that looks like:

    {'runs': [
        {'name': run_name,
         'scheduled': scheduled,
         'jobs': {
             job_description: {'job_id': job_id, 'status': status},
             ...
         }},
        ...
        ],
     'descriptions': [job_description, ...],
    }
    """
    url = urlparse.urljoin(
        base_url,
        '/runs/branch/{branch}/suite/{suite}/?count={count}'.format(
            branch=branch, suite=suite, count=str(count))
    )
    if since:
        url += '&since=' + since
    runs = requests.get(url).json()
    full_info = dict(
        branch=branch,
        suite=suite,
        since=since,
        runs=list(),
    )
    descriptions = set()
    for run in runs:
        run_info = dict()
        url = urlparse.urljoin(
            base_url,
            '/runs/{0}/jobs/?fields=job_id,description,status,log_href,failure_reason'.format(  # noqa
                run['name'])
        )
        resp = requests.get(url)
        if resp.status_code == 404:
            error('/errors/not_found/')
        else:
            jobs = resp.json()
            run_info['name'] = run['name']
            run_info['scheduled'] = run['scheduled']
            run_info['jobs'] = dict()
            for job in jobs:
                description = job.pop('description')
                prettify_job(job)
                descriptions.add(description)
                run_info['jobs'][description] = job
            full_info['runs'].append(run_info)
    full_info['runs'].reverse()
    full_info['descriptions'] = sorted(list(descriptions))
    return full_info
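To make the docstring concrete, this is the shape of the dict index() returns for a single hypothetical run with two jobs; every name and value here is invented for illustration, and prettify_job may add or adjust fields in each job dict:

full_info = {
    'branch': 'master',
    'suite': 'rados',
    'since': None,
    'runs': [
        {'name': 'teuthology-2014-02-06_rados-example',   # hypothetical run name
         'scheduled': '2014-02-06 16:12:34',
         'jobs': {
             'rados/basic/one.yaml': {'job_id': '1', 'status': 'pass',
                                      'log_href': None, 'failure_reason': None},
             'rados/basic/two.yaml': {'job_id': '2', 'status': 'fail',
                                      'log_href': None, 'failure_reason': None},
         }},
    ],
    'descriptions': ['rados/basic/one.yaml', 'rados/basic/two.yaml'],
}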