def get_build_numbers(job_dir, before, indirect):
    """Return up to 40 recent build identifiers for job_dir, newest first.

    Fast path reads latest-build.txt and counts down; on any failure (or
    for direct PR paths, which deliberately raise) it falls back to an
    expensive GCS listing.
    """
    try:
        if '/pull/' in job_dir and not indirect:
            raise ValueError('bad code path for PR build list')
        # latest-build.txt lets us skip an expensive GCS ls call.
        if before:
            latest_build = int(before) - 1
        else:
            latest_build = int(
                gcs_async.read(job_dir + 'latest-build.txt').get_result())
        # latest-build.txt records the most recent *finished* build; probe
        # forward for newer builds that have started but not finished.
        suffix = '.txt' if indirect else '/started.json'
        probe = latest_build + 1
        while gcs_async.read('%s%s%s' % (job_dir, probe, suffix)).get_result():
            latest_build = probe
            probe += 1
        return range(latest_build, max(0, latest_build - 40), -1)
    except (ValueError, TypeError):
        fstats = view_base.gcs_ls(job_dir)
        fstats.sort(key=lambda f: view_base.pad_numbers(f.filename),
                    reverse=True)
        if indirect:
            # indirect jobs store numbered .txt pointer files, not dirs
            matches = (re.search(r'/(\d*)\.txt$', f.filename)
                       for f in fstats if not f.is_dir)
            builds = [m.group(1) for m in matches if m]
        else:
            builds = [os.path.basename(os.path.dirname(f.filename))
                      for f in fstats if f.is_dir]
        if before and before in builds:
            builds = builds[builds.index(before) + 1:]
        return builds[:40]
def pr_builds(path):
    """Return {job: [(build, {started.json}, {finished.json})]} for each job under gcs path."""
    def tail_dir(gcs_path):
        # last path component of a slash-terminated GCS directory path
        return os.path.basename(os.path.dirname(gcs_path))

    jobs_dirs_fut = gcs_async.listdirs(path)
    pending = [(job_dir, gcs_async.listdirs(job_dir))
               for job_dir in jobs_dirs_fut.get_result()]

    rows = []
    for job_dir, builds_fut in pending:
        for build_dir in builds_fut.get_result():
            rows.append([
                tail_dir(job_dir),
                tail_dir(build_dir),
                gcs_async.read('/%sstarted.json' % build_dir),
                gcs_async.read('/%sfinished.json' % build_dir)])
    # newest builds first, grouped by job name
    rows.sort(key=lambda row: (row[0], view_base.pad_numbers(row[1])),
              reverse=True)

    jobs = {}
    for job, build, started_fut, finished_fut in rows:
        started, finished = view_build.normalize_metadata(
            started_fut, finished_fut)
        jobs.setdefault(job, []).append((build, started, finished))
    return jobs
def get_build_numbers(job_dir, before, indirect):
    """Return up to 40 recent build identifiers for job_dir, newest first."""
    try:
        if 'pr-logs' in job_dir and not indirect:
            raise ValueError('bad code path for PR build list')
        # Fast path: latest-build.txt avoids an expensive GCS ls call.
        latest_build = (int(before) - 1 if before else
                        int(gcs_async.read(job_dir + 'latest-build.txt')
                            .get_result()))
        # The file records the newest *finished* build -- walk forward to
        # pick up builds that have started but not yet finished.
        suffix = '.txt' if indirect else '/started.json'
        while gcs_async.read(
                '%s%s%s' % (job_dir, latest_build + 1, suffix)).get_result():
            latest_build += 1
        return range(latest_build, max(0, latest_build - 40), -1)
    except (ValueError, TypeError):
        # Slow path: list the directory and pull build numbers out of it.
        fstats = view_base.gcs_ls(job_dir)
        fstats.sort(key=lambda f: view_base.pad_numbers(f.filename),
                    reverse=True)
        if indirect:
            builds = []
            for stat in fstats:
                if stat.is_dir:
                    continue
                found = re.search(r'/(\d*)\.txt$', stat.filename)
                if found:
                    builds.append(found.group(1))
        else:
            builds = [os.path.basename(os.path.dirname(stat.filename))
                      for stat in fstats if stat.is_dir]
        if before and before in builds:
            builds = builds[builds.index(before) + 1:]
        return builds[:40]
def get(self, prefix, job):
    """Render the build list page for a single job directory."""
    job_dir = '/%s/%s/' % (prefix, job)
    listing = view_base.gcs_ls(job_dir)
    # newest builds first
    listing.sort(key=lambda f: view_base.pad_numbers(f.filename),
                 reverse=True)
    self.render('build_list.html',
                dict(job=job, job_dir=job_dir, fstats=listing))
def get(self, prefix, job):
    """Render the build list page for a single job directory."""
    job_dir = '/%s/%s/' % (prefix, job)
    # sort the listing newest-first before handing it to the template
    fstats = sorted(view_base.gcs_ls(job_dir),
                    key=lambda f: view_base.pad_numbers(f.filename),
                    reverse=True)
    self.render('build_list.html',
                dict(job=job, job_dir=job_dir, fstats=fstats))
def pr_builds(path):
    """Return {job: [(build, {started.json}, {finished.json})]} for each job under gcs path."""
    def name_of(dir_path):
        # trailing path component of a slash-terminated GCS directory
        return os.path.basename(os.path.dirname(dir_path))

    def decode(fut):
        # parse a started/finished blob; missing files stay None
        blob = fut.get_result()
        return None if blob is None else json.loads(blob)

    job_dirs_fut = gcs_async.listdirs(path)
    per_job = [(job, gcs_async.listdirs(job))
               for job in job_dirs_fut.get_result()]

    records = []
    for job, builds_fut in per_job:
        for build in builds_fut.get_result():
            records.append([
                name_of(job),
                name_of(build),
                gcs_async.read('/%sstarted.json' % build),
                gcs_async.read('/%sfinished.json' % build)])
    # sort newest-first within each job
    records.sort(key=lambda rec: (rec[0], view_base.pad_numbers(rec[1])),
                 reverse=True)

    jobs = {}
    for job, build, started_fut, finished_fut in records:
        jobs.setdefault(job, []).append(
            (build, decode(started_fut), decode(finished_fut)))
    return jobs
def build_list(job_dir, before):
    '''
    Given a job dir, give a (partial) list of recent build finished.jsons.

    Args:
        job_dir: the GCS path holding the jobs
        before: optional build number; only builds older than it are listed
    Returns:
        a list of (build, started, finished) tuples. build is a string like
        "123"; started/finished are each either None or the parsed dict of
        the corresponding started.json / finished.json.
    '''
    # Kick off the latest-build read immediately so it overlaps other work.
    latest_fut = gcs_async.read(job_dir + 'latest-build.txt')
    try:
        if 'pr-logs' in job_dir:
            raise ValueError('bad code path for PR build list')
        # Fast path: latest-build.txt spares us an expensive GCS ls call.
        latest_build = int(latest_fut.get_result())
        if before:
            latest_build = int(before) - 1
        else:
            # latest-build.txt holds the newest *finished* build; probe for
            # newer builds that have started but not finished.
            while gcs_async.read(
                    '%s%s/started.json'
                    % (job_dir, latest_build + 1)).get_result():
                latest_build += 1
        builds = range(latest_build, max(0, latest_build - 40), -1)
    except (ValueError, TypeError):
        # Slow path: list the bucket and take the numbered directories.
        fstats = view_base.gcs_ls(job_dir)
        fstats.sort(key=lambda f: view_base.pad_numbers(f.filename),
                    reverse=True)
        builds = [os.path.basename(os.path.dirname(f.filename))
                  for f in fstats if f.is_dir]
        if before and before in builds:
            builds = builds[builds.index(before) + 1:]
        builds = builds[:40]

    build_futures = [
        (build,
         gcs_async.read('%s%s/started.json' % (job_dir, build)),
         gcs_async.read('%s%s/finished.json' % (job_dir, build)))
        for build in builds]

    def load(future):
        # parse a JSON blob, treating missing/empty files as None
        blob = future.get_result()
        return json.loads(blob) if blob else None

    return [(str(build), load(started), load(finished))
            for build, started, finished in build_futures]
def get_build_numbers(job_dir, before, indirect):
    """Return up to 40 build identifiers under job_dir, newest first."""
    fstats = sorted(view_base.gcs_ls(job_dir),
                    key=lambda f: view_base.pad_numbers(f.filename),
                    reverse=True)
    if indirect:
        # builds are referenced through numbered .txt pointer files
        builds = []
        for entry in fstats:
            if entry.is_dir:
                continue
            match = re.search(r'/(\d*)\.txt$', entry.filename)
            if match:
                builds.append(match.group(1))
    else:
        builds = [os.path.basename(os.path.dirname(entry.filename))
                  for entry in fstats if entry.is_dir]
    if before and before in builds:
        # resume listing strictly after the `before` marker
        builds = builds[builds.index(before) + 1:]
    return builds[:40]
def get_build_numbers(job_dir, before, indirect):
    """List up to 40 build numbers in job_dir, most recent first."""
    entries = view_base.gcs_ls(job_dir)
    entries.sort(key=lambda e: view_base.pad_numbers(e.filename),
                 reverse=True)
    if indirect:
        # indirect jobs store numbered .txt pointer files, not directories
        builds = [m.group(1)
                  for m in (re.search(r'/(\d*)\.txt$', e.filename)
                            for e in entries if not e.is_dir)
                  if m]
    else:
        builds = []
        for entry in entries:
            if entry.is_dir:
                builds.append(
                    os.path.basename(os.path.dirname(entry.filename)))
    if before and before in builds:
        # resume listing strictly after the `before` marker
        builds = builds[builds.index(before) + 1:]
    return builds[:40]
def pr_builds(path, pr):
    """
    Get information for all builds run by a PR.

    Args:
        path: inserted between PR_PREFIX and the PR number when building the
            GCS path -- presumably a repo prefix, possibly '' (TODO confirm
            against callers)
        pr: the PR number
    Returns:
        A dictionary of {job: [(build_number, started_json, finished.json)]}
    """
    # NOTE(review): removed a leftover debug `print` of the GCS path here --
    # it wrote to stdout on every request.
    jobs_dirs_fut = gcs_async.listdirs('%s/%s%s' % (PR_PREFIX, path, pr))

    def base(path):
        # last path component of a slash-terminated GCS directory path
        return os.path.basename(os.path.dirname(path))

    jobs_futures = [(job, gcs_async.listdirs(job))
                    for job in jobs_dirs_fut.get_result()]
    futures = []
    for job, builds_fut in jobs_futures:
        for build in builds_fut.get_result():
            futures.append([
                base(job),
                base(build),
                gcs_async.read('/%sstarted.json' % build),
                gcs_async.read('/%sfinished.json' % build)
            ])
    # Tuple-parameter lambdas were removed in Python 3 (PEP 3113); index the
    # row instead so this sort key stays portable. Newest builds sort first.
    futures.sort(key=lambda row: (row[0], view_base.pad_numbers(row[1])),
                 reverse=True)
    jobs = {}
    for job, build, started_fut, finished_fut in futures:
        started = started_fut.get_result()
        finished = finished_fut.get_result()
        # a missing started/finished file stays None
        if started is not None:
            started = json.loads(started)
        if finished is not None:
            finished = json.loads(finished)
        jobs.setdefault(job, []).append((build, started, finished))
    return jobs
def pr_builds(path, pr):
    """
    Get information for all builds run by a PR.

    Args:
        path: inserted between PR_PREFIX and the PR number when building the
            GCS path -- presumably a repo prefix, possibly '' (TODO confirm
            against callers)
        pr: the PR number
    Returns:
        A dictionary of {job: [(build_number, started_json, finished.json)]}
    """
    # NOTE(review): dropped a stray debug `print` of the GCS path that ran on
    # every call and polluted stdout.
    jobs_dirs_fut = gcs_async.listdirs('%s/%s%s' % (PR_PREFIX, path, pr))

    def base(path):
        # last path component of a slash-terminated GCS directory path
        return os.path.basename(os.path.dirname(path))

    jobs_futures = [(job, gcs_async.listdirs(job))
                    for job in jobs_dirs_fut.get_result()]
    futures = []
    for job, builds_fut in jobs_futures:
        for build in builds_fut.get_result():
            futures.append([
                base(job),
                base(build),
                gcs_async.read('/%sstarted.json' % build),
                gcs_async.read('/%sfinished.json' % build)])
    # Index the row rather than using a Python-2-only tuple-parameter lambda
    # (removed in Python 3 by PEP 3113). Newest builds sort first.
    futures.sort(key=lambda row: (row[0], view_base.pad_numbers(row[1])),
                 reverse=True)
    jobs = {}
    for job, build, started_fut, finished_fut in futures:
        started = started_fut.get_result()
        finished = finished_fut.get_result()
        # a missing started/finished file stays None
        if started is not None:
            started = json.loads(started)
        if finished is not None:
            finished = json.loads(finished)
        jobs.setdefault(job, []).append((build, started, finished))
    return jobs
def test_pad_numbers(self):
    # pad_numbers zero-pads every digit run to 16 characters so that
    # lexicographic order matches numeric order
    expected = 'a' + '3'.zfill(16) + 'b' + '45'.zfill(16)
    self.assertEqual(view_base.pad_numbers('a3b45'), expected)