def find_log_junit(build_dir, junit, log_file):
    '''
    Search build_dir's artifacts for log_file inside a tmp-node folder
    that also contains the given junit file. Returns the GCS path of the
    log file, or None when no such folder holds both files.
    '''
    artifacts = view_base.gcs_ls('%s/artifacts' % build_dir)
    node_folders = (f.filename for f in artifacts if '/tmp-node' in f.filename)
    for folder in node_folders:
        contents = [f.filename for f in view_base.gcs_ls(folder)]
        if folder + junit not in contents:
            continue
        candidate = folder + log_file
        if candidate in contents:
            return candidate
def find_log_junit(args):
    '''
    Looks in build_dir for log_file in a folder that also includes the
    junit file.

    Args:
        args: a (build_dir, junit, log_file) tuple. Kept as one tuple
            argument to preserve the original calling convention -- the
            Python 2 tuple-parameter syntax used here was removed by
            PEP 3113 and is a syntax error in Python 3.
    Returns:
        the GCS path of the log file, or None if it was not found.
    '''
    # PEP 3113 fix: unpack explicitly instead of in the def signature.
    build_dir, junit, log_file = args
    tmps = [f.filename
            for f in view_base.gcs_ls('%s/artifacts' % build_dir)
            if '/tmp-node' in f.filename]
    for folder in tmps:
        filenames = [f.filename for f in view_base.gcs_ls(folder)]
        if folder + junit in filenames:
            path = folder + log_file
            if path in filenames:
                return path
def get(self, prefix, job):
    """Render the build list page for one job, newest builds first."""
    job_dir = '/%s/%s/' % (prefix, job)
    listing = view_base.gcs_ls(job_dir)
    sort_key = lambda stat: view_base.pad_numbers(stat.filename)
    listing.sort(key=sort_key, reverse=True)
    context = dict(job=job, job_dir=job_dir, fstats=listing)
    self.render('build_list.html', context)
def build_details(build_dir):
    """
    Collect information from a build directory.
    Args:
        build_dir: GCS path containing a build's results.
    Returns:
        started: value from started.json {'version': ..., 'timestamp': ...}
        finished: value from finished.json {'timestamp': ..., 'result': ...}
        results: {total: int, failed: [(name, duration, text)...],
            skipped: [name...], passed: [name...]}
        or None when the directory has neither metadata file.
    """
    # Start both metadata reads concurrently; resolve finished first while
    # started's future is still in flight.
    started_fut = gcs_async.read(build_dir + '/started.json')
    finished = gcs_async.read(build_dir + '/finished.json').get_result()
    started = started_fut.get_result()
    # A build may have only one of the two files. Substitute the JSON
    # literal 'null' for the missing side so json.loads below yields None
    # instead of raising.
    if finished and not started:
        started = 'null'
    if started and not finished:
        finished = 'null'
    elif not (started and finished):
        # Neither file exists -- this is not a build directory.
        return
    started = json.loads(started)
    finished = json.loads(finished)
    # Only junit_*.xml files directly under artifacts/ are parsed.
    junit_paths = [f.filename
                   for f in view_base.gcs_ls('%s/artifacts' % build_dir)
                   if re.match(r'junit_.*\.xml', os.path.basename(f.filename))]
    # Kick off every read before resolving any, to overlap the fetches.
    junit_futures = {f: gcs_async.read(f) for f in junit_paths}
    parser = JUnitParser()
    for path, future in junit_futures.iteritems():
        parser.parse_xml(future.get_result(), path)
    return started, finished, parser.get_results()
def get(self, prefix, job):
    """Show recent builds of a job in descending numeric order."""
    job_dir = '/%s/%s/' % (prefix, job)
    fstats = sorted(view_base.gcs_ls(job_dir),
                    key=lambda f: view_base.pad_numbers(f.filename),
                    reverse=True)
    self.render('build_list.html',
                dict(job=job, job_dir=job_dir, fstats=fstats))
def get_build_numbers(job_dir, before, indirect):
    """
    Return up to 40 recent build numbers for a job, newest first.

    Tries a cheap latest-build.txt lookup first, falling back to an
    expensive directory listing on any failure.

    Args:
        job_dir: GCS path of the job.
        before: if set, only return builds older than this build number.
        indirect: if true, builds are recorded as numbered .txt pointer
            files instead of numbered directories.
    """
    try:
        # PR build lists can't use the fast path; force the fallback.
        if '/pull/' in job_dir and not indirect:
            raise ValueError('bad code path for PR build list')
        # If we have latest-build.txt, we can skip an expensive GCS ls call!
        if before:
            latest_build = int(before) - 1
        else:
            latest_build = int(gcs_async.read(job_dir + 'latest-build.txt').get_result())
        # latest-build.txt has the most recent finished build. There might
        # be newer builds that have started but not finished. Probe for them.
        suffix = '/started.json' if not indirect else '.txt'
        while gcs_async.read('%s%s%s' % (job_dir, latest_build + 1, suffix)).get_result():
            latest_build += 1
        return range(latest_build, max(0, latest_build - 40), -1)
    except (ValueError, TypeError):
        # Fallback: list the directory and parse build numbers out of it.
        # TypeError covers a missing latest-build.txt (int(None));
        # ValueError covers unparsable contents or the PR path above.
        fstats = view_base.gcs_ls(job_dir)
        fstats.sort(key=lambda f: view_base.pad_numbers(f.filename), reverse=True)
        if indirect:
            # find numbered builds
            builds = [re.search(r'/(\d*)\.txt$', f.filename)
                      for f in fstats if not f.is_dir]
            builds = [m.group(1) for m in builds if m]
        else:
            builds = [os.path.basename(os.path.dirname(f.filename))
                      for f in fstats if f.is_dir]
        if before and before in builds:
            # Drop `before` itself and everything newer.
            builds = builds[builds.index(before) + 1:]
        return builds[:40]
def get_build_numbers(job_dir, before, indirect):
    """
    Return up to 40 recent build numbers for a job, newest first.

    Uses latest-build.txt to avoid a GCS ls when possible; any failure
    (missing or malformed file, or the forced PR path) falls back to a
    full directory listing.

    Args:
        job_dir: GCS path of the job.
        before: if set, only return builds older than this build number.
        indirect: if true, builds are recorded as numbered .txt pointer
            files instead of numbered directories.
    """
    try:
        # PR build lists can't use the fast path; force the fallback.
        if 'pr-logs' in job_dir and not indirect:
            raise ValueError('bad code path for PR build list')
        # If we have latest-build.txt, we can skip an expensive GCS ls call!
        if before:
            latest_build = int(before) - 1
        else:
            latest_build = int(gcs_async.read(job_dir + 'latest-build.txt').get_result())
        # latest-build.txt has the most recent finished build. There might
        # be newer builds that have started but not finished. Probe for them.
        suffix = '/started.json' if not indirect else '.txt'
        while gcs_async.read('%s%s%s' % (job_dir, latest_build + 1, suffix)).get_result():
            latest_build += 1
        return range(latest_build, max(0, latest_build - 40), -1)
    except (ValueError, TypeError):
        # Fallback: list the directory and parse build numbers out of it.
        # TypeError covers a missing latest-build.txt (int(None));
        # ValueError covers unparsable contents or the PR path above.
        fstats = view_base.gcs_ls(job_dir)
        fstats.sort(key=lambda f: view_base.pad_numbers(f.filename), reverse=True)
        if indirect:
            # find numbered builds
            builds = [re.search(r'/(\d*)\.txt$', f.filename)
                      for f in fstats if not f.is_dir]
            builds = [m.group(1) for m in builds if m]
        else:
            builds = [os.path.basename(os.path.dirname(f.filename))
                      for f in fstats if f.is_dir]
        if before and before in builds:
            # Drop `before` itself and everything newer.
            builds = builds[builds.index(before) + 1:]
        return builds[:40]
def get_all_logs(args):
    '''
    Returns a dictionary given the artifacts folder with the keys being
    the folders, and the values being the log files within the
    corresponding folder.

    Args:
        args: a (directory, artifacts) tuple. Kept as one tuple argument
            to preserve the original calling convention -- the Python 2
            tuple-parameter syntax used here was removed by PEP 3113 and
            is a syntax error in Python 3.
    '''
    # PEP 3113 fix: unpack explicitly instead of in the def signature.
    directory, artifacts = args
    log_files = {}
    if artifacts:
        # Scan every subdirectory of <directory>/artifacts.
        dirs = [f.filename
                for f in view_base.gcs_ls('%s/artifacts' % directory)
                if f.is_dir]
    else:
        dirs = [directory]
    for d in dirs:
        log_files[d] = []
        for f in view_base.gcs_ls(d):
            # Keep only filenames that look like logs.
            log_name = regex.log_re.search(f.filename)
            if log_name:
                log_files[d].append(f.filename)
    return log_files
def get_all_logs(directory, artifacts):
    '''
    Map each log folder to the log files it contains.

    When artifacts is true, every directory under <directory>/artifacts
    is scanned; otherwise only <directory> itself is. Returns a dict of
    {folder: [log filenames]}.
    '''
    if artifacts:
        folders = [f.filename
                   for f in view_base.gcs_ls('%s/artifacts' % directory)
                   if f.is_dir]
    else:
        folders = [directory]
    log_files = {}
    for folder in folders:
        log_files[folder] = [f.filename for f in view_base.gcs_ls(folder)
                             if regex.log_re.search(f.filename)]
    return log_files
def build_list(job_dir, before):
    '''
    Given a job dir, give a (partial) list of recent build finished.jsons.
    Args:
        job_dir: the GCS path holding the jobs
        before: if set, only include builds older than this build number.
    Returns:
        a list of [(build, started, finished)]. build is a string like
        "123"; started and finished are either None or the parsed dicts
        from started.json / finished.json.
    '''
    # Start the latest-build.txt read immediately; it is resolved inside
    # the try below.
    latest_fut = gcs_async.read(job_dir + 'latest-build.txt')
    try:
        # PR build lists can't use the fast path; force the fallback.
        if 'pr-logs' in job_dir:
            raise ValueError('bad code path for PR build list')
        # If we have latest-build.txt, we can skip an expensive GCS ls call!
        latest_build = int(latest_fut.get_result())
        if before:
            latest_build = int(before) - 1
        else:
            # latest-build.txt has the most recent finished build. There might
            # be newer builds that have started but not finished. Probe for them.
            while gcs_async.read('%s%s/started.json' % (job_dir, latest_build + 1)).get_result():
                latest_build += 1
        builds = range(latest_build, max(0, latest_build - 40), -1)
    except (ValueError, TypeError):
        # Fallback: latest-build.txt was missing (int(None) -> TypeError)
        # or malformed (ValueError), or this is the PR path above.
        fstats = view_base.gcs_ls(job_dir)
        fstats.sort(key=lambda f: view_base.pad_numbers(f.filename), reverse=True)
        builds = [
            os.path.basename(os.path.dirname(f.filename))
            for f in fstats if f.is_dir
        ]
        if before and before in builds:
            # Drop `before` itself and everything newer.
            builds = builds[builds.index(before) + 1:]
        builds = builds[:40]
    # Kick off every started/finished read before resolving any, so the
    # fetches overlap.
    build_futures = [(build,
                      gcs_async.read('%s%s/started.json' % (job_dir, build)),
                      gcs_async.read('%s%s/finished.json' % (job_dir, build)))
                     for build in builds]

    def resolve(future):
        # Parse a future's JSON payload; returns None if the file was absent.
        res = future.get_result()
        if res:
            return json.loads(res)

    return [(str(build), resolve(started), resolve(finished))
            for build, started, finished in build_futures]
def build_details(build_dir):
    """
    Collect information from a build directory.
    Args:
        build_dir: GCS path containing a build's results.
    Returns:
        started: value from started.json {'version': ..., 'timestamp': ...}
        finished: value from finished.json {'timestamp': ..., 'result': ...}
        failures: list of (name, duration, text) tuples
        build_log: a hilighted portion of errors in the build log. May be None.
        or None when the directory has neither metadata file.
    """
    # Start both metadata reads concurrently; resolve finished first while
    # started's future is still in flight.
    started_fut = gcs_async.read(build_dir + '/started.json')
    finished = gcs_async.read(build_dir + '/finished.json').get_result()
    started = started_fut.get_result()
    # Substitute the JSON literal 'null' for whichever metadata file is
    # missing, so json.loads below maps it to None instead of raising.
    if finished and not started:
        started = 'null'
    if started and not finished:
        finished = 'null'
    elif not (started and finished):
        # Neither file exists -- this is not a build directory.
        return
    started = json.loads(started)
    finished = json.loads(finished)
    failures = []
    # Only junit_*.xml files directly under artifacts/ are parsed.
    junit_paths = [
        f.filename for f in view_base.gcs_ls('%s/artifacts' % build_dir)
        if re.match(r'junit_.*\.xml', os.path.basename(f.filename))
    ]
    # Kick off every read before resolving any, to overlap the fetches.
    junit_futures = {}
    for f in junit_paths:
        junit_futures[gcs_async.read(f)] = f
    for future in junit_futures:
        junit = future.get_result()
        if junit is None:
            continue
        failures.extend(parse_junit(junit, junit_futures[future]))
    failures.sort()
    build_log = None
    # If the build failed but junit recorded no failures, fall back to
    # scraping errors out of the raw build log.
    if finished and finished.get('result') != 'SUCCESS' and len(failures) == 0:
        build_log = gcs_async.read(build_dir + '/build-log.txt').get_result()
        if build_log:
            build_log = log_parser.digest(build_log.decode('utf8', 'replace'))
            logging.info('fallback log parser emitted %d lines',
                         build_log.count('\n'))
    return started, finished, failures, build_log
def get_build_numbers(job_dir, before, indirect):
    """List up to 40 build numbers for a job, newest first.

    Indirect jobs are represented by numbered .txt pointer files; direct
    jobs by numbered directories. If `before` is given and present, only
    builds older than it are returned.
    """
    fstats = view_base.gcs_ls(job_dir)
    fstats.sort(key=lambda f: view_base.pad_numbers(f.filename), reverse=True)
    if indirect:
        # numbered .txt files point at the real build locations
        matches = (re.search(r'/(\d*)\.txt$', f.filename)
                   for f in fstats if not f.is_dir)
        builds = [m.group(1) for m in matches if m]
    else:
        builds = [os.path.basename(os.path.dirname(f.filename))
                  for f in fstats if f.is_dir]
    if before and before in builds:
        # discard `before` itself and everything newer
        del builds[:builds.index(before) + 1]
    return builds[:40]
def build_details(build_dir):
    """
    Collect information from a build directory.
    Args:
        build_dir: GCS path containing a build's results.
    Returns:
        started: value from started.json {'version': ..., 'timestamp': ...}
        finished: value from finished.json {'timestamp': ..., 'result': ...}
        failures: list of (name, duration, text) tuples
        build_log: a hilighted portion of errors in the build log. May be None.
        or None when the directory has neither metadata file.
    """
    # Resolve finished.json first while started.json's future is in flight.
    started_fut = gcs_async.read(build_dir + '/started.json')
    finished = gcs_async.read(build_dir + '/finished.json').get_result()
    started = started_fut.get_result()
    # Use the JSON literal 'null' for a missing metadata file so the
    # json.loads calls below yield None for that side.
    if finished and not started:
        started = 'null'
    if started and not finished:
        finished = 'null'
    elif not (started and finished):
        # Neither file exists -- nothing to report.
        return
    started = json.loads(started)
    finished = json.loads(finished)
    failures = []
    # Only junit_*.xml files directly under artifacts/ are considered.
    junit_paths = [f.filename
                   for f in view_base.gcs_ls('%s/artifacts' % build_dir)
                   if re.match(r'junit_.*\.xml', os.path.basename(f.filename))]
    # Issue all reads up front so they overlap, then drain the futures.
    junit_futures = {}
    for f in junit_paths:
        junit_futures[gcs_async.read(f)] = f
    for future in junit_futures:
        junit = future.get_result()
        if junit is None:
            continue
        failures.extend(parse_junit(junit, junit_futures[future]))
    failures.sort()
    build_log = None
    # Failed build with no junit failures: fall back to digesting the raw
    # build log for error highlights.
    if finished and finished.get('result') != 'SUCCESS' and len(failures) == 0:
        build_log = gcs_async.read(build_dir + '/build-log.txt').get_result()
        if build_log:
            build_log = log_parser.digest(build_log.decode('utf8', 'replace'))
            logging.info('fallback log parser emitted %d lines',
                         build_log.count('\n'))
    return started, finished, failures, build_log
def get_build_numbers(job_dir, before, indirect):
    """Return at most 40 recent build numbers from a job directory,
    sorted newest first. Builds newer than (and including) `before` are
    excluded when `before` is present in the listing.
    """
    stats = sorted(view_base.gcs_ls(job_dir),
                   key=lambda f: view_base.pad_numbers(f.filename),
                   reverse=True)
    builds = []
    if indirect:
        # builds are recorded as numbered .txt pointer files
        for stat in stats:
            if stat.is_dir:
                continue
            match = re.search(r'/(\d*)\.txt$', stat.filename)
            if match:
                builds.append(match.group(1))
    else:
        # builds are numbered directories
        for stat in stats:
            if stat.is_dir:
                builds.append(os.path.basename(os.path.dirname(stat.filename)))
    if before and before in builds:
        builds = builds[builds.index(before) + 1:]
    return builds[:40]
def build_details(build_dir):
    """
    Collect information from a build directory.
    Args:
        build_dir: GCS path containing a build's results.
    Returns:
        started: value from started.json {'version': ..., 'timestamp': ...}
        finished: value from finished.json {'timestamp': ..., 'result': ...}
        failures: list of (name, duration, text) tuples
        or None when the directory has neither metadata file.
    """
    # Start both metadata reads concurrently; resolve finished first while
    # started's future is still in flight.
    started_fut = gcs_async.read(build_dir + '/started.json')
    finished = gcs_async.read(build_dir + '/finished.json').get_result()
    started = started_fut.get_result()
    # Substitute the JSON literal 'null' for whichever metadata file is
    # missing, so json.loads below maps it to None instead of raising.
    if finished and not started:
        started = 'null'
    if started and not finished:
        finished = 'null'
    elif not (started and finished):
        # Neither file exists -- this is not a build directory.
        return
    started = json.loads(started)
    finished = json.loads(finished)
    failures = []
    # Only junit_*.xml files directly under artifacts/ are parsed.
    junit_paths = [
        f.filename for f in view_base.gcs_ls('%s/artifacts' % build_dir)
        if re.match(r'junit_.*\.xml', os.path.basename(f.filename))
    ]
    # Kick off every read before resolving any, to overlap the fetches.
    junit_futures = {}
    for f in junit_paths:
        junit_futures[gcs_async.read(f)] = f
    for future in junit_futures:
        junit = future.get_result()
        if not junit:
            continue
        failures.extend(parse_junit(junit, junit_futures[future]))
    failures.sort()
    return started, finished, failures
def build_details(build_dir):
    """
    Collect information from a build directory.
    Args:
        build_dir: GCS path containing a build's results.
    Returns:
        started: value from started.json {'version': ..., 'timestamp': ...}
        finished: value from finished.json {'timestamp': ..., 'result': ...}
        failures: list of (name, duration, text) tuples
        or None when the directory has neither metadata file.
    """
    # Resolve finished.json first while started.json's future is in flight.
    started_fut = gcs_async.read(build_dir + '/started.json')
    finished = gcs_async.read(build_dir + '/finished.json').get_result()
    started = started_fut.get_result()
    # Use the JSON literal 'null' for a missing metadata file so the
    # json.loads calls below yield None for that side.
    if finished and not started:
        started = 'null'
    if started and not finished:
        finished = 'null'
    elif not (started and finished):
        # Neither file exists -- nothing to report.
        return
    started = json.loads(started)
    finished = json.loads(finished)
    failures = []
    # Only junit_*.xml files directly under artifacts/ are considered.
    junit_paths = [f.filename
                   for f in view_base.gcs_ls('%s/artifacts' % build_dir)
                   if re.match(r'junit_.*\.xml', os.path.basename(f.filename))]
    # Issue all reads up front so they overlap, then drain the futures.
    junit_futures = {}
    for f in junit_paths:
        junit_futures[gcs_async.read(f)] = f
    for future in junit_futures:
        junit = future.get_result()
        if not junit:
            continue
        failures.extend(parse_junit(junit, junit_futures[future]))
    failures.sort()
    return started, finished, failures
def build_details(build_dir, recursive=False):
    """
    Collect information from a build directory.
    Args:
        build_dir: GCS path containing a build's results.
        recursive: Whether to scan artifacts recursively for XML files.
    Returns:
        started: value from started.json {'version': ..., 'timestamp': ...}
        finished: value from finished.json {'timestamp': ..., 'result': ...}
        results: {total: int, failed: [(name, duration, text)...],
            skipped: [name...], passed: [name...]}
    """
    # normalize_metadata resolves both futures and canonicalizes the
    # started/finished payloads.
    started, finished = normalize_metadata(
        gcs_async.read(build_dir + '/started.json'),
        gcs_async.read(build_dir + '/finished.json'))
    if started is None and finished is None:
        # Neither metadata file exists -- not a valid build directory.
        return started, finished, None
    if recursive:
        artifact_paths = view_base.gcs_ls_recursive('%s/artifacts' % build_dir)
    else:
        artifact_paths = view_base.gcs_ls('%s/artifacts' % build_dir)
    # Any .xml artifact is treated as a junit result file here.
    junit_paths = [
        f.filename for f in artifact_paths if f.filename.endswith('.xml')
    ]
    # Kick off every read before resolving any, to overlap the fetches.
    junit_futures = {f: gcs_async.read(f) for f in junit_paths}
    parser = JUnitParser()
    for path, future in junit_futures.iteritems():
        parser.parse_xml(future.get_result(), path)
    return started, finished, parser.get_results()
def build_details(build_dir, recursive=False):
    """
    Collect information from a build directory.
    Args:
        build_dir: GCS path containing a build's results.
        recursive: Whether to scan artifacts recursively for XML files.
    Returns:
        started: value from started.json {'version': ..., 'timestamp': ...}
        finished: value from finished.json {'timestamp': ..., 'result': ...}
        results: {total: int, failed: [(name, duration, text)...],
            skipped: [name...], passed: [name...]}
    """
    # Both metadata futures are handed to normalize_metadata, which
    # resolves them and canonicalizes the payloads.
    started, finished = normalize_metadata(
        gcs_async.read(build_dir + '/started.json'),
        gcs_async.read(build_dir + '/finished.json')
    )
    if started is None and finished is None:
        # Neither metadata file exists -- not a valid build directory.
        return started, finished, None
    if recursive:
        artifact_paths = view_base.gcs_ls_recursive('%s/artifacts' % build_dir)
    else:
        artifact_paths = view_base.gcs_ls('%s/artifacts' % build_dir)
    # Every .xml artifact is fed to the junit parser.
    junit_paths = [f.filename
                   for f in artifact_paths if f.filename.endswith('.xml')]
    # Issue all reads up front so they overlap, then drain the futures.
    junit_futures = {f: gcs_async.read(f) for f in junit_paths}
    parser = JUnitParser()
    for path, future in junit_futures.iteritems():
        parser.parse_xml(future.get_result(), path)
    return started, finished, parser.get_results()
def get(self, prefix):
    """Render the job listing page for the given bucket prefix."""
    jobs_dir = '/%s' % prefix
    listing = sorted(view_base.gcs_ls(jobs_dir))
    self.render('job_list.html', dict(jobs_dir=jobs_dir, fstats=listing))
def get(self, prefix):
    """Show every job directory under a prefix, in sorted order."""
    jobs_dir = '/%s' % prefix
    stats = view_base.gcs_ls(jobs_dir)
    stats.sort()
    context = {'jobs_dir': jobs_dir, 'fstats': stats}
    self.render('job_list.html', context)