def get(self):
    """Collect trace events for builds not yet in BigQuery and upload them.

    For every known master, diffs the builds available on buildbot against
    the builds already recorded in BigQuery, converts the unrecorded builds
    to trace events, then inserts the events asynchronously and waits for
    the insert jobs to finish.
    """
    urlfetch.set_default_fetch_deadline(120)
    client = bigquery.BigQuery()

    current_events = []
    events = []
    for master_name in constants.MASTER_NAMES:
        builders = buildbot.Builders(master_name)
        available = _AvailableBuilds(builders)
        recorded = _RecordedBuilds(client, builders, available)
        for builder in builders:
            # Only process builds that have not been recorded yet.
            unrecorded = available[builder.name] - recorded[builder.name]
            builder_current, builder_finished = _TraceEventsForBuilder(
                builder, unrecorded)
            current_events.extend(builder_current)
            events.extend(builder_finished)

    jobs = []
    if current_events:
        # The current-builds table is replaced wholesale on every run.
        jobs.extend(client.InsertRowsAsync(
            constants.DATASET, constants.CURRENT_BUILDS_TABLE,
            current_events, truncate=True))
    if events:
        jobs.extend(client.InsertRowsAsync(
            constants.DATASET, constants.BUILDS_TABLE, events))

    for job in jobs:
        client.PollJob(job, 60 * 20)  # 20 minutes.
def get(self):
    """Handle a trace-event query: parse filters, query BigQuery, emit JSON.

    Responds with a compact JSON array of trace events, or a JSON object
    of the form {"error": "..."} when the request filters are invalid.
    """
    urlfetch.set_default_fetch_deadline(60)
    # The response is JSON on both the success and the error path.
    self.response.headers['Content-Type'] = 'application/json'
    try:
        filters = query_filter.Filters(self.request)
    except ValueError as e:
        # Bug fix: the original wrote the dict object directly, which emits
        # its Python repr (single quotes) — not valid JSON despite the
        # application/json Content-Type. Serialize it properly instead.
        self.response.out.write(json.dumps({'error': str(e)}))
        return
    query_results = _QueryEvents(bigquery.BigQuery(), **filters)
    trace_events = list(_ConvertQueryEventsToTraceEvents(query_results))
    self.response.out.write(json.dumps(trace_events, separators=(',', ':')))