import argparse
import collections
import datetime
import json
import os
import Queue
import re
import sys
from threading import Thread

# Append the path of the Findit root directory to import remote_api.
_FINDIT_DIR = os.path.join(
    os.path.dirname(__file__), os.path.pardir, os.path.pardir)
sys.path.insert(0, _FINDIT_DIR)

# Activate script as findit prod and set up sys.path properly.
from local_libs import remote_api
remote_api.EnableFinditRemoteApi()

# Add GAE and Findit imports below.
from google.appengine.ext import db
from google.appengine.ext import ndb

from libs import analysis_status
from libs import time_util
from model.flake.analysis.master_flake_analysis import MasterFlakeAnalysis
from pipeline.models import _PipelineRecord
from pipeline import pipeline

_FLAKE_URL_TEMPLATE = ('https://analysis.chromium.org'
                       '/p/chromium/flake-portal/analysis/analyze?key=%s')


def main():
  # TODO: Add options to limit the date range to fetch.
  # TODO: Add options to display a summary of the fetched info.
  remote_api.EnableFinditRemoteApi()

  # Reuse previously saved analyses if present; otherwise fetch the full
  # [START_DATE, END_DATE) range. On a cache hit, fetch only the parts of the
  # range that are not already stored.
  try:
    all_analyses, stored_start, stored_end, time_records = _LoadAnalyses()
  except IOError:
    all_analyses = _FetchAnalyses(START_DATE, END_DATE)
    time_records = {}
    _SaveAnalyses(all_analyses, time_records)
  else:
    if stored_start > START_DATE:
      all_analyses = _FetchAnalyses(START_DATE, stored_start) + all_analyses
    if END_DATE > stored_end:
      all_analyses += _FetchAnalyses(stored_end, END_DATE)
    _SaveAnalyses(all_analyses, time_records)

  saved_count = len(time_records)

  # Start a pool of daemon worker threads that take analyses from q and put
  # (key, record) results on r.
  q = Queue.Queue()
  r = Queue.Queue()
  for _ in range(THREAD_COUNT):
    t = Thread(target=_GetTimes, args=(q, r))
    t.daemon = True
    t.start()

  # Only enqueue analyses whose times have not been computed yet.
  for a in all_analyses:
    if a.key not in time_records:
      q.put(a)

  # Drain results as they arrive, checkpointing to disk whenever more than
  # THREAD_COUNT new records have accumulated since the last save.
  while not (q.empty() and r.empty()):
    key, record = r.get()
    time_records[key] = _Denest(record)
    print len(time_records)
    if saved_count + THREAD_COUNT < len(time_records):
      _SaveAnalyses(all_analyses, time_records)
      saved_count = len(time_records)
      print 'Saved %d results' % saved_count
  q.join()
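

# For reference, a minimal sketch of the worker protocol main() relies on.
# This is an illustration, not the real _GetTimes (that helper, along with
# _LoadAnalyses, _FetchAnalyses, _SaveAnalyses and _Denest, is defined
# elsewhere in this file), and the time-gathering step is a placeholder.
# Each worker loops forever, takes one analysis from the input queue, puts a
# (key, record) pair on the output queue, and calls task_done() so that the
# final q.join() in main() can return once every enqueued analysis has been
# processed.
#
# def _GetTimes(task_queue, result_queue):
#   while True:
#     analysis = task_queue.get()
#     try:
#       record = ...  # Placeholder: gather timing info for this analysis.
#       result_queue.put((analysis.key, record))
#     finally:
#       task_queue.task_done()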