def main():
    try:
        settings = startup.read_settings(filename="file2db_settings.json")
        Log.start(settings.debug)
        with DB(settings.db) as db:
            # REBUILD THE b2g_tests TABLE FROM SCRATCH
            db.execute("""
                DROP TABLE IF EXISTS b2g_tests
            """)
            db.execute("""
                CREATE TABLE b2g_tests (
                    id INTEGER PRIMARY KEY NOT NULL,
                    branch VARCHAR(100),
                    name VARCHAR(100),
                    version VARCHAR(100),
                    suite VARCHAR(200),
                    revision VARCHAR(100),
                    `date` LONG
                )
            """)
            file2db(db, "b2g_tests", settings.source_file)
    except Exception, e:
        Log.error("can not seem to start up", e)
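# A minimal sketch of the loader this script depends on (hypothetical; the real
# file2db is imported from elsewhere).  It assumes the source file is
# tab-delimited, with columns in the same order as the table definition above,
# and that the DB wrapper exposes an insert(table_name, record) method.
def _file2db_sketch(db, table_name, filename):
    columns = ["id", "branch", "name", "version", "suite", "revision", "date"]
    with open(filename, "rb") as f:
        for line in f:
            values = line.rstrip("\n").split("\t")
            db.insert(table_name, dict(zip(columns, values)))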
def check_for_errors(self, logs, path):
    try:
        errors = [l for l in logs if l.type == "ERROR"]
        if errors:
            Log.error("Problem found in {{page}}:\n{{error|indent}}", {
                "page": path,
                "error": errors[0]
            })
    finally:
        self.close()
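# Example call (hypothetical names): run as the last check performed on a page,
# since the finally block above closes the browser whether or not an ERROR was
# found.
#
#     page.check_for_errors(page.get_logs(), path)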
def main():
    try:
        settings = startup.read_settings(defs=[{
            "name": ["--no_restart", "--no_reset", "--no_redo", "--norestart", "--noreset", "--noredo"],
            "help": "do not allow creation of a new index (for debugging rogue resets)",
            "action": "store_true",
            "dest": "no_restart"
        }, {
            "name": ["--restart", "--reset", "--redo"],
            "help": "force a reprocessing of all data",
            "action": "store_true",
            "dest": "restart"
        }, {
            "name": ["--file", "--scan_file", "--scanfile", "--use_file", "--usefile"],
            "help": "scan file for missing ids",
            "action": "store_true",
            "dest": "scan_file"
        }, {
            "name": ["--nofile", "--no_file", "--no-file"],
            "help": "do not scan file for missing ids",
            "action": "store_false",
            "dest": "scan_file"
        }])
        Log.start(settings.debug)

        with startup.SingleInstance(flavor_id=settings.args.filename):
            settings.production.threads = nvl(settings.production.threads, 1)
            settings.param.output_file = nvl(settings.param.output_file, "./results/raw_json_blobs.tab")

            transformer = DZ_to_ES(settings.pushlog)

            # RESET ONLY IF A NEW Transform IS USED
            if settings.args.restart:
                es = Cluster(settings.elasticsearch).create_index(settings.elasticsearch)
                es.add_alias()
                es.delete_all_but_self()
            else:
                es = Cluster(settings.elasticsearch).get_or_create_index(settings.elasticsearch)
            extract_from_datazilla_using_id(es, settings, transformer)
    except Exception, e:
        Log.error("Problem with etl", e)
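# Example invocations (hypothetical script name; the --settings flag is assumed
# to be how read_settings() locates its settings file):
#
#     python etl.py --settings=etl_settings.json              # resume where we left off
#     python etl.py --settings=etl_settings.json --restart    # rebuild the index from scratch
#     python etl.py --settings=etl_settings.json --no-file    # do not scan file for missing ids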
def _wait_for_stable(self, detect_function, timeout):
    """
    WAIT FOR RESULTS OF detect_function TO BE STABLE
    """
    if not isinstance(timeout, timedelta):
        Log.error("Expecting a timeout as a timedelta")

    detectTime = Date.now()
    oldValue = "probably never an initial value"
    newValue = detect_function()
    while True:
        now = Date.now()
        potentialValue = detect_function()
        if potentialValue != newValue:
            oldValue = newValue
            newValue = potentialValue
            detectTime = now
        if now - detectTime > timeout:
            return
        Thread.sleep(seconds=0.5)
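# Example use (hypothetical): block until the number of elements on the page
# has not changed for two full seconds, then continue scraping.
#
#     self._wait_for_stable(
#         lambda: len(self.find("*")),
#         timedelta(seconds=2)
#     )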
def check_if_still_loading(self, path):
    # IF A SPINNER STILL SHOWS, THEN WE HAVE LOADING ISSUES
    isLoading = OR([e.is_displayed() for e in self.find(".loading")])
    if isLoading:
        Log.error("page still loading: {{page}}", {"page": path})
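# OR() is assumed to act like Python's built-in any() over the list of
# visibility flags, so an equivalent sketch is:
#
#     isLoading = any(e.is_displayed() for e in self.find(".loading"))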
            new_record = Struct(
                test_machine=r.test_machine,
                datazilla=r.datazilla,
                testrun=r.testrun,
                test_build=r.test_build,
                result={
                    "test_name": "summary_old",
                    "ordering": -1,
                    "stats": Stats(samples=Q.sort(total.mean)[:len(total) - 1:])
                }
            )
            new_records.append(new_record)
        return new_records
    except Exception, e:
        Log.error("Transformation failure on id={{id}}", {"id": id}, e)


def stats(values):
    """
    RETURN LOTS OF AGGREGATES
    """
    if values == None:
        return None

    values = values.map(float, includeNone=False)

    z = ZeroMoment.new_instance(values)
    s = Struct()
    for k, v in z.dict.items():
        s[k] = v
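# A minimal sketch of the bookkeeping ZeroMoment is assumed to do (hypothetical;
# the real class lives in this repo's math utilities).  Keeping only the running
# power sums S0 = sum(x^0), S1 = sum(x^1), S2 = sum(x^2) is enough to recover
# count, mean and variance without retaining the samples themselves:
def _moments_sketch(values):
    s0 = len(values)                        # sum of x^0, the count
    s1 = sum(float(v) for v in values)      # sum of x^1
    s2 = sum(float(v) * v for v in values)  # sum of x^2
    mean = s1 / s0
    variance = s2 / s0 - mean * mean        # E[x^2] - E[x]^2
    return {"count": s0, "mean": mean, "variance": variance}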
{"id": id} for id in Q.sort(missing_ids)[:nvl(settings.production.step, NUM_PER_BATCH):] ]) for result in results: if not result: num_not_found += 1 if num_not_found > nvl(settings.production.max_tries, 10): many.inbound.pop_all() # CLEAR THE QUEUE OF OTHER WORK many.stop() break else: num_not_found = 0 except (KeyboardInterrupt, SystemExit): Log.println("Shutdown Started, please be patient") except Exception, e: Log.error("Unusual shutdown!", e) #FINISH ES SETUP SO IT CAN BE QUERIED es.set_refresh_interval(1) es.delete_all_but_self() es.add_alias() def main(): try: settings = startup.read_settings(defs=[{ "name": ["--no_restart", "--no_reset", "--no_redo", "--norestart", "--noreset", "--noredo"], "help": "do not allow creation of new index (for debugging rouge resets)", "action": "store_true", "dest": "no_restart" }, {