def handle_project(self, project, **options):
    """
    Backfill push-log data for *project* test runs that are missing from
    the all-dimensions tables, looking back ``numdays`` days (default 1).

    Options:
        numdays: how many days back to search for incomplete test runs.
    """
    numdays = int(options.get("numdays", 1))
    now = int(time.time())
    # timedelta.total_seconds() replaces the original hand-rolled
    # microseconds/days arithmetic; int() keeps the cutoff integral,
    # matching the integer `now` it is subtracted from.
    time_constraint = now - int(timedelta(days=numdays).total_seconds())

    mtm = MetricsTestModel(project)
    test_run_ids = mtm.get_test_runs_not_in_all_dimensions(time_constraint)
    self.stdout.write("test run ids {0}\n".format(str(len(test_run_ids))))

    # Make a list of test_run_id chunks to iterate over, bounding the
    # amount of data loaded per query.
    chunk_size = 20
    test_run_id_chunks = [
        test_run_ids[i:i + chunk_size]
        for i in range(0, len(test_run_ids), chunk_size)
    ]

    plm = PushLogModel()
    try:
        for ids in test_run_id_chunks:
            self.stdout.write("Processing ids {0}\n".format(str(ids)))

            revisions_without_push_data = mtm.load_test_data_all_dimensions(
                ids)

            if revisions_without_push_data:
                # Map each revision to its push-log node, then persist
                # the whole batch in one call.
                revision_nodes = {}
                for revision in revisions_without_push_data:
                    node = plm.get_node_from_revision(
                        revision, revisions_without_push_data[revision])
                    revision_nodes[revision] = node
                mtm.set_push_data_all_dimensions(revision_nodes)
    finally:
        # Release both connections even if a chunk fails mid-loop
        # (the original leaked them on any exception).
        plm.disconnect()
        mtm.disconnect()
def handle_project(self, project, **options):
    """
    Process pending objects for *project* (up to ``loadlimit`` of them),
    then attach push-log data to any resulting revisions that are
    missing it in the all-dimensions tables.

    Options:
        pushlog_project: push-log project name (default 'pushlog').
        loadlimit: maximum number of objects to process (default 1).
        debug: debug flag, currently only consumed by the disabled
            metrics step below.
    """
    self.stdout.write("Processing project {0}\n".format(project))

    pushlog_project = options.get("pushlog_project", 'pushlog')
    loadlimit = int(options.get("loadlimit", 1))
    debug = options.get("debug", None)

    ptm = PerformanceTestModel(project)
    try:
        test_run_ids = ptm.process_objects(loadlimit)
    finally:
        # Disconnect even if processing raises (the original leaked
        # the connection on error).
        ptm.disconnect()

    # NOTE(review): metrics computation is intentionally disabled; the
    # original parked this in a bare string literal. Preserved here as
    # comments for a cleaner re-enable later.
    # metrics_exclude_projects = set(['b2g', 'games', 'jetperf',
    #     'marketapps', 'microperf', 'stoneridge', 'test', 'webpagetest'])
    # if project not in metrics_exclude_projects:
    #     # minimum required number of replicates for metrics processing
    #     replicate_min = 5
    #     compute_test_run_metrics(
    #         project, pushlog_project, debug, replicate_min, test_run_ids
    #     )

    mtm = MetricsTestModel(project)
    try:
        revisions_without_push_data = mtm.load_test_data_all_dimensions(
            test_run_ids)

        if revisions_without_push_data:
            revision_nodes = {}
            plm = PushLogModel(pushlog_project)
            try:
                for revision in revisions_without_push_data:
                    node = plm.get_node_from_revision(
                        revision, revisions_without_push_data[revision])
                    revision_nodes[revision] = node
            finally:
                plm.disconnect()
            mtm.set_push_data_all_dimensions(revision_nodes)
    finally:
        mtm.disconnect()
def handle_project(self, project, **options):
    """
    Backfill push-log data for *project* test runs absent from the
    all-dimensions tables within the last ``numdays`` days (default 1).

    Options:
        numdays: size of the look-back window in days.
    """
    numdays = int(options.get("numdays", 1))
    now = int(time.time())
    # Use the stdlib timedelta.total_seconds() instead of the original
    # hand-rolled microsecond/day conversion helper; int() keeps the
    # constraint an integer like `now`.
    time_constraint = now - int(timedelta(days=numdays).total_seconds())

    mtm = MetricsTestModel(project)
    test_run_ids = mtm.get_test_runs_not_in_all_dimensions(time_constraint)
    self.stdout.write("test run ids {0}\n".format(str(len(test_run_ids))))

    # Make a list of test_run_id chunks to iterate over so each query
    # handles at most `chunk_size` ids.
    chunk_size = 20
    test_run_id_chunks = [
        test_run_ids[i:i + chunk_size]
        for i in range(0, len(test_run_ids), chunk_size)
    ]

    plm = PushLogModel()
    try:
        for ids in test_run_id_chunks:
            self.stdout.write("Processing ids {0}\n".format(str(ids)))

            revisions_without_push_data = mtm.load_test_data_all_dimensions(ids)

            if revisions_without_push_data:
                # Resolve every revision to its push-log node, then
                # store the batch in a single call.
                revision_nodes = {}
                for revision in revisions_without_push_data:
                    node = plm.get_node_from_revision(
                        revision, revisions_without_push_data[revision])
                    revision_nodes[revision] = node
                mtm.set_push_data_all_dimensions(revision_nodes)
    finally:
        # Guarantee disconnects on error paths; the original only
        # disconnected on full success.
        plm.disconnect()
        mtm.disconnect()