def test_get_performance_signatures(self):
    """Signatures fetched from the mocked endpoint are exposed through
    the collection accessors (hashes, property names, property values).
    """
    pc = PerfherderClient()
    url = pc._get_endpoint_url(pc.PERFORMANCE_SIGNATURES_ENDPOINT,
                               project='mozilla-central')
    # three signatures sharing two property names, with a repeated value
    content = {
        'signature1': {
            'cheezburgers': 1
        },
        'signature2': {
            'hamburgers': 2
        },
        'signature3': {
            'cheezburgers': 2
        }
    }
    responses.add(responses.GET, url, json=content,
                  match_querystring=True, status=200)

    sigs = pc.get_performance_signatures('mozilla-central')
    self.assertEqual(len(sigs), 3)
    self.assertEqual(sigs.get_signature_hashes(),
                     ['signature1', 'signature2', 'signature3'])
    # set literals replace the unidiomatic set([...]) calls
    self.assertEqual(sigs.get_property_names(),
                     {'cheezburgers', 'hamburgers'})
    self.assertEqual(sigs.get_property_values('cheezburgers'), {1, 2})
def handle(self, *args, **options):
    """Import performance series data for a single project/branch.

    Signatures without a parent are submitted to the worker pool
    first, then those with parents, so parent series exist before
    their children are added. Raises CommandError on bad arguments
    or if any worker fails.
    """
    if len(args) != 1:
        raise CommandError("Need to (only) specify project/branch")
    project = args[0]

    time_interval = options['time_interval']

    pc = PerfherderClient(server_url=options['server'])
    signatures = pc.get_performance_signatures(project,
                                               interval=time_interval)
    if options['filter_props']:
        for kv in options['filter_props']:
            if ':' not in kv or len(kv) < 3:
                raise CommandError("Must specify --filter-props as "
                                   "'key:value'")
            # split on the first ':' only, so values may contain colons
            # (a bare split(':') would raise ValueError on 'k:v:w')
            k, v = kv.split(':', 1)
            signatures = signatures.filter((k, v))

    with concurrent.futures.ProcessPoolExecutor(
            options['num_workers']) as executor:
        futures = []
        # add signatures without parents first, then those with parents
        with_parents = []
        for signature_hash in signatures.get_signature_hashes():
            if 'parent_signature' in signatures[signature_hash]:
                with_parents.append(signature_hash)
            else:
                futures.append(
                    executor.submit(_add_series, pc, project,
                                    signature_hash,
                                    signatures[signature_hash],
                                    options['verbosity'],
                                    time_interval=time_interval))
        for signature_hash in with_parents:
            parent_hash = signatures[signature_hash]['parent_signature']
            futures.append(
                executor.submit(_add_series, pc, project,
                                signature_hash,
                                signatures[signature_hash],
                                options['verbosity'],
                                time_interval=time_interval,
                                parent_hash=parent_hash))

        for future in futures:
            try:
                future.result()
            except Exception as e:
                self.stderr.write("FAIL: {}".format(e))
                # shutdown any pending tasks and exit (if something
                # is in progress, no wait to stop it)
                executor.shutdown(wait=False)
                for future in futures:
                    future.cancel()
                raise CommandError(
                    "Failed to import performance data: {}".format(e))
def handle(self, *args, **options):
    """Import performance series data for a single project/branch.

    Series without a parent signature are submitted first so parents
    exist before their children are added. Raises CommandError on bad
    arguments or if any worker fails.
    """
    if len(args) != 1:
        raise CommandError("Need to (only) specify project/branch")
    project = args[0]

    server_params = urlparse(options['server'])
    time_interval = options['time_interval']

    pc = PerfherderClient(protocol=server_params.scheme,
                          host=server_params.netloc)
    signatures = pc.get_performance_signatures(
        project, interval=time_interval)
    if options['filter_props']:
        for kv in options['filter_props']:
            if ':' not in kv or len(kv) < 3:
                raise CommandError("Must specify --filter-props as "
                                   "'key:value'")
            # split on the first ':' only, so values may contain colons
            # (a bare split(':') would raise ValueError on 'k:v:w')
            k, v = kv.split(':', 1)
            signatures = signatures.filter((k, v))

    with concurrent.futures.ProcessPoolExecutor(
            options['num_workers']) as executor:
        futures = []
        # add signatures without parents first, then those with parents
        with_parents = []
        for signature_hash in signatures.get_signature_hashes():
            if 'parent_signature' in signatures[signature_hash]:
                with_parents.append(signature_hash)
            else:
                futures.append(executor.submit(
                    _add_series, pc, project, signature_hash,
                    signatures[signature_hash], options['verbosity'],
                    time_interval=time_interval))
        for signature_hash in with_parents:
            parent_hash = signatures[signature_hash]['parent_signature']
            futures.append(executor.submit(
                _add_series, pc, project, signature_hash,
                signatures[signature_hash], options['verbosity'],
                time_interval=time_interval, parent_hash=parent_hash))

        for future in futures:
            try:
                future.result()
            except Exception as e:
                self.stderr.write("FAIL: {}".format(e))
                # shutdown any pending tasks and exit (if something
                # is in progress, no wait to stop it)
                executor.shutdown(wait=False)
                for future in futures:
                    future.cancel()
                raise CommandError(
                    "Failed to import performance data: {}".format(e))
def test_get_performance_signatures(self, mock_get):
    """The mocked signatures payload round-trips through the
    collection's accessor methods."""
    payload = {
        "signature1": {"cheezburgers": 1},
        "signature2": {"hamburgers": 2},
        "signature3": {"cheezburgers": 2},
    }
    mock_get.return_value = self._get_mock_response(payload)

    client = PerfherderClient()
    sigs = client.get_performance_signatures("mozilla-central")

    self.assertEqual(len(sigs), 3)
    self.assertEqual(sigs.get_signature_hashes(),
                     ["signature1", "signature2", "signature3"])
    self.assertEqual(sigs.get_property_names(),
                     set(["cheezburgers", "hamburgers"]))
    self.assertEqual(sigs.get_property_values("cheezburgers"),
                     set([1, 2]))
def handle(self, *args, **options):
    """Import 90 days of performance series data for one
    project/branch, over one or all valid time intervals.

    Raises CommandError on bad arguments or if any worker fails.
    """
    if len(args) != 1:
        raise CommandError("Need to (only) specify project/branch")
    project = args[0]

    server_params = urlparse(options['server'])

    pc = PerfherderClient(protocol=server_params.scheme,
                          host=server_params.netloc)
    signatures = pc.get_performance_signatures(
        project, time_interval=PerformanceTimeInterval.NINETY_DAYS)
    if options['filter_props']:
        for kv in options['filter_props']:
            if ':' not in kv or len(kv) < 3:
                raise CommandError("Must specify --filter-props as "
                                   "'key:value'")
            # split on the first ':' only, so values may contain colons
            # (a bare split(':') would raise ValueError on 'k:v:w')
            k, v = kv.split(':', 1)
            signatures = signatures.filter((k, v))

    if options['time_interval'] is None:
        time_intervals = PerformanceTimeInterval.all_valid_time_intervals()
    else:
        time_intervals = [options['time_interval']]

    with concurrent.futures.ProcessPoolExecutor(
            options['num_workers']) as executor:
        futures = []
        for signature_hash in signatures.get_signature_hashes():
            futures.append(executor.submit(_add_series, server_params,
                                           project,
                                           time_intervals,
                                           signature_hash,
                                           signatures[signature_hash],
                                           options['mysql_debug'],
                                           options['verbose']))
        for future in futures:
            try:
                future.result()
            # 'except ... as e' (not ', e') is valid in both
            # Python 2.6+ and Python 3
            except Exception as e:
                self.stderr.write("FAIL: {}".format(e))
                # shutdown any pending tasks and exit (if something
                # is in progress, no wait to stop it)
                executor.shutdown(wait=False)
                for future in futures:
                    future.cancel()
                raise CommandError(
                    "Failed to import performance data: {}".format(e))
def test_get_performance_signatures(self, mock_get):
    """Fetching signatures returns a collection exposing hashes,
    property names and property values."""
    mock_get.return_value = self._get_mock_response({
        'signature1': {'cheezburgers': 1},
        'signature2': {'hamburgers': 2},
        'signature3': {'cheezburgers': 2},
    })

    sigs = PerfherderClient().get_performance_signatures('mozilla-central')

    self.assertEqual(len(sigs), 3)
    self.assertEqual(sigs.get_signature_hashes(),
                     ['signature1', 'signature2', 'signature3'])
    self.assertEqual(sigs.get_property_names(),
                     set(['cheezburgers', 'hamburgers']))
    self.assertEqual(sigs.get_property_values('cheezburgers'), set([1, 2]))
def test_get_performance_signatures(self, mock_get):
    """Signature data from a mocked response is exposed via the
    collection's accessor methods."""
    fake_signatures = {
        'signature1': {'cheezburgers': 1},
        'signature2': {'hamburgers': 2},
        'signature3': {'cheezburgers': 2},
    }
    mock_get.return_value = self._get_mock_response(fake_signatures)

    client = PerfherderClient()
    sigs = client.get_performance_signatures('mozilla-central')

    expected_hashes = ['signature1', 'signature2', 'signature3']
    self.assertEqual(len(sigs), 3)
    self.assertEqual(sigs.get_signature_hashes(), expected_hashes)
    self.assertEqual(sigs.get_property_names(),
                     set(['cheezburgers', 'hamburgers']))
    self.assertEqual(sigs.get_property_values('cheezburgers'), set([1, 2]))
def handle(self, *args, **options):
    """Import 90 days of performance series data for one
    project/branch, over one or all valid time intervals.

    Raises CommandError on bad arguments or if any worker fails.
    """
    if len(args) != 1:
        raise CommandError("Need to (only) specify project/branch")
    project = args[0]

    server_params = urlparse(options['server'])

    pc = PerfherderClient(protocol=server_params.scheme,
                          host=server_params.netloc)
    signatures = pc.get_performance_signatures(
        project, time_interval=PerformanceTimeInterval.NINETY_DAYS)
    if options['filter_props']:
        for kv in options['filter_props']:
            if ':' not in kv or len(kv) < 3:
                raise CommandError("Must specify --filter-props as "
                                   "'key:value'")
            # split on the first ':' only, so values may contain colons
            # (a bare split(':') would raise ValueError on 'k:v:w')
            k, v = kv.split(':', 1)
            signatures = signatures.filter((k, v))

    if options['time_interval'] is None:
        time_intervals = PerformanceTimeInterval.all_valid_time_intervals()
    else:
        time_intervals = [options['time_interval']]

    with concurrent.futures.ProcessPoolExecutor(
            options['num_workers']) as executor:
        futures = []
        for signature_hash in signatures.get_signature_hashes():
            futures.append(
                executor.submit(_add_series, server_params,
                                project,
                                time_intervals,
                                signature_hash,
                                signatures[signature_hash],
                                options['mysql_debug'],
                                options['verbose']))
        for future in futures:
            try:
                future.result()
            # 'except ... as e' (not ', e') is valid in both
            # Python 2.6+ and Python 3
            except Exception as e:
                self.stderr.write("FAIL: {}".format(e))
                # shutdown any pending tasks and exit (if something
                # is in progress, no wait to stop it)
                executor.shutdown(wait=False)
                for future in futures:
                    future.cancel()
                raise CommandError(
                    "Failed to import performance data: {}".format(e))
def test_get_performance_signatures(self):
    """A mocked signatures endpoint response is exposed through the
    collection accessors."""
    pc = PerfherderClient()
    endpoint = pc._get_project_uri('mozilla-central',
                                   pc.PERFORMANCE_SIGNATURES_ENDPOINT)
    responses.add(responses.GET, endpoint,
                  json={
                      'signature1': {'cheezburgers': 1},
                      'signature2': {'hamburgers': 2},
                      'signature3': {'cheezburgers': 2},
                  },
                  match_querystring=True, status=200)

    sigs = pc.get_performance_signatures('mozilla-central')

    self.assertEqual(len(sigs), 3)
    self.assertEqual(sigs.get_signature_hashes(),
                     ['signature1', 'signature2', 'signature3'])
    self.assertEqual(sigs.get_property_names(),
                     set(['cheezburgers', 'hamburgers']))
    self.assertEqual(sigs.get_property_values('cheezburgers'), set([1, 2]))
def handle(self, *args, **options): if options['server']: server_params = urlparse(options['server']) server_protocol = server_params.scheme server_host = server_params.netloc else: server_protocol = settings.TREEHERDER_REQUEST_PROTOCOL server_host = settings.TREEHERDER_REQUEST_HOST if not options['project']: raise CommandError("Must specify at least one project with " "--project") pc = PerfherderClient(protocol=server_protocol, host=server_host) option_collection_hash = pc.get_option_collection_hash() # print csv header print ','.join([ "project", "platform", "signature", "series", "testrun_id", "push_timestamp", "change", "percent change", "t-value", "revision" ]) for project in options['project']: if options['signature']: signatures = [options['signature']] signature_data = {} else: signature_data = pc.get_performance_signatures( project, time_interval=options['time_interval']) signatures = [] signatures_to_ignore = set() # if doing everything, only handle summary series for (signature, properties) in signature_data.iteritems(): signatures.append(signature) if 'subtest_signatures' in properties: # Don't alert on subtests which have a summary signatures_to_ignore.update( properties['subtest_signatures']) signatures = [ signature for signature in signatures if signature not in signatures_to_ignore ] for signature in signatures: series = pc.get_performance_series( project, signature, time_interval=options['time_interval']) series_properties = signature_data.get(signature) if not series_properties: series_properties = pc.get_performance_signature_properties( project, signature) if series_properties.get('subtest_signatures') is not None: meanvar = 'geomean' else: meanvar = 'mean' perf_data = [] for (result_set_id, timestamp, mean) in zip(series['result_set_id'], series['push_timestamp'], series[meanvar]): perf_data.append( PerfDatum(timestamp, mean, testrun_id=result_set_id)) ta = TalosAnalyzer() ta.addData(perf_data) for r in ta.analyze_t(): if r.state == 
'regression': resultsets = pc.get_resultsets(project, id=r.testrun_id) if len(resultsets): revision = resultsets[0]['revision'] else: revision = '' initial_value = r.historical_stats['avg'] new_value = r.forward_stats['avg'] if initial_value != 0: pct_change = 100.0 * abs(new_value - initial_value ) / float(initial_value) else: pct_change = 0.0 delta = (new_value - initial_value) print ','.join( map(lambda v: str(v), [ project, series_properties['machine_platform'], signature, self._get_series_description( option_collection_hash, series_properties), r.testrun_id, r.push_timestamp, delta, pct_change, r.t, revision[0:12] ]))
def handle(self, *args, **options): if options['server']: server_params = urlparse(options['server']) server_protocol = server_params.scheme server_host = server_params.netloc else: server_protocol = settings.TREEHERDER_REQUEST_PROTOCOL server_host = settings.TREEHERDER_REQUEST_HOST if not options['project']: raise CommandError("Must specify at least one project with " "--project") pc = PerfherderClient(protocol=server_protocol, host=server_host) option_collection_hash = pc.get_option_collection_hash() # print csv header print ','.join(["project", "signature", "series", "testrun_id", "push_timestamp", "change", "percent change", "t-value", "revision"]) for project in options['project']: if options['signature']: signatures = [options['signature']] signature_data = {} else: signature_data = pc.get_performance_signatures( project, time_interval=options['time_interval']) signatures = [] # if doing everything, only handle summary series for (signature, properties) in signature_data.iteritems(): if 'subtest_signatures' in properties: signatures.append(signature) for signature in signatures: series = pc.get_performance_series( project, signature, time_interval=options['time_interval']) series_properties = signature_data.get(signature) if not series_properties: series_properties = pc.get_performance_signature_properties( project, signature) if series_properties.get('subtest_signatures') is not None: meanvar = 'geomean' else: meanvar = 'mean' perf_data = [] for (result_set_id, timestamp, mean) in zip( series['result_set_id'], series['push_timestamp'], series[meanvar]): perf_data.append(PerfDatum(timestamp, mean, testrun_id=result_set_id)) ta = TalosAnalyzer() ta.addData(perf_data) for r in ta.analyze_t(): if r.state == 'regression': resultsets = pc.get_resultsets(project, id=r.testrun_id) if len(resultsets): revision = resultsets[0]['revision'] else: revision = '' initial_value = r.historical_stats['avg'] new_value = r.forward_stats['avg'] if initial_value != 0: pct_change 
= 100.0 * abs(new_value - initial_value) / float(initial_value) else: pct_change = 0.0 delta = (new_value - initial_value) print ','.join(map(lambda v: str(v), [project, signature, self._get_series_description( option_collection_hash, series_properties), r.testrun_id, r.push_timestamp, delta, pct_change, r.t, revision[0:12]]))
def handle(self, *args, **options): if options['server']: server_params = urlparse(options['server']) server_protocol = server_params.scheme server_host = server_params.netloc else: server_protocol = settings.TREEHERDER_REQUEST_PROTOCOL server_host = settings.TREEHERDER_REQUEST_HOST if not options['project']: raise CommandError("Must specify at least one project with " "--project") pc = PerfherderClient(protocol=server_protocol, host=server_host) option_collection_hash = pc.get_option_collection_hash() # print csv header print ','.join(["project", "platform", "signature", "series", "testrun_id", "push_timestamp", "change", "percent change", "t-value", "revision"]) for project in options['project']: if options['signature']: signatures = [options['signature']] signature_data = pc.get_performance_signatures( project, signatures=signatures, interval=options['time_interval']) else: signature_data = pc.get_performance_signatures( project, interval=options['time_interval']) signatures = [] signatures_to_ignore = set() # if doing everything, only handle summary series for (signature, properties) in signature_data.iteritems(): signatures.append(signature) if 'subtest_signatures' in properties: # Don't alert on subtests which have a summary signatures_to_ignore.update(properties['subtest_signatures']) signatures = [signature for signature in signatures if signature not in signatures_to_ignore] for signature in signatures: series = pc.get_performance_data( project, signatures=signature, interval=options['time_interval'])[signature] series_properties = signature_data.get(signature) data = [] for (result_set_id, timestamp, value) in zip( series['result_set_id'], series['push_timestamp'], series['value']): data.append(Datum(timestamp, value, testrun_id=result_set_id)) for r in detect_changes(data): if r.state == 'regression': resultsets = pc.get_resultsets(project, id=r.testrun_id) if len(resultsets): revision = resultsets[0]['revision'] else: revision = '' initial_value = 
r.historical_stats['avg'] new_value = r.forward_stats['avg'] if initial_value != 0: pct_change = 100.0 * abs(new_value - initial_value) / float(initial_value) else: pct_change = 0.0 delta = (new_value - initial_value) print ','.join(map( lambda v: str(v), [project, series_properties['machine_platform'], signature, self._get_series_description( option_collection_hash, series_properties), r.testrun_id, r.push_timestamp, delta, pct_change, r.t, revision[0:12]]))
def handle(self, *args, **options): if not options['project']: raise CommandError("Must specify at least one project with " "--project") pc = PerfherderClient(server_url=options['server']) option_collection_hash = pc.get_option_collection_hash() # print csv header print ','.join(["project", "platform", "signature", "series", "testrun_id", "push_timestamp", "change", "percent change", "t-value", "revision"]) for project in options['project']: if options['signature']: signatures = [options['signature']] signature_data = pc.get_performance_signatures( project, signatures=signatures, interval=options['time_interval']) else: signature_data = pc.get_performance_signatures( project, interval=options['time_interval']) signatures = [] signatures_to_ignore = set() # if doing everything, only handle summary series for (signature, properties) in signature_data.iteritems(): signatures.append(signature) if 'subtest_signatures' in properties: # Don't alert on subtests which have a summary signatures_to_ignore.update(properties['subtest_signatures']) signatures = [signature for signature in signatures if signature not in signatures_to_ignore] for signature in signatures: series = pc.get_performance_data( project, signatures=signature, interval=options['time_interval'])[signature] series_properties = signature_data.get(signature) data = [] for (result_set_id, timestamp, value) in zip( series['result_set_id'], series['push_timestamp'], series['value']): data.append(Datum(timestamp, value, testrun_id=result_set_id)) for r in detect_changes(data): if r.state == 'regression': resultsets = pc.get_resultsets(project, id=r.testrun_id) if len(resultsets): revision = resultsets[0]['revision'] else: revision = '' initial_value = r.historical_stats['avg'] new_value = r.forward_stats['avg'] if initial_value != 0: pct_change = 100.0 * abs(new_value - initial_value) / float(initial_value) else: pct_change = 0.0 delta = (new_value - initial_value) print ','.join(map( lambda v: str(v), [project, 
series_properties['machine_platform'], signature, self._get_series_description( option_collection_hash, series_properties), r.testrun_id, r.push_timestamp, delta, pct_change, r.t, revision[0:12]]))