def test_get_performance_signatures(self):
    pc = PerfherderClient()
    url = pc._get_endpoint_url(pc.PERFORMANCE_SIGNATURES_ENDPOINT,
                               project='mozilla-central')
    content = {
        'signature1': {'cheezburgers': 1},
        'signature2': {'hamburgers': 2},
        'signature3': {'cheezburgers': 2}
    }
    responses.add(responses.GET, url, json=content,
                  match_querystring=True, status=200)

    sigs = pc.get_performance_signatures('mozilla-central')
    self.assertEqual(len(sigs), 3)
    self.assertEqual(sigs.get_signature_hashes(),
                     ['signature1', 'signature2', 'signature3'])
    self.assertEqual(sigs.get_property_names(),
                     set(['cheezburgers', 'hamburgers']))
    self.assertEqual(sigs.get_property_values('cheezburgers'), set([1, 2]))
def test_get_performance_data(self):
    pc = PerfherderClient()
    url = '{}?{}'.format(
        pc._get_endpoint_url(pc.PERFORMANCE_DATA_ENDPOINT,
                             project='mozilla-central'),
        'signatures=signature1&signatures=signature2')
    content = {
        'signature1': [{'value': 1}, {'value': 2}],
        'signature2': [{'value': 2}, {'value': 1}]
    }
    responses.add(responses.GET, url, json=content,
                  match_querystring=True, status=200)

    series_list = pc.get_performance_data(
        'mozilla-central', signatures=['signature1', 'signature2'])
    self.assertEqual(len(series_list), 2)
    self.assertEqual(series_list['signature1']['value'], [1, 2])
    self.assertEqual(series_list['signature2']['value'], [2, 1])
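# --- Hedged sketch: the scaffolding the two responses-based tests above
# assume. Each test must run under @responses.activate so the URLs
# registered with responses.add() intercept the client's HTTP requests.
# The PerfherderClient import path below is an assumption; use whichever
# module the client actually lives in.
import unittest

import responses

from thclient import PerfherderClient  # assumed import path


class PerfherderClientTest(unittest.TestCase):

    @responses.activate
    def test_smoke(self):
        pc = PerfherderClient()
        url = pc._get_endpoint_url(pc.PERFORMANCE_SIGNATURES_ENDPOINT,
                                   project='mozilla-central')
        # without match_querystring, any query string on this URL matches
        responses.add(responses.GET, url, json={}, status=200)
        sigs = pc.get_performance_signatures('mozilla-central')
        self.assertEqual(len(sigs), 0)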
def handle(self, *args, **options):
    if len(args) != 1:
        raise CommandError("Need to (only) specify project/branch")
    project = args[0]

    server_params = urlparse(options['server'])
    time_interval = options['time_interval']

    pc = PerfherderClient(protocol=server_params.scheme,
                          host=server_params.netloc)
    signatures = pc.get_performance_signatures(
        project, interval=time_interval)
    if options['filter_props']:
        for kv in options['filter_props']:
            if ':' not in kv or len(kv) < 3:
                raise CommandError("Must specify --filter-props as "
                                   "'key:value'")
            k, v = kv.split(':')
            signatures = signatures.filter((k, v))

    with concurrent.futures.ProcessPoolExecutor(
            options['num_workers']) as executor:
        futures = []
        # add signatures without parents first, then those with parents
        with_parents = []
        for signature_hash in signatures.get_signature_hashes():
            if 'parent_signature' in signatures[signature_hash]:
                with_parents.append(signature_hash)
            else:
                futures.append(executor.submit(
                    _add_series, pc, project, signature_hash,
                    signatures[signature_hash], options['verbosity'],
                    time_interval=time_interval))
        for signature_hash in with_parents:
            parent_hash = signatures[signature_hash]['parent_signature']
            futures.append(executor.submit(
                _add_series, pc, project, signature_hash,
                signatures[signature_hash], options['verbosity'],
                time_interval=time_interval, parent_hash=parent_hash))

        for future in futures:
            try:
                future.result()
            except Exception as e:
                self.stderr.write("FAIL: {}".format(e))
                # shut down any pending tasks and exit (don't wait for
                # anything already in progress to stop)
                executor.shutdown(wait=False)
                for future in futures:
                    future.cancel()
                raise CommandError(
                    "Failed to import performance data: {}".format(e))
def handle(self, *args, **options):
    if len(args) != 1:
        raise CommandError("Need to (only) specify project/branch")
    project = args[0]

    time_interval = options['time_interval']

    pc = PerfherderClient(server_url=options['server'])
    signatures = pc.get_performance_signatures(project,
                                               interval=time_interval)
    if options['filter_props']:
        for kv in options['filter_props']:
            if ':' not in kv or len(kv) < 3:
                raise CommandError("Must specify --filter-props as "
                                   "'key:value'")
            k, v = kv.split(':')
            signatures = signatures.filter((k, v))

    with concurrent.futures.ProcessPoolExecutor(
            options['num_workers']) as executor:
        futures = []
        # add signatures without parents first, then those with parents
        with_parents = []
        for signature_hash in signatures.get_signature_hashes():
            if 'parent_signature' in signatures[signature_hash]:
                with_parents.append(signature_hash)
            else:
                futures.append(executor.submit(
                    _add_series, pc, project, signature_hash,
                    signatures[signature_hash], options['verbosity'],
                    time_interval=time_interval))
        for signature_hash in with_parents:
            parent_hash = signatures[signature_hash]['parent_signature']
            futures.append(executor.submit(
                _add_series, pc, project, signature_hash,
                signatures[signature_hash], options['verbosity'],
                time_interval=time_interval, parent_hash=parent_hash))

        for future in futures:
            try:
                future.result()
            except Exception as e:
                self.stderr.write("FAIL: {}".format(e))
                # shut down any pending tasks and exit (don't wait for
                # anything already in progress to stop)
                executor.shutdown(wait=False)
                for future in futures:
                    future.cancel()
                raise CommandError(
                    "Failed to import performance data: {}".format(e))
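# --- Hedged sketch: the option surface the two handle() variants above
# read from options[...]. Expressed in the old optparse-based Django
# management-command style that matches their handle(*args) signature;
# option names are taken from the lookups above, defaults are assumptions.
from optparse import make_option

from django.core.management.base import BaseCommand


class Command(BaseCommand):
    help = "Pre-populate performance series data from a treeherder server"

    option_list = BaseCommand.option_list + (
        make_option('--server', action='store', dest='server',
                    default='https://treeherder.mozilla.org',  # assumed
                    help="Treeherder server to fetch data from"),
        make_option('--num-workers', action='store', dest='num_workers',
                    type='int', default=4,  # assumed default
                    help="Size of the process pool used for the import"),
        make_option('--time-interval', action='store', dest='time_interval',
                    type='int', default=None,
                    help="Time interval (in seconds) of series to fetch"),
        make_option('--filter-props', action='append', dest='filter_props',
                    metavar='key:value',
                    help="Only import series whose properties match"),
    )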
def test_get_performance_signature_properties(self, mock_get):
    mock_get.return_value = self._get_mock_response(
        [{'cheezburgers': 1, 'hamburgers': 2}])
    pc = PerfherderClient()
    propdict = pc.get_performance_signature_properties('mozilla-central',
                                                       'signature1')
    self.assertEqual({'cheezburgers': 1, 'hamburgers': 2}, propdict)
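# --- Hedged sketch: one plausible shape for the _get_mock_response()
# helper and the mock_get fixture the mock-based tests above depend on.
# The real helper isn't shown in this section, so everything here is an
# assumption: it fabricates an object shaped like requests.Response, and
# mock_get is expected to come from patching the client's HTTP getter
# (e.g. with @mock.patch) so each test receives it as an argument.
import json
import unittest

import mock


class PerfherderClientTestBase(unittest.TestCase):

    def _get_mock_response(self, content):
        # stand-in for requests.Response whose .json() yields `content`
        mock_response = mock.Mock()
        mock_response.status_code = 200
        mock_response.content = json.dumps(content)
        mock_response.json.return_value = content
        return mock_response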
def test_get_performance_data(self, mock_get):
    mock_get.return_value = self._get_mock_response({
        'signature1': [{'value': 1}, {'value': 2}],
        'signature2': [{'value': 2}, {'value': 1}]
    })
    pc = PerfherderClient()
    series_list = pc.get_performance_data(
        'mozilla-central', signatures=['signature1', 'signature2'])
    self.assertEqual(len(series_list), 2)
    self.assertEqual(series_list['signature1']['value'], [1, 2])
    self.assertEqual(series_list['signature2']['value'], [2, 1])
def test_get_performance_series_list(self, mock_get):
    mock_get.return_value = self._get_mock_response(
        [{'series_signature': 'signature1',
          'blob': [{'geomean': 1}, {'geomean': 2}]},
         {'series_signature': 'signature2',
          'blob': [{'geomean': 2}, {'geomean': 1}]}])
    pc = PerfherderClient()
    series_list = pc.get_performance_series_list(
        'mozilla-central', ['signature1', 'signature2'])
    self.assertEqual(len(series_list), 2)
    self.assertEqual(series_list[0]['geomean'], [1, 2])
    self.assertEqual(series_list[1]['geomean'], [2, 1])
def handle(self, *args, **options):
    if len(args) != 1:
        raise CommandError("Need to (only) specify project/branch")
    project = args[0]

    server_params = urlparse(options['server'])
    pc = PerfherderClient(protocol=server_params.scheme,
                          host=server_params.netloc)
    signatures = pc.get_performance_signatures(
        project, time_interval=PerformanceTimeInterval.NINETY_DAYS)
    if options['filter_props']:
        for kv in options['filter_props']:
            if ':' not in kv or len(kv) < 3:
                raise CommandError("Must specify --filter-props as "
                                   "'key:value'")
            k, v = kv.split(':')
            signatures = signatures.filter((k, v))

    if options['time_interval'] is None:
        time_intervals = PerformanceTimeInterval.all_valid_time_intervals()
    else:
        time_intervals = [options['time_interval']]

    with concurrent.futures.ProcessPoolExecutor(
            options['num_workers']) as executor:
        futures = []
        for signature_hash in signatures.get_signature_hashes():
            futures.append(executor.submit(
                _add_series, server_params, project, time_intervals,
                signature_hash, signatures[signature_hash],
                options['mysql_debug'], options['verbose']))
        for future in futures:
            try:
                future.result()
            except Exception as e:
                self.stderr.write("FAIL: {}".format(e))
                # shut down any pending tasks and exit (don't wait for
                # anything already in progress to stop)
                executor.shutdown(wait=False)
                for future in futures:
                    future.cancel()
                raise CommandError(
                    "Failed to import performance data: {}".format(e))
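# --- Hedged sketch: the PerformanceTimeInterval constants the command
# above relies on. The real client defines these; the exact member set
# here is an assumption, but each value is a plain second count
# (e.g. NINETY_DAYS = 90 * 24 * 60 * 60 = 7776000).
class PerformanceTimeInterval(object):
    DAY = 86400
    WEEK = 604800
    THIRTY_DAYS = 2592000
    NINETY_DAYS = 7776000

    @staticmethod
    def all_valid_time_intervals():
        return [PerformanceTimeInterval.DAY,
                PerformanceTimeInterval.WEEK,
                PerformanceTimeInterval.THIRTY_DAYS,
                PerformanceTimeInterval.NINETY_DAYS]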
def test_get_performance_signatures(self, mock_get):
    mock_get.return_value = self._get_mock_response(
        {'signature1': {'cheezburgers': 1},
         'signature2': {'hamburgers': 2},
         'signature3': {'cheezburgers': 2}})
    pc = PerfherderClient()
    sigs = pc.get_performance_signatures('mozilla-central')
    self.assertEqual(len(sigs), 3)
    self.assertEqual(sigs.get_signature_hashes(),
                     ['signature1', 'signature2', 'signature3'])
    self.assertEqual(sigs.get_property_names(),
                     set(['cheezburgers', 'hamburgers']))
    self.assertEqual(sigs.get_property_values('cheezburgers'), set([1, 2]))
def test_get_performance_signature_properties_no_results(self, mock_get):
    mock_get.return_value = self._get_mock_response([])
    pc = PerfherderClient()
    self.assertRaises(TreeherderClientError,
                      pc.get_performance_signature_properties,
                      'mozilla-central', 'signature1')
def _add_series(server_params, project, time_intervals, signature_hash,
                signature_props, mysql_debug, verbose):
    with JobsModel(project) as jm:
        jm.DEBUG = mysql_debug
        if verbose:
            print(signature_hash)
        jm.set_series_signature(signature_hash, signature_props)
        for time_interval in time_intervals:
            # build the client inside the worker: only picklable parameters
            # (the parsed server URL) cross the process boundary
            pc = PerfherderClient(protocol=server_params.scheme,
                                  host=server_params.netloc)
            series = pc.get_performance_series(
                project, signature_hash, time_interval=time_interval)
            jm.store_performance_series(time_interval, 'talos_data',
                                        str(signature_hash), series)
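# --- Hedged usage sketch for _add_series(): importing a single signature
# synchronously, outside the process pool. The signature hash and property
# dict are illustrative placeholders, not real data.
from urlparse import urlparse  # urllib.parse on Python 3

server_params = urlparse('https://treeherder.mozilla.org')
_add_series(server_params, 'mozilla-central',
            [PerformanceTimeInterval.NINETY_DAYS],
            'aaaa0000bbbb1111cccc2222dddd3333eeee4444',  # placeholder hash
            {'suite': 'tp5o', 'machine_platform': 'linux64'},  # placeholder
            mysql_debug=False, verbose=True)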
def test_get_performance_data(self):
    pc = PerfherderClient()
    url = '{}?{}'.format(
        pc._get_project_uri('mozilla-central', pc.PERFORMANCE_DATA_ENDPOINT),
        'signatures=signature1&signatures=signature2')
    content = {
        'signature1': [{'value': 1}, {'value': 2}],
        'signature2': [{'value': 2}, {'value': 1}]
    }
    responses.add(responses.GET, url, json=content,
                  match_querystring=True, status=200)

    series_list = pc.get_performance_data(
        'mozilla-central', signatures=['signature1', 'signature2'])
    self.assertEqual(len(series_list), 2)
    self.assertEqual(series_list['signature1']['value'], [1, 2])
    self.assertEqual(series_list['signature2']['value'], [2, 1])
def test_get_performance_signatures(self):
    pc = PerfherderClient()
    url = pc._get_project_uri('mozilla-central',
                              pc.PERFORMANCE_SIGNATURES_ENDPOINT)
    content = {
        'signature1': {'cheezburgers': 1},
        'signature2': {'hamburgers': 2},
        'signature3': {'cheezburgers': 2}
    }
    responses.add(responses.GET, url, json=content,
                  match_querystring=True, status=200)

    sigs = pc.get_performance_signatures('mozilla-central')
    self.assertEqual(len(sigs), 3)
    self.assertEqual(sigs.get_signature_hashes(),
                     ['signature1', 'signature2', 'signature3'])
    self.assertEqual(sigs.get_property_names(),
                     set(['cheezburgers', 'hamburgers']))
    self.assertEqual(sigs.get_property_values('cheezburgers'), set([1, 2]))
def test_get_performance_series_list_improper_length(self, mock_get):
    # returning 1 when we should return 2
    mock_get.return_value = self._get_mock_response(
        [{'series_signature': 'signature1', 'blob': [{'geomean': 1}]}])
    pc = PerfherderClient()
    self.assertRaises(TreeherderClientError,
                      pc.get_performance_series_list,
                      'mozilla-central', ['signature1', 'signature2'])
def handle(self, *args, **options):
    if options['server']:
        server_params = urlparse(options['server'])
        server_protocol = server_params.scheme
        server_host = server_params.netloc
    else:
        server_protocol = settings.TREEHERDER_REQUEST_PROTOCOL
        server_host = settings.TREEHERDER_REQUEST_HOST

    if not options['project']:
        raise CommandError("Must specify at least one project with "
                           "--project")

    pc = PerfherderClient(protocol=server_protocol, host=server_host)

    option_collection_hash = pc.get_option_collection_hash()

    # print csv header
    print(','.join(["project", "platform", "signature", "series",
                    "testrun_id", "push_timestamp", "change",
                    "percent change", "t-value", "revision"]))

    for project in options['project']:
        if options['signature']:
            signatures = [options['signature']]
            signature_data = {}
        else:
            signature_data = pc.get_performance_signatures(
                project, time_interval=options['time_interval'])
            signatures = []
            signatures_to_ignore = set()
            # if doing everything, only handle summary series
            for (signature, properties) in signature_data.iteritems():
                signatures.append(signature)
                if 'subtest_signatures' in properties:
                    # Don't alert on subtests which have a summary
                    signatures_to_ignore.update(
                        properties['subtest_signatures'])
            signatures = [signature for signature in signatures
                          if signature not in signatures_to_ignore]

        for signature in signatures:
            series = pc.get_performance_series(
                project, signature, time_interval=options['time_interval'])

            series_properties = signature_data.get(signature)
            if not series_properties:
                series_properties = pc.get_performance_signature_properties(
                    project, signature)

            if series_properties.get('subtest_signatures') is not None:
                meanvar = 'geomean'
            else:
                meanvar = 'mean'

            perf_data = []
            for (result_set_id, timestamp, mean) in zip(
                    series['result_set_id'], series['push_timestamp'],
                    series[meanvar]):
                perf_data.append(
                    PerfDatum(timestamp, mean, testrun_id=result_set_id))

            ta = TalosAnalyzer()
            ta.addData(perf_data)
            for r in ta.analyze_t():
                if r.state == 'regression':
                    resultsets = pc.get_resultsets(project, id=r.testrun_id)
                    if len(resultsets):
                        revision = resultsets[0]['revision']
                    else:
                        revision = ''
                    initial_value = r.historical_stats['avg']
                    new_value = r.forward_stats['avg']
                    if initial_value != 0:
                        pct_change = (100.0 * abs(new_value - initial_value) /
                                      float(initial_value))
                    else:
                        pct_change = 0.0
                    delta = (new_value - initial_value)
                    print(','.join(map(str, [
                        project, series_properties['machine_platform'],
                        signature,
                        self._get_series_description(
                            option_collection_hash, series_properties),
                        r.testrun_id, r.push_timestamp, delta,
                        pct_change, r.t, revision[0:12]])))
def handle(self, *args, **options):
    if options['server']:
        server_params = urlparse(options['server'])
        server_protocol = server_params.scheme
        server_host = server_params.netloc
    else:
        server_protocol = settings.TREEHERDER_REQUEST_PROTOCOL
        server_host = settings.TREEHERDER_REQUEST_HOST

    if not options['project']:
        raise CommandError("Must specify at least one project with "
                           "--project")

    pc = PerfherderClient(protocol=server_protocol, host=server_host)

    option_collection_hash = pc.get_option_collection_hash()

    # print csv header
    print(','.join(["project", "signature", "series", "testrun_id",
                    "push_timestamp", "change", "percent change",
                    "t-value", "revision"]))

    for project in options['project']:
        if options['signature']:
            signatures = [options['signature']]
            signature_data = {}
        else:
            signature_data = pc.get_performance_signatures(
                project, time_interval=options['time_interval'])
            signatures = []
            # if doing everything, only handle summary series
            for (signature, properties) in signature_data.iteritems():
                if 'subtest_signatures' in properties:
                    signatures.append(signature)

        for signature in signatures:
            series = pc.get_performance_series(
                project, signature, time_interval=options['time_interval'])

            series_properties = signature_data.get(signature)
            if not series_properties:
                series_properties = pc.get_performance_signature_properties(
                    project, signature)

            if series_properties.get('subtest_signatures') is not None:
                meanvar = 'geomean'
            else:
                meanvar = 'mean'

            perf_data = []
            for (result_set_id, timestamp, mean) in zip(
                    series['result_set_id'], series['push_timestamp'],
                    series[meanvar]):
                perf_data.append(PerfDatum(timestamp, mean,
                                           testrun_id=result_set_id))

            ta = TalosAnalyzer()
            ta.addData(perf_data)
            for r in ta.analyze_t():
                if r.state == 'regression':
                    resultsets = pc.get_resultsets(project, id=r.testrun_id)
                    if len(resultsets):
                        revision = resultsets[0]['revision']
                    else:
                        revision = ''
                    initial_value = r.historical_stats['avg']
                    new_value = r.forward_stats['avg']
                    if initial_value != 0:
                        pct_change = (100.0 * abs(new_value - initial_value) /
                                      float(initial_value))
                    else:
                        pct_change = 0.0
                    delta = (new_value - initial_value)
                    print(','.join(map(str, [
                        project, signature,
                        self._get_series_description(
                            option_collection_hash, series_properties),
                        r.testrun_id, r.push_timestamp, delta,
                        pct_change, r.t, revision[0:12]])))
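# --- Hedged sketch: the PerfDatum/TalosAnalyzer interface the two alert
# commands above consume. The real implementations come from the talos
# analysis code; the shapes below are inferred purely from usage and are
# assumptions, not the actual algorithm.
class PerfDatum(object):
    def __init__(self, push_timestamp, value, testrun_id=None):
        self.push_timestamp = push_timestamp
        self.value = value
        self.testrun_id = testrun_id


class AnalysisResult(object):
    # what each record yielded by TalosAnalyzer.analyze_t() must expose
    # for the reporting loops above to work
    def __init__(self, state, testrun_id, push_timestamp, t,
                 historical_stats, forward_stats):
        self.state = state              # e.g. 'regression'
        self.testrun_id = testrun_id
        self.push_timestamp = push_timestamp
        self.t = t                      # t-test statistic
        self.historical_stats = historical_stats  # dict with an 'avg' key
        self.forward_stats = forward_stats        # dict with an 'avg' key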
def handle(self, *args, **options):
    if options['server']:
        server_params = urlparse(options['server'])
        server_protocol = server_params.scheme
        server_host = server_params.netloc
    else:
        server_protocol = settings.TREEHERDER_REQUEST_PROTOCOL
        server_host = settings.TREEHERDER_REQUEST_HOST

    if not options['project']:
        raise CommandError("Must specify at least one project with "
                           "--project")

    pc = PerfherderClient(protocol=server_protocol, host=server_host)

    option_collection_hash = pc.get_option_collection_hash()

    # print csv header
    print(','.join(["project", "platform", "signature", "series",
                    "testrun_id", "push_timestamp", "change",
                    "percent change", "t-value", "revision"]))

    for project in options['project']:
        if options['signature']:
            signatures = [options['signature']]
            signature_data = pc.get_performance_signatures(
                project, signatures=signatures,
                interval=options['time_interval'])
        else:
            signature_data = pc.get_performance_signatures(
                project, interval=options['time_interval'])
            signatures = []
            signatures_to_ignore = set()
            # if doing everything, only handle summary series
            for (signature, properties) in signature_data.iteritems():
                signatures.append(signature)
                if 'subtest_signatures' in properties:
                    # Don't alert on subtests which have a summary
                    signatures_to_ignore.update(
                        properties['subtest_signatures'])
            signatures = [signature for signature in signatures
                          if signature not in signatures_to_ignore]

        for signature in signatures:
            series = pc.get_performance_data(
                project, signatures=signature,
                interval=options['time_interval'])[signature]

            series_properties = signature_data.get(signature)

            data = []
            for (result_set_id, timestamp, value) in zip(
                    series['result_set_id'], series['push_timestamp'],
                    series['value']):
                data.append(Datum(timestamp, value,
                                  testrun_id=result_set_id))

            for r in detect_changes(data):
                if r.state == 'regression':
                    resultsets = pc.get_resultsets(project, id=r.testrun_id)
                    if len(resultsets):
                        revision = resultsets[0]['revision']
                    else:
                        revision = ''
                    initial_value = r.historical_stats['avg']
                    new_value = r.forward_stats['avg']
                    if initial_value != 0:
                        pct_change = (100.0 * abs(new_value - initial_value) /
                                      float(initial_value))
                    else:
                        pct_change = 0.0
                    delta = (new_value - initial_value)
                    print(','.join(map(str, [
                        project, series_properties['machine_platform'],
                        signature,
                        self._get_series_description(
                            option_collection_hash, series_properties),
                        r.testrun_id, r.push_timestamp, delta,
                        pct_change, r.t, revision[0:12]])))
def handle(self, *args, **options):
    if not options['project']:
        raise CommandError("Must specify at least one project with "
                           "--project")

    pc = PerfherderClient(server_url=options['server'])

    option_collection_hash = pc.get_option_collection_hash()

    # print csv header
    print(','.join(["project", "platform", "signature", "series",
                    "testrun_id", "push_timestamp", "change",
                    "percent change", "t-value", "revision"]))

    for project in options['project']:
        if options['signature']:
            signatures = [options['signature']]
            signature_data = pc.get_performance_signatures(
                project, signatures=signatures,
                interval=options['time_interval'])
        else:
            signature_data = pc.get_performance_signatures(
                project, interval=options['time_interval'])
            signatures = []
            signatures_to_ignore = set()
            # if doing everything, only handle summary series
            for (signature, properties) in signature_data.iteritems():
                signatures.append(signature)
                if 'subtest_signatures' in properties:
                    # Don't alert on subtests which have a summary
                    signatures_to_ignore.update(
                        properties['subtest_signatures'])
            signatures = [signature for signature in signatures
                          if signature not in signatures_to_ignore]

        for signature in signatures:
            series = pc.get_performance_data(
                project, signatures=signature,
                interval=options['time_interval'])[signature]

            series_properties = signature_data.get(signature)

            data = []
            for (result_set_id, timestamp, value) in zip(
                    series['result_set_id'], series['push_timestamp'],
                    series['value']):
                data.append(Datum(timestamp, value,
                                  testrun_id=result_set_id))

            for r in detect_changes(data):
                if r.state == 'regression':
                    resultsets = pc.get_resultsets(project, id=r.testrun_id)
                    if len(resultsets):
                        revision = resultsets[0]['revision']
                    else:
                        revision = ''
                    initial_value = r.historical_stats['avg']
                    new_value = r.forward_stats['avg']
                    if initial_value != 0:
                        pct_change = (100.0 * abs(new_value - initial_value) /
                                      float(initial_value))
                    else:
                        pct_change = 0.0
                    delta = (new_value - initial_value)
                    print(','.join(map(str, [
                        project, series_properties['machine_platform'],
                        signature,
                        self._get_series_description(
                            option_collection_hash, series_properties),
                        r.testrun_id, r.push_timestamp, delta,
                        pct_change, r.t, revision[0:12]])))
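# --- Hedged usage sketch for the Datum/detect_changes() contract the two
# commands above assume (the import path and record attributes are inferred
# from usage and are assumptions). An upward step in the series should
# surface as a record with state == 'regression'.
from analyze import Datum, detect_changes  # assumed import path

data = [Datum(1400000000 + i * 3600, value, testrun_id=i)
        for i, value in enumerate([10.0] * 12 + [15.0] * 12)]  # upward step
for record in detect_changes(data):
    if record.state == 'regression':
        print('testrun {0}: t={1:.2f}, avg {2} -> {3}'.format(
            record.testrun_id, record.t,
            record.historical_stats['avg'], record.forward_stats['avg']))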