def process_commandline_request(self, options):
    """Implements CommandHandler."""
    title = options.get("name", None)
    if not title:
        raise ValueError("No name provided.")

    api = datadog_service.make_datadog_service(options).api
    if self.__type_name == "screenboard":
        list_method = api.Screenboard.get_all
        get_method = api.Screenboard.get
        result_key = "screenboards"
    elif self.__type_name == "timeboard":
        list_method = api.Timeboard.get_all
        get_method = api.Timeboard.get
        result_key = "dashes"
    else:
        raise ValueError(
            'Unknown datadog artifact "{0}".'
            ' Either "screenboard" or "timeboard".'.format(self.__type_name)
        )

    all_list = list_method()
    artifact_id = None
    for artifact in all_list[result_key]:
        if artifact["title"] == title:
            artifact_id = artifact["id"]
            break
    if artifact_id is None:
        raise ValueError('Could not find title "{0}"'.format(title))

    json_text = json.JSONEncoder(indent=2).encode(get_method(artifact_id))
    self.output(options, json_text)
def make_metric_service(self, options):
    """Create the metric service we'll use to publish metrics to a backend."""
    if options['stackdriver']:
        return stackdriver_service.make_service(options)
    if options['datadog']:
        return datadog_service.make_datadog_service(options)
    raise ValueError('No metric service specified.')
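# Hedged usage sketch for make_metric_service above. The 'stackdriver' and
# 'datadog' option keys mirror the checks in the factory, and
# publish_metrics(service_metrics=...) appears in the tests later in this
# section; the handler object, the option values (including 'dd_agent_config'),
# and the empty metrics payload are illustrative assumptions, not the project's
# real wiring.
def example_select_datadog_backend(handler):
    options = {'stackdriver': False,
               'datadog': {'api_key': 'EXAMPLE_KEY'},  # assumed sub-options
               'dd_agent_config': ''}
    metric_service = handler.make_metric_service(options)
    return metric_service.publish_metrics(service_metrics={})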
def process_commandline_request(self, options):
    """Implements CommandHandler."""
    datadog = datadog_service.make_datadog_service(options)
    if self.__type_name == 'screenboard':
        method = datadog.api.Screenboard.get_all
    elif self.__type_name == 'timeboard':
        method = datadog.api.Timeboard.get_all
    else:
        raise ValueError('Unknown datadog artifact "{0}".'
                         ' Either "screenboard" or "timeboard".'
                         .format(self.__type_name))

    json_text = json.JSONEncoder(indent=2).encode(method())
    self.output(options, json_text)
def process_commandline_request(self, options):
    """Implements CommandHandler."""
    datadog = datadog_service.make_datadog_service(options)
    if self.__type_name == "screenboard":
        method = datadog.api.Screenboard.get_all
    elif self.__type_name == "timeboard":
        method = datadog.api.Timeboard.get_all
    else:
        raise ValueError(
            'Unknown datadog artifact "{0}".'
            ' Either "screenboard" or "timeboard".'.format(self.__type_name)
        )

    json_text = json.JSONEncoder(indent=2).encode(method())
    self.output(options, json_text)
def test_initialize_from_dd_agent_config(self, mock_initialize):
    options = dict()
    data = ["[Main]",
            "#api_key: COMMENT",
            "api_key: FOUND_KEY",
            "hostname: FOUND_HOST"]
    with tempfile.NamedTemporaryFile() as config:
        config.write('\n'.join(data))
        config.flush()
        options['dd_agent_config'] = config.name
        service = datadog_service.make_datadog_service(options)

    self.assertIsNotNone(service)
    self.assertIsNotNone(service.api)
    mock_initialize.assert_called_with(
        api_key='FOUND_KEY', app_key=None, host_name='FOUND_HOST')
def test_initialize_from_dd_agent_config(self, mock_initialize):
    options = {'dd_agent_config': 'testCONFIG'}
    with patch('datadog_service.open',
               mock.mock_open(read_data='#api_key: COMMENT\n'
                                        'api_key: FOUND_KEY\n'
                                        'hostname: FOUND_HOST\n'),
               create=True) as mock_patch:
        service = datadog_service.make_datadog_service(options)
        mock_patch.assert_called_with('testCONFIG', 'r')

    self.assertIsNotNone(service)
    self.assertIsNotNone(service.api)
    # initialize on demand
    mock_initialize.assert_called_with(api_key='FOUND_KEY', app_key=None,
                                       host_name='FOUND_HOST')
def test_initialize_from_options(self, mock_initialize):
    options = {'datadog': {}, 'dd_agent_config': ''}
    data = [
        "[Main]",
        "api_key: testApi",
        "app_key: testApi",
        "hostname: testHost"
    ]
    with tempfile.NamedTemporaryFile() as config:
        config.write('\n'.join(data))
        config.flush()
        options['dd_agent_config'] = config.name
        service = datadog_service.make_datadog_service(options)

    self.assertIsNotNone(service)
    self.assertIsNotNone(service.api)
    # initialize on demand
    mock_initialize.assert_called_with(api_key='testApi',
                                       app_key='testApi',
                                       host_name='testHost')
def test_initialize_from_localhost_config(
        self, mock_initialize, mock_getfqdn):
    options = dict()
    data = ["[Main]", "api_key: FOUND_KEY"]
    mock_getfqdn.return_value = 'testFQDN'
    with tempfile.NamedTemporaryFile() as config:
        config.write('\n'.join(data))
        config.flush()
        options['dd_agent_config'] = config.name
        options['datadog_host'] = "wrongHOST"
        service = datadog_service.make_datadog_service(options)

    mock_getfqdn.assert_called_with('wrongHOST')
    self.assertIsNotNone(service)
    self.assertIsNotNone(service.api)
    # initialize on demand
    mock_initialize.assert_called_with(
        api_key='FOUND_KEY', app_key=None, host_name='testFQDN')
def make_metric_services(self, options):
    """Create the metric services we'll use to publish metrics to a backend."""
    service_list = []
    if options['stackdriver']:
        service_list.append(stackdriver_service.make_service(options))
    if options['datadog']:
        service_list.append(datadog_service.make_datadog_service(options))
    if options['prometheus']:
        service_list.append(prometheus_service.make_service(options))
        # This endpoint will be conditionally added only when prometheus is
        # configured. It doesn't have to be like this, but might as well
        # avoid exposing it if it isn't needed.
        self.command_handlers.append(prometheus_service.ScrapeHandler())

    if service_list:
        return service_list
    raise ValueError('No metric service specified.')
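# Hedged sketch for make_metric_services above. Only the option keys checked in
# the factory ('stackdriver', 'datadog', 'prometheus') and the
# publish_metrics(service_metrics=...) call exercised in the tests below come
# from the code; the handler object, the option values, and the assumption that
# every enabled backend exposes the same publish interface are illustrative only.
def example_publish_to_enabled_backends(handler, service_metrics):
    options = {'stackdriver': False,
               'datadog': {'api_key': 'EXAMPLE_KEY'},  # assumed sub-options
               'prometheus': True,
               'dd_agent_config': ''}
    for service in handler.make_metric_services(options):
        service.publish_metrics(service_metrics=service_metrics)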
def test_initialize_from_localhost_config(self, mock_initialize, mock_getfqdn):
    options = {
        'dd_agent_config': 'testCONFIG',
        'datadog_host': 'wrongHOST'
    }
    mock_getfqdn.return_value = 'testFQDN'
    with patch('datadog_service.open',
               mock.mock_open(read_data='#api_key: COMMENT\n'
                                        'api_key: FOUND_KEY\n'),
               create=True) as mock_patch:
        service = datadog_service.make_datadog_service(options)
        mock_patch.assert_called_with('testCONFIG', 'r')

    mock_getfqdn.assert_called_with('wrongHOST')
    self.assertIsNotNone(service)
    self.assertIsNotNone(service.api)
    # initialize on demand
    mock_initialize.assert_called_with(api_key='FOUND_KEY', app_key=None,
                                       host_name='testFQDN')
def service_generation_helper(config_data=[], datadog_options={},
                              spinnaker_monitoring_options={}):
    """Build a DatadogMetricsService in a DRY fashion for the tests below.

    Additional parameters can be injected anywhere. The helper guarantees the
    base invariants that DatadogService checks for: an api_key is present, the
    'datadog' and 'dd_agent_config' keys exist in the options, and
    'dd_agent_config' points at a parseable config file.
    """
    data = ["[Main]", "api_key: FOUND_KEY"] + config_data
    options = {'datadog': datadog_options, 'dd_agent_config': ''}
    options.update(spinnaker_monitoring_options)

    with tempfile.NamedTemporaryFile() as config:
        config.write('\n'.join(data))
        config.flush()
        options['dd_agent_config'] = config.name
        service = datadog_service.make_datadog_service(options)
    return service
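# Hedged usage sketch for the helper above. Only the helper's signature, the
# meaning of its keyword arguments (per its docstring), and the 'datadog_host'
# option name (seen in the tests below) come from the code; the extra dd-agent
# config line and the override values shown here are illustrative assumptions.
def example_build_service_with_overrides():
    return service_generation_helper(
        config_data=["hostname: EXAMPLE_HOST"],     # extra dd-agent config lines
        datadog_options={},                         # per-backend options
        spinnaker_monitoring_options={'datadog_host': 'example-host'})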
def test_publish_metrics(self, mock_initialize, mock_xform):
    options = {'dd_agent_config': 'testCONFIG', 'datadog_host': 'testHost'}
    with patch('datadog_service.open',
               mock.mock_open(read_data='api_key: FOUND_KEY\n')):
        service = datadog_service.make_datadog_service(options)

    bogus_data = [i for i in range(0, service.MAX_BATCH * 2)]
    for test_case in [
            (service.MAX_BATCH - 1, [bogus_data[0:service.MAX_BATCH - 1]]),
            (service.MAX_BATCH, [bogus_data[0:service.MAX_BATCH]]),
            (service.MAX_BATCH + 1, [
                bogus_data[0:service.MAX_BATCH],
                bogus_data[service.MAX_BATCH:service.MAX_BATCH + 1]
            ])
    ]:
        mock_xform.side_effect = (
            lambda ignore_metrics, ignore_fn, result: result.extend(
                bogus_data[0:test_case[0]]))
        with patch('datadog_service.datadog.api.Metric.send') as mock_send:
            self.assertEquals(test_case[0],
                              service.publish_metrics(service_metrics={}))
            self.assertEquals(mock_send.call_args_list,
                              [mock.call(batch) for batch in test_case[1]])
def process_commandline_request(self, options):
    """Implements CommandHandler."""
    title = options.get('name', None)
    if not title:
        raise ValueError('No name provided.')

    transform_json_obj = None
    api = datadog_service.make_datadog_service(options).api
    if self.__type_name == 'screenboard':
        list_method = api.Screenboard.get_all
        get_method = api.Screenboard.get
        result_key = 'screenboards'
    elif self.__type_name == 'timeboard':
        list_method = api.Timeboard.get_all
        get_method = api.Timeboard.get
        result_key = 'dashes'
        transform_json_obj = self.__unpack_dashboard
    else:
        raise ValueError('Unknown datadog artifact "{0}".'
                         ' Either "screenboard" or "timeboard".'
                         .format(self.__type_name))

    all_list = list_method()
    artifact_id = None
    for artifact in all_list[result_key]:
        if artifact['title'] == title:
            artifact_id = artifact['id']
            break
    if artifact_id is None:
        raise ValueError('Could not find title "{0}"'.format(title))

    json_obj = get_method(artifact_id)
    if transform_json_obj:
        json_obj = transform_json_obj(json_obj)
    json_text = json.JSONEncoder(indent=2).encode(json_obj)
    self.output(options, json_text)
def test_publish_metrics(self, mock_initialize, mock_xform):
    data = ["[Main]", "api_key: FOUND_KEY"]
    options = dict()
    with tempfile.NamedTemporaryFile() as config:
        config.write('\n'.join(data))
        config.flush()
        options['dd_agent_config'] = config.name
        options['datadog_host'] = 'testHost'
        service = datadog_service.make_datadog_service(options)

    bogus_data = [i for i in range(0, service.MAX_BATCH * 2)]
    for test_case in [
            (service.MAX_BATCH - 1, [bogus_data[0:service.MAX_BATCH - 1]]),
            (service.MAX_BATCH, [bogus_data[0:service.MAX_BATCH]]),
            (service.MAX_BATCH + 1, [
                bogus_data[0:service.MAX_BATCH],
                bogus_data[service.MAX_BATCH:service.MAX_BATCH + 1]
            ])
    ]:
        mock_xform.side_effect = (
            lambda ignore_metrics, ignore_fn, result: result.extend(
                bogus_data[0:test_case[0]]))
        with patch('datadog_service.datadog.api.Metric.send') as mock_send:
            self.assertEquals(test_case[0],
                              service.publish_metrics(service_metrics={}))
            self.assertEquals(mock_send.call_args_list,
                              [mock.call(batch) for batch in test_case[1]])