def test_collect_statsbeat_metrics_eu(self, thread_mock, stats_mock):
    # pylint: disable=protected-access
    # An EU ingestion endpoint should route statsbeat to the default EU
    # statsbeat connection string when no environment override is set.
    conn_str = "InstrumentationKey=1aa11111-bbbb-1ccc-8ddd-eeeeffff3333;IngestionEndpoint=https://northeurope-0.in.applicationinsights.azure.com/"  # noqa: E501
    eu_options = Options(connection_string=conn_str)
    env_patch = {"APPLICATION_INSIGHTS_STATS_CONNECTION_STRING": ""}
    with mock.patch.dict(os.environ, env_patch):
        statsbeat_metrics.collect_statsbeat_metrics(eu_options)
        # A statsbeat producer and exporter must have been created.
        self.assertTrue(isinstance(
            statsbeat_metrics._STATSBEAT_METRICS,
            statsbeat_metrics._AzureStatsbeatMetricsProducer))
        self.assertTrue(isinstance(
            statsbeat_metrics._STATSBEAT_EXPORTER,
            MetricsExporter))
        # Parse the expected ikey/endpoint out of the default EU string.
        expected_ikey = _DEFAULT_EU_STATS_CONNECTION_STRING.split(";")[0].split("=")[1]  # noqa: E501
        expected_endpoint = _DEFAULT_EU_STATS_CONNECTION_STRING.split(";")[1].split("=")[1]  # noqa: E501
        self.assertEqual(
            statsbeat_metrics._STATSBEAT_EXPORTER.options.instrumentation_key,
            expected_ikey)
        self.assertEqual(
            statsbeat_metrics._STATSBEAT_EXPORTER.options.endpoint,
            expected_endpoint)
def test_transmission_206_500(self):
    # Partial success (206) containing a retryable 500 error: only the
    # retryable item should remain in storage afterwards.
    exporter = trace_exporter.AzureExporter(
        Options(
            instrumentation_key='12345678-1234-5678-abcd-12345678abcd',
            storage_path=os.path.join(TEST_FOLDER, '206.500'),
        ),
    )
    exporter.storage.put([1, 2, 3, 4, 5])
    response_body = json.dumps({
        'itemsReceived': 5,
        'itemsAccepted': 3,
        'errors': [
            {'index': 0, 'statusCode': 400, 'message': ''},
            {'index': 2, 'statusCode': 500,
             'message': 'Internal Server Error'},
        ],
    })
    with mock.patch('requests.post') as post:
        post.return_value = MockResponse(206, response_body)
        exporter._transmission_routine()
    # One blob left in storage, holding just the 500-failed item.
    self.assertEqual(len(os.listdir(exporter.storage.path)), 1)
    self.assertEqual(exporter.storage.get().get(), (3,))
def test_statsbeat_206_no_retry(self):
    # A 206 (partial success) is recorded as a failure in the statsbeat
    # request map and reported via a negative result code.
    _requests_map.clear()
    mixin = TransportMixin()
    mixin.options = Options()
    response_body = json.dumps({
        'itemsReceived': 3,
        'itemsAccepted': 2,
        'errors': [
            {'index': 0, 'statusCode': 400, 'message': ''},
        ],
    })
    with mock.patch('requests.post') as post:
        post.return_value = MockResponse(206, response_body)
        result = mixin._transmit([1, 2, 3])
    self.assertEqual(len(_requests_map), 3)
    self.assertIsNotNone(_requests_map['duration'])
    self.assertEqual(_requests_map['count'], 1)
    self.assertEqual(_requests_map['failure'], 1)
    self.assertEqual(result, -206)
    _requests_map.clear()
def test_create_envelope(self):
    # create_envelope should produce a fully-populated metric envelope.
    metric = create_metric()
    options = Options(
        instrumentation_key='12345678-1234-5678-abcd-12345678abcd')
    exporter = metrics_exporter.MetricsExporter(options)
    point_value = metric.time_series[0].points[0].value.value
    data_point = DataPoint(
        ns=metric.descriptor.name,
        name=metric.descriptor.name,
        value=point_value,
    )
    timestamp = datetime(2019, 3, 20, 21, 34, 0, 537954)
    properties = {'url': 'website.com'}
    envelope = exporter.create_envelope(data_point, timestamp, properties)
    # Top-level envelope fields.
    self.assertTrue('iKey' in envelope)
    self.assertEqual(envelope.iKey, options.instrumentation_key)
    self.assertTrue('tags' in envelope)
    self.assertTrue('time' in envelope)
    self.assertEqual(envelope.time, timestamp.isoformat())
    self.assertTrue('name' in envelope)
    self.assertEqual(envelope.name, 'Microsoft.ApplicationInsights.Metric')
    # Payload fields.
    self.assertTrue('data' in envelope)
    self.assertTrue('baseData' in envelope.data)
    self.assertTrue('baseType' in envelope.data)
    self.assertTrue('metrics' in envelope.data.baseData)
    self.assertTrue('properties' in envelope.data.baseData)
    self.assertEqual(envelope.data.baseData.properties, properties)
def test_constructor_invalid_batch_size(self):
    # A non-positive max_batch_size must be rejected at construction.
    options = Options(
        instrumentation_key='12345678-1234-5678-abcd-12345678abcd',
        max_batch_size=-1,
    )
    with self.assertRaises(ValueError):
        metrics_exporter.MetricsExporter(options=options)
def test_export_metrics_empty(self, requests_mock):
    # Exporting an empty batch must not hit the network at all.
    opts = Options(
        instrumentation_key='12345678-1234-5678-abcd-12345678abcd')
    exporter = metrics_exporter.MetricsExporter(opts)
    exporter.export_metrics([])
    self.assertEqual(len(requests_mock.call_args_list), 0)
def test_transmit_client_error(self, logger_mock):
    # Transmitting an invalid payload should log exactly once, not raise.
    opts = Options(
        instrumentation_key='12345678-1234-5678-abcd-12345678abcd')
    exporter = metrics_exporter.MetricsExporter(opts)
    exporter._transmit_without_retry(mock.Mock())
    self.assertEqual(len(logger_mock.call_args_list), 1)
def __init__(self, **options):
    """Log handler that queues records and exports them in batches.

    Accepts the same keyword arguments as ``Options``. Validates the
    instrumentation key and sampling rate, optionally sets up local
    file storage for unsent telemetry, and starts a background worker.

    Raises:
        ValueError: if ``logging_sampling_rate`` is outside [0, 1]
            (instrumentation-key validation may also raise).
    """
    super(BaseLogHandler, self).__init__()
    self.options = Options(**options)
    utils.validate_instrumentation_key(self.options.instrumentation_key)
    if not 0 <= self.options.logging_sampling_rate <= 1:
        raise ValueError('Sampling must be in the range: [0,1]')
    self.export_interval = self.options.export_interval
    self.max_batch_size = self.options.max_batch_size
    self.storage = None
    if self.options.enable_local_storage:
        # Persist unsent telemetry on disk so it can be retried later.
        self.storage = LocalFileStorage(
            path=self.options.storage_path,
            max_size=self.options.storage_max_size,
            maintenance_period=self.options.storage_maintenance_period,
            retention_period=self.options.storage_retention_period,
            source=self.__class__.__name__,
        )
    self._telemetry_processors = []
    # Drop records according to the configured sampling rate.
    self.addFilter(SamplingFilter(self.options.logging_sampling_rate))
    self._queue = Queue(capacity=self.options.queue_capacity)
    self._worker = Worker(self._queue, self)
    self._worker.start()
    # Flush pending telemetry (bounded by grace_period) at shutdown.
    atexit.register(self.close, self.options.grace_period)
    # start statsbeat on exporter instantiation
    if not os.environ.get("APPLICATIONINSIGHTS_STATSBEAT_DISABLED_ALL"):
        statsbeat_metrics.collect_statsbeat_metrics(self.options)
    # For redirects
    self._consecutive_redirects = 0  # To prevent circular redirects
def new_metrics_exporter(**options):
    """Create a ``MetricsExporter`` and start its background export thread.

    Keyword arguments are forwarded to ``Options``. Returns the exporter.
    """
    opts = Options(**options)
    exporter = MetricsExporter(options=opts)
    transport.get_exporter_thread(stats.stats,
                                  exporter,
                                  interval=opts.export_interval)
    return exporter
def test_transmission_206_500(self):
    # Partial success (206) with one retryable (500) error: only the
    # retryable item is written back to storage.
    mixin = TransportMixin()
    mixin.options = Options()
    with LocalFileStorage(os.path.join(TEST_FOLDER, self.id())) as stor:
        mixin.storage = stor
        mixin.storage.put([1, 2, 3, 4, 5])
        response_body = json.dumps({
            'itemsReceived': 5,
            'itemsAccepted': 3,
            'errors': [
                {'index': 0, 'statusCode': 400, 'message': ''},
                {'index': 2, 'statusCode': 500,
                 'message': 'Internal Server Error'},
            ],
        })
        with mock.patch('requests.post') as post:
            post.return_value = MockResponse(206, response_body)
            mixin._transmit_from_storage()
        self.assertEqual(len(os.listdir(mixin.storage.path)), 1)
        self.assertEqual(mixin.storage.get().get(), (3,))
def test_transmission_auth(self):
    # With a credential configured, transmission must fetch an AAD token
    # and send it as a Bearer Authorization header on the v2.1 endpoint.
    mixin = TransportMixin()
    mixin.options = Options()
    url = 'https://dc.services.visualstudio.com'
    mixin.options.endpoint = url
    credential = mock.Mock()
    mixin.options.credential = credential
    token_mock = mock.Mock()
    token_mock.token = "test_token"
    credential.get_token.return_value = token_mock
    data = '[1, 2, 3]'
    headers = {
        'Accept': 'application/json',
        'Content-Type': 'application/json; charset=utf-8',
        'Authorization': 'Bearer test_token',
    }
    with LocalFileStorage(os.path.join(TEST_FOLDER, self.id())) as stor:
        mixin.storage = stor
        mixin.storage.put([1, 2, 3])
        with mock.patch('requests.post') as post:
            post.return_value = MockResponse(200, 'unknown')
            mixin._transmit_from_storage()
        post.assert_called_with(
            url=url + '/v2.1/track',
            data=data,
            headers=headers,
            timeout=10.0,
            proxies={},
        )
        credential.get_token.assert_called_with(_MONITOR_OAUTH_SCOPE)
        # Successful transmission empties the storage folder.
        self.assertIsNone(mixin.storage.get())
        self.assertEqual(len(os.listdir(mixin.storage.path)), 0)
        credential.get_token.assert_called_once()
def __init__(self, options=None):
    """Initialize the exporter, defaulting to a fresh ``Options``.

    Raises:
        ValueError: if the instrumentation key fails validation or the
            configured max batch size is not positive.
    """
    self.options = Options() if options is None else options
    utils.validate_instrumentation_key(self.options.instrumentation_key)
    if self.options.max_batch_size <= 0:
        raise ValueError('Max batch size must be at least 1.')
    self.max_batch_size = self.options.max_batch_size
def test_get_stats_connection_string_eu(self):
    # An EU ingestion endpoint maps to the default EU statsbeat
    # connection string when no environment override is set.
    env_patch = {"APPLICATION_INSIGHTS_STATS_CONNECTION_STRING": ""}
    with mock.patch.dict(os.environ, env_patch):
        conn_str = "InstrumentationKey=1aa11111-bbbb-1ccc-8ddd-eeeeffff3333;IngestionEndpoint=https://northeurope-0.in.applicationinsights.azure.com/"  # noqa: E501
        eu_options = Options(connection_string=conn_str)
        stats_cs = _get_stats_connection_string(eu_options.endpoint)
        self.assertEqual(stats_cs, _DEFAULT_EU_STATS_CONNECTION_STRING)
def test_export_metrics_histogram(self):
    # A cumulative-distribution metric should export without raising
    # and return None.
    metric = create_metric()
    opts = Options(
        instrumentation_key='12345678-1234-5678-abcd-12345678abcd')
    metric.descriptor._type = MetricDescriptorType.CUMULATIVE_DISTRIBUTION
    exporter = metrics_exporter.MetricsExporter(opts)
    self.assertIsNone(exporter.export_metrics([metric]))
def test_transmit_no_response(self, requests_mock, logger_mock):
    # One POST attempt and one log call when no response comes back.
    envelope = create_envelope()
    opts = Options(
        instrumentation_key='12345678-1234-5678-abcd-12345678abcd')
    exporter = metrics_exporter.MetricsExporter(opts)
    exporter._transmit_without_retry([envelope])
    self.assertEqual(len(requests_mock.call_args_list), 1)
    self.assertEqual(len(logger_mock.call_args_list), 1)
def __init__(self, options=None):
    """Initialize the exporter, defaulting to a fresh ``Options``.

    Raises:
        ValueError: if no instrumentation key is provided or the
            configured max batch size is not positive.
    """
    self.options = Options() if options is None else options
    if not self.options.instrumentation_key:
        raise ValueError('The instrumentation_key is not provided.')
    if self.options.max_batch_size <= 0:
        raise ValueError('Max batch size must be at least 1.')
    self.max_batch_size = self.options.max_batch_size
def __init__(self, **options):
    """Exporter that persists unsent telemetry in local file storage.

    Keyword arguments are forwarded to ``Options``; the instrumentation
    key is validated before any storage is created.
    """
    self.options = Options(**options)
    utils.validate_instrumentation_key(self.options.instrumentation_key)
    # Buffer telemetry on disk so failed transmissions can be retried.
    self.storage = LocalFileStorage(
        path=self.options.storage_path,
        max_size=self.options.storage_max_size,
        maintenance_period=self.options.storage_maintenance_period,
        retention_period=self.options.storage_retention_period,
    )
    super(AzureExporter, self).__init__(**options)
def test_transmission_pre_exception(self):
    # If the POST itself raises, the stored blob stays on disk for a
    # later retry attempt.
    mixin = TransportMixin()
    mixin.options = Options()
    with LocalFileStorage(os.path.join(TEST_FOLDER, self.id())) as stor:
        mixin.storage = stor
        mixin.storage.put([1, 2, 3])
        with mock.patch('requests.post', throw(Exception)):
            mixin._transmit_from_storage()
        self.assertIsNone(mixin.storage.get())
        self.assertEqual(len(os.listdir(mixin.storage.path)), 1)
def test_transmission_400(self):
    # A 400 response is non-retryable: the stored batch is dropped.
    mixin = TransportMixin()
    mixin.options = Options()
    with LocalFileStorage(os.path.join(TEST_FOLDER, self.id())) as stor:
        mixin.storage = stor
        mixin.storage.put([1, 2, 3])
        with mock.patch('requests.post') as post:
            post.return_value = MockResponse(400, '{}')
            mixin._transmit_from_storage()
        self.assertEqual(len(os.listdir(mixin.storage.path)), 0)
def test_get_feature_metric_zero(self):
    # pylint: disable=protected-access
    # With local storage and credential both disabled, no feature
    # metric should be produced.
    opts = Options(
        instrumentation_key="ikey",
        enable_local_storage=False,
        credential=None,
    )
    stats = _StatsbeatMetrics(opts)
    self.assertIsNone(stats._get_feature_metric())
def test_create_properties(self):
    # create_properties maps the time series label to a single
    # key/value property.
    metric = create_metric()
    opts = Options(
        instrumentation_key='12345678-1234-5678-abcd-12345678abcd')
    exporter = metrics_exporter.MetricsExporter(opts)
    properties = exporter.create_properties(
        metric.time_series[0], metric.descriptor)
    self.assertEqual(len(properties), 1)
    self.assertEqual(properties['key'], 'val')
def new_metrics_exporter(**options):
    """Create a ``MetricsExporter`` and start its background export thread.

    Keyword arguments are forwarded to ``Options``. When standard
    metrics are enabled, their producer is exported as well.
    """
    opts = Options(**options)
    exporter = MetricsExporter(options=opts)
    producers = [stats_module.stats]
    if opts.enable_standard_metrics:
        producers.append(standard_metrics.producer)
    transport.get_exporter_thread(producers,
                                  exporter,
                                  interval=opts.export_interval)
    return exporter
def test_export(self):
    # export() must delegate to the configured transport.
    exporter = trace_exporter.AzureExporter(
        Options(
            instrumentation_key='12345678-1234-5678-abcd-12345678abcd',
            storage_path=os.path.join(TEST_FOLDER, 'foo'),
        ),
    )
    exporter.transport = MockTransport()
    exporter.export(None)
    self.assertTrue(exporter.transport.export_called)
def __init__(self, **options):
    """Base exporter that batches telemetry via a background worker.

    Keyword arguments are forwarded to ``Options``.
    """
    opts = Options(**options)
    self.export_interval = opts.export_interval
    self.max_batch_size = opts.max_batch_size
    # TODO: queue should be moved to tracer
    # too much refactor work, leave to the next PR
    self._queue = Queue(capacity=opts.queue_capacity)
    # TODO: worker should not be created in the base exporter
    self._worker = Worker(self._queue, self)
    self._worker.start()
    # Give the worker a bounded grace period to drain at interpreter exit.
    atexit.register(self._worker.stop, opts.grace_period)
def test_check_stats_collection(self):
    mixin = TransportMixin()
    mixin.options = Options()
    # Enabled and not a statsbeat transmission -> collect.
    mixin.options.enable_stats_metrics = True
    self.assertTrue(mixin._check_stats_collection())
    mixin._is_stats = False
    self.assertTrue(mixin._check_stats_collection())
    # A statsbeat transmission itself must not be counted.
    mixin._is_stats = True
    self.assertFalse(mixin._check_stats_collection())
    # Disabled -> never collect.
    mixin.options.enable_stats_metrics = False
    self.assertFalse(mixin._check_stats_collection())
def test_transmission_cred_exception(self):
    # A CredentialUnavailableError during POST is non-retryable: the
    # stored batch is discarded.
    mixin = TransportMixin()
    mixin.options = Options()
    with LocalFileStorage(os.path.join(TEST_FOLDER, self.id())) as stor:
        mixin.storage = stor
        mixin.storage.put([1, 2, 3])
        with mock.patch('requests.post', throw(CredentialUnavailableError)):  # noqa: E501
            mixin._transmit_from_storage()
        self.assertIsNone(mixin.storage.get())
        self.assertEqual(len(os.listdir(mixin.storage.path)), 0)
def __init__(self, **options):
    """Exporter that persists unsent telemetry in local file storage.

    Keyword arguments are forwarded to ``Options``.

    Raises:
        ValueError: if no instrumentation key is provided.
    """
    self.options = Options(**options)
    if not self.options.instrumentation_key:
        raise ValueError('The instrumentation_key is not provided.')
    # Buffer telemetry on disk so failed transmissions can be retried.
    self.storage = LocalFileStorage(
        path=self.options.storage_path,
        max_size=self.options.storage_max_size,
        maintenance_period=self.options.storage_maintenance_period,
        retention_period=self.options.storage_retention_period,
    )
    super(AzureExporter, self).__init__(**options)
def test_transmission_nothing(self):
    # With nothing stored, the transmission routine must complete
    # without error even when requests.post yields no response.
    exporter = trace_exporter.AzureExporter(
        Options(
            instrumentation_key='12345678-1234-5678-abcd-12345678abcd',
            storage_path=os.path.join(TEST_FOLDER, 'baz'),
        ),
    )
    with mock.patch('requests.post') as post:
        post.return_value = None
        exporter._transmission_routine()
def __init__(self, **options):
    """Exporter that buffers unsent telemetry in local file storage.

    Keyword arguments are forwarded to ``Options``.
    """
    self.options = Options(**options)
    # Buffer telemetry on disk so failed transmissions can be retried.
    self.storage = LocalFileStorage(
        path=self.options.storage_path,
        max_size=self.options.storage_max_size,
        maintenance_period=self.options.storage_maintenance_period,
        retention_period=self.options.storage_retention_period,
        source=self.__class__.__name__,
    )
    self._telemetry_processors = []
    super(AzureExporter, self).__init__(**options)
def test_transmission_400(self):
    # A 400 response is non-retryable: stored telemetry is discarded.
    exporter = trace_exporter.AzureExporter(
        Options(
            instrumentation_key='12345678-1234-5678-abcd-12345678abcd',
            storage_path=os.path.join(TEST_FOLDER, '400'),
        ),
    )
    exporter.storage.put([1, 2, 3])
    with mock.patch('requests.post') as post:
        post.return_value = MockResponse(400, '{}')
        exporter._transmission_routine()
    self.assertEqual(len(os.listdir(exporter.storage.path)), 0)