def test_put(self):
    """put() persists a payload across storage instances; a failed rename makes put() return None."""
    payload = (1, 2, 3)
    with LocalFileStorage(os.path.join(TEST_FOLDER, "bar")) as storage:
        storage.put(payload)
        self.assertEqual(storage.get().get(), payload)
    # Reopen the same directory: the blob written above must still be readable.
    with LocalFileStorage(os.path.join(TEST_FOLDER, "bar")) as storage:
        self.assertEqual(storage.get().get(), payload)
        # When the final rename fails, put() signals failure by returning None.
        with mock.patch("os.rename", side_effect=throw(Exception)):
            self.assertIsNone(storage.put(payload))
def __init__(self, **options):
    """Build the exporter from keyword options: client for ingestion, local storage for retries."""
    # Telemetry processors run over each envelope before it is sent.
    self._telemetry_processors = []
    self.options = ExporterOptions(**options)
    # Network policies are derived from user-supplied options.
    timeout_policy = RetryPolicy(timeout=self.options.timeout)
    proxies_policy = ProxyPolicy(proxies=self.options.proxies)
    self.client = AzureMonitorClient(
        self.options.endpoint,
        proxy_policy=proxies_policy,
        retry_policy=timeout_policy,
    )
    # Offline persistence for envelopes that could not be transmitted.
    self.storage = LocalFileStorage(
        path=self.options.storage_path,
        max_size=self.options.storage_max_size,
        maintenance_period=self.options.storage_maintenance_period,
        retention_period=self.options.storage_retention_period,
    )
def test_check_storage_size_links(self):
    """Blobs that are symlinks are excluded from the size tally, so storage is not full."""
    payload = (1, 2, 3)
    with LocalFileStorage(os.path.join(TEST_FOLDER, "asd4"), 1000) as storage:
        storage.put(payload)
        # Pretend every entry is a symlink; the size check must still pass.
        with mock.patch("os.path.islink", return_value=True):
            self.assertTrue(storage._check_storage_size())
def test_check_storage_size_error(self):
    """An OSError from os.path.getsize is tolerated; the size check still succeeds."""
    payload = (1, 2, 3)
    with LocalFileStorage(os.path.join(TEST_FOLDER, "asd5"), 1) as storage:
        with mock.patch("os.path.getsize", side_effect=throw(OSError)):
            storage.put(payload)
            # Symlinked entries are skipped, so the failing getsize is never fatal.
            with mock.patch("os.path.islink", return_value=True):
                self.assertTrue(storage._check_storage_size())
def test_put_max_size(self):
    """When the size check reports storage full, put() stores nothing."""
    payload = (1, 2, 3)
    with LocalFileStorage(os.path.join(TEST_FOLDER, "asd")) as storage:
        # Force the capacity check to report "full".
        storage._check_storage_size = mock.Mock(return_value=False)
        storage.put(payload)
        # Nothing was persisted, so get() yields no blob.
        self.assertEqual(storage.get(), None)
def __init__(self, **options):
    """Build the exporter from a connection string, with fixed storage limits under the temp dir."""
    exporter_options = ExporterOptions(**options)
    parsed = ConnectionStringParser(exporter_options.connection_string)
    self._instrumentation_key = parsed.instrumentation_key
    self._timeout = 10.0  # networking timeout in seconds
    # Storage directory is keyed by instrumentation key (empty suffix if absent).
    storage_suffix = self._instrumentation_key or ""
    storage_path = os.path.join(
        tempfile.gettempdir(), TEMPDIR_PREFIX + storage_suffix)
    self.client = AzureMonitorClient(
        parsed.endpoint,
        retry_policy=RetryPolicy(timeout=self._timeout),
    )
    self.storage = LocalFileStorage(
        path=storage_path,
        max_size=50 * 1024 * 1024,  # Maximum size in bytes.
        maintenance_period=60,  # Maintenance interval in seconds.
        retention_period=7 * 24 * 60 * 60,  # Retention period in seconds
    )
def test_get(self):
    """get() skips leased and expired blobs and returns None when no usable blob remains."""
    now = _now()
    with LocalFileStorage(os.path.join(TEST_FOLDER, "foo")) as stor:
        # Blob under an active lease: not eligible for get().
        stor.put((1, 2, 3), lease_period=10)
        with mock.patch(
            "microsoft.opentelemetry.exporter.azuremonitor.storage._now"
        ) as m:
            # Backdate the clock 30 days so these blobs look past retention.
            m.return_value = now - _seconds(30 * 24 * 60 * 60)
            stor.put((1, 2, 3))
            stor.put((1, 2, 3), lease_period=10)
            # rename patched out: the blob files never land in their final name.
            with mock.patch("os.rename"):
                stor.put((1, 2, 3))
        with mock.patch("os.rename"):
            stor.put((1, 2, 3))
        # Both cleanup paths fail; get() must swallow the errors and return None.
        with mock.patch("os.remove", side_effect=throw(Exception)):
            with mock.patch("os.rename", side_effect=throw(Exception)):
                self.assertIsNone(stor.get())
        self.assertIsNone(stor.get())
class BaseExporter:
    """Azure Monitor base exporter for OpenTelemetry.

    Args:
        options: :doc:`export.options` to allow configuration for the exporter
    """

    def __init__(self, **options):
        # Processors applied to every envelope before transmission.
        self._telemetry_processors = []
        self.options = ExporterOptions(**options)
        retry_policy = RetryPolicy(timeout=self.options.timeout)
        proxy_policy = ProxyPolicy(proxies=self.options.proxies)
        self.client = AzureMonitorClient(
            self.options.endpoint,
            proxy_policy=proxy_policy,
            retry_policy=retry_policy)
        # Local persistence for envelopes whose transmission must be retried.
        self.storage = LocalFileStorage(
            path=self.options.storage_path,
            max_size=self.options.storage_max_size,
            maintenance_period=self.options.storage_maintenance_period,
            retention_period=self.options.storage_retention_period,
        )

    def add_telemetry_processor(
        self, processor: typing.Callable[..., typing.Any]
    ) -> None:
        """Adds telemetry processor to the collection.

        Telemetry processors will be called one by one before telemetry
        item is pushed for sending and in the order they were added.

        Args:
            processor: Processor to add
        """
        self._telemetry_processors.append(processor)

    def clear_telemetry_processors(self) -> None:
        """Removes all telemetry processors"""
        self._telemetry_processors = []

    def _apply_telemetry_processors(
        self, envelopes: typing.List[TelemetryItem]
    ) -> typing.List[TelemetryItem]:
        """Applies all telemetry processors in the order they were added.

        This function will return the list of envelopes to be exported after
        each processor has been run sequentially. Individual processors can
        throw exceptions and fail, but the applying of all telemetry processors
        will proceed (not fast fail). Processors also return True if envelope
        should be included for exporting, False otherwise.

        Args:
            envelopes: The envelopes to apply each processor to.
        """
        filtered_envelopes = []
        for envelope in envelopes:
            accepted = True
            for processor in self._telemetry_processors:
                try:
                    # A processor returning exactly False drops the envelope.
                    if processor(envelope) is False:
                        accepted = False
                        break
                except Exception as ex:
                    # Processor failures are logged but never abort the chain.
                    logger.warning("Telemetry processor failed with: %s.", ex)
            if accepted:
                filtered_envelopes.append(envelope)
        return filtered_envelopes

    def _transmit_from_storage(self) -> None:
        """Retry transmission of every blob persisted in local storage."""
        for blob in self.storage.gets():
            # give a few more seconds for blob lease operation
            # to reduce the chance of race (for perf consideration)
            if blob.lease(self.options.timeout + 5):
                envelopes = blob.get()
                result = self._transmit(envelopes)
                if result == ExportResult.FAILED_RETRYABLE:
                    # Release quickly so a later pass can retry this blob.
                    blob.lease(1)
                else:
                    blob.delete()

    # pylint: disable=too-many-branches
    # pylint: disable=too-many-nested-blocks
    # pylint: disable=too-many-return-statements
    def _transmit(self, envelopes: typing.List[TelemetryItem]) -> ExportResult:
        """
        Transmit the data envelopes to the ingestion service.
        Returns an ExportResult, this function should never throw an exception.
        """
        if len(envelopes) > 0:
            try:
                track_response = self.client.track(envelopes)
                if not track_response.errors:
                    logger.info("Transmission succeeded: Item received: %s. Items accepted: %s",
                                track_response.items_received,
                                track_response.items_accepted)
                    return ExportResult.SUCCESS
                # Partial failure: retry retryable items, drop the rest.
                resend_envelopes = []
                for error in track_response.errors:
                    if is_retryable_code(error.statusCode):
                        resend_envelopes.append(
                            envelopes[error.index]
                        )
                    else:
                        logger.error(
                            "Data drop %s: %s %s.",
                            error.statusCode,
                            error.message,
                            envelopes[error.index],
                        )
                if resend_envelopes:
                    # Persist retryable envelopes for a later storage pass.
                    self.storage.put(resend_envelopes)
            except HttpResponseError as response_error:
                if is_retryable_code(response_error.status_code):
                    return ExportResult.FAILED_RETRYABLE
                return ExportResult.FAILED_NOT_RETRYABLE
            except Exception as ex:
                logger.warning(
                    "Retrying due to transient client side error %s.", ex
                )
                # client side error (retryable)
                return ExportResult.FAILED_RETRYABLE
            return ExportResult.FAILED_NOT_RETRYABLE
        # No spans to export
        return ExportResult.SUCCESS
def test_get_nothing(self):
    """get() returns None for a directory tree that holds no blobs."""
    # Create a nested storage directory but never write to it.
    with LocalFileStorage(os.path.join(TEST_FOLDER, "test", "a")) as storage:
        pass
    # The parent directory contains only the subdirectory, no blobs.
    with LocalFileStorage(os.path.join(TEST_FOLDER, "test")) as storage:
        self.assertIsNone(storage.get())
# NOTE(review): "maintanence" is a typo for "maintenance"; kept to avoid
# breaking anything keyed on the test name.
def test_maintanence_routine(self):
    """Constructor and maintenance routine tolerate os-level failures without raising."""
    with mock.patch("os.makedirs") as m:
        with LocalFileStorage(os.path.join(TEST_FOLDER, "baz")) as stor:
            m.return_value = None
    # Failing makedirs at construction time must not raise.
    with mock.patch("os.makedirs", side_effect=throw(Exception)):
        stor = LocalFileStorage(os.path.join(TEST_FOLDER, "baz"))
        stor.close()
    # Failing listdir at construction time must not raise either.
    with mock.patch("os.listdir", side_effect=throw(Exception)):
        stor = LocalFileStorage(os.path.join(TEST_FOLDER, "baz"))
        stor.close()
    with LocalFileStorage(os.path.join(TEST_FOLDER, "baz")) as stor:
        # The periodic maintenance pass swallows filesystem errors.
        with mock.patch("os.listdir", side_effect=throw(Exception)):
            stor._maintenance_routine()
        with mock.patch("os.path.isdir", side_effect=throw(Exception)):
            stor._maintenance_routine()
def test_check_storage_size_no_files(self):
    """An empty storage directory is trivially under its size cap."""
    with LocalFileStorage(os.path.join(TEST_FOLDER, "asd3"), 1000) as storage:
        self.assertTrue(storage._check_storage_size())
def test_check_storage_size_not_full(self):
    """One small blob keeps storage below the 1000-byte cap."""
    payload = (1, 2, 3)
    with LocalFileStorage(os.path.join(TEST_FOLDER, "asd3"), 1000) as storage:
        storage.put(payload)
        self.assertTrue(storage._check_storage_size())
class BaseExporter:
    """Azure Monitor base exporter for OpenTelemetry.

    Args:
        options: :doc:`export.options` to allow configuration for the exporter
    """

    def __init__(self, **options):
        options = ExporterOptions(**options)
        parsed_connection_string = ConnectionStringParser(
            options.connection_string)
        self._instrumentation_key = parsed_connection_string.instrumentation_key
        self._timeout = 10.0  # networking timeout in seconds
        # Storage directory is keyed by instrumentation key (empty if absent).
        temp_suffix = self._instrumentation_key or ""
        default_storage_path = os.path.join(
            tempfile.gettempdir(), TEMPDIR_PREFIX + temp_suffix)
        retry_policy = RetryPolicy(timeout=self._timeout)
        self.client = AzureMonitorClient(
            parsed_connection_string.endpoint, retry_policy=retry_policy)
        # Local persistence for envelopes whose transmission must be retried.
        self.storage = LocalFileStorage(
            path=default_storage_path,
            max_size=50 * 1024 * 1024,  # Maximum size in bytes.
            maintenance_period=60,  # Maintenance interval in seconds.
            retention_period=7 * 24 * 60 * 60,  # Retention period in seconds
        )

    def _transmit_from_storage(self) -> None:
        """Retry transmission of every blob persisted in local storage."""
        for blob in self.storage.gets():
            # give a few more seconds for blob lease operation
            # to reduce the chance of race (for perf consideration)
            if blob.lease(self._timeout + 5):
                envelopes = blob.get()
                result = self._transmit(list(envelopes))
                if result == ExportResult.FAILED_RETRYABLE:
                    # Short lease so a later pass can retry this blob soon.
                    blob.lease(1)
                else:
                    blob.delete()

    # pylint: disable=too-many-branches
    # pylint: disable=too-many-nested-blocks
    # pylint: disable=too-many-return-statements
    def _transmit(self, envelopes: typing.List[TelemetryItem]) -> ExportResult:
        """
        Transmit the data envelopes to the ingestion service.
        Returns an ExportResult, this function should never throw an exception.
        """
        if len(envelopes) > 0:
            try:
                track_response = self.client.track(envelopes)
                if not track_response.errors:
                    logger.info(
                        "Transmission succeeded: Item received: %s. Items accepted: %s",
                        track_response.items_received,
                        track_response.items_accepted)
                    return ExportResult.SUCCESS
                # Partial failure: retry retryable items, drop the rest.
                resend_envelopes = []
                for error in track_response.errors:
                    if is_retryable_code(error.statusCode):
                        resend_envelopes.append(envelopes[error.index])
                    else:
                        logger.error(
                            "Data drop %s: %s %s.",
                            error.statusCode,
                            error.message,
                            envelopes[error.index],
                        )
                if resend_envelopes:
                    # FIX: materialize to a list. The previous lazy ``map``
                    # object is single-use and not JSON-serializable, so the
                    # persisted payload would have been empty or invalid.
                    envelopes_to_store = [x.as_dict() for x in resend_envelopes]
                    self.storage.put(envelopes_to_store)
            except HttpResponseError as response_error:
                if is_retryable_code(response_error.status_code):
                    return ExportResult.FAILED_RETRYABLE
                return ExportResult.FAILED_NOT_RETRYABLE
            except Exception as ex:
                logger.warning(
                    "Retrying due to transient client side error %s.", ex)
                # client side error (retryable)
                return ExportResult.FAILED_RETRYABLE
            return ExportResult.FAILED_NOT_RETRYABLE
        # No spans to export
        return ExportResult.SUCCESS