def __init__(self, **kwargs):
    """Configure the log handler: options, sampling, storage, worker, statsbeat."""
    super(BaseLogHandler, self).__init__()
    self.options = Options(**kwargs)
    utils.validate_instrumentation_key(self.options.instrumentation_key)
    rate = self.options.logging_sampling_rate
    if not 0 <= rate <= 1:
        raise ValueError('Sampling must be in the range: [0,1]')
    self.export_interval = self.options.export_interval
    self.max_batch_size = self.options.max_batch_size
    # Local storage is optional; it buffers telemetry when export fails.
    if self.options.enable_local_storage:
        self.storage = LocalFileStorage(
            path=self.options.storage_path,
            max_size=self.options.storage_max_size,
            maintenance_period=self.options.storage_maintenance_period,
            retention_period=self.options.storage_retention_period,
            source=self.__class__.__name__,
        )
    else:
        self.storage = None
    self._telemetry_processors = []
    self.addFilter(SamplingFilter(rate))
    self._queue = Queue(capacity=self.options.queue_capacity)
    self._worker = Worker(self._queue, self)
    self._worker.start()
    # Flush remaining telemetry (bounded by grace_period) at interpreter exit.
    atexit.register(self.close, self.options.grace_period)
    # start statsbeat on exporter instantiation
    if not os.environ.get("APPLICATIONINSIGHTS_STATSBEAT_DISABLED_ALL"):
        statsbeat_metrics.collect_statsbeat_metrics(self.options)
    # For redirects
    self._consecutive_redirects = 0  # To prevent circular redirects
def __init__(self, options=None):
    """Store validated exporter options, defaulting when none are given.

    :param options: pre-built ``Options`` instance, or ``None`` for defaults.
    :raises ValueError: if ``max_batch_size`` is not at least 1.
    """
    self.options = Options() if options is None else options
    utils.validate_instrumentation_key(self.options.instrumentation_key)
    if self.options.max_batch_size <= 0:
        raise ValueError('Max batch size must be at least 1.')
    self.max_batch_size = self.options.max_batch_size
def __init__(self, **kwargs):
    """Validate options and attach local file storage before delegating up."""
    self.options = Options(**kwargs)
    utils.validate_instrumentation_key(self.options.instrumentation_key)
    opts = self.options
    # Buffers telemetry on disk across transient export failures.
    self.storage = LocalFileStorage(
        path=opts.storage_path,
        max_size=opts.storage_max_size,
        maintenance_period=opts.storage_maintenance_period,
        retention_period=opts.storage_retention_period,
    )
    super(AzureExporter, self).__init__(**kwargs)
def __init__(self, **kwargs):
    """Validate options, set up storage, and register a shutdown hook."""
    self.options = Options(**kwargs)
    utils.validate_instrumentation_key(self.options.instrumentation_key)
    opts = self.options
    # Buffers telemetry on disk across transient export failures.
    self.storage = LocalFileStorage(
        path=opts.storage_path,
        max_size=opts.storage_max_size,
        maintenance_period=opts.storage_maintenance_period,
        retention_period=opts.storage_retention_period,
        source=self.__class__.__name__,
    )
    self._telemetry_processors = []
    super(AzureExporter, self).__init__(**kwargs)
    # Stop the exporter (bounded by grace_period) at interpreter exit.
    atexit.register(self._stop, self.options.grace_period)
def __init__(self, **kwargs):
    """Validate options, set up storage, and enable heartbeat metrics."""
    self.options = Options(**kwargs)
    utils.validate_instrumentation_key(self.options.instrumentation_key)
    opts = self.options
    # Buffers telemetry on disk across transient export failures.
    self.storage = LocalFileStorage(
        path=opts.storage_path,
        max_size=opts.storage_max_size,
        maintenance_period=opts.storage_maintenance_period,
        retention_period=opts.storage_retention_period,
        source=self.__class__.__name__,
    )
    self._telemetry_processors = []
    super(AzureExporter, self).__init__(**kwargs)
    heartbeat_metrics.enable_heartbeat_metrics(
        opts.connection_string, opts.instrumentation_key)
def _get_app_insights_key(self):
    """Return the Application Insights instrumentation key.

    Reads the key from the environment variable named by
    ``self.APPINSIGHTS_INSTRUMENTATION_KEY`` on first use and caches it on
    ``self.app_insights_key``; the key is validated before being returned.

    :return: the cached or freshly read instrumentation key.
    :raises Exception: if the key is unset or fails validation.
    """
    if self.app_insights_key is None:
        self.app_insights_key = getenv(
            self.APPINSIGHTS_INSTRUMENTATION_KEY, None
        )
    if self.app_insights_key is None:
        # Fail loudly: telemetry cannot be sent without a key. Raised
        # directly so it is no longer swallowed and re-wrapped by the
        # validation handler below (the original broad except obscured it).
        raise Exception("ApplicationInsights Key is not set")
    try:
        utils.validate_instrumentation_key(self.app_insights_key)
    except Exception as exp:
        # Chain the validation error so the root cause stays visible.
        raise Exception(
            f"Exception in getting app insights key-> {exp}"
        ) from exp
    return self.app_insights_key
def __init__(self, **kwargs):
    """Validate metrics-exporter options and attach local file storage.

    :raises ValueError: if ``max_batch_size`` is not at least 1.
    """
    self.options = Options(**kwargs)
    utils.validate_instrumentation_key(self.options.instrumentation_key)
    if self.options.max_batch_size <= 0:
        raise ValueError('Max batch size must be at least 1.')
    self.export_interval = self.options.export_interval
    self.max_batch_size = self.options.max_batch_size
    self._telemetry_processors = []
    opts = self.options
    # Buffers telemetry on disk across transient export failures.
    self.storage = LocalFileStorage(
        path=opts.storage_path,
        max_size=opts.storage_max_size,
        maintenance_period=opts.storage_maintenance_period,
        retention_period=opts.storage_retention_period,
    )
    super(MetricsExporter, self).__init__()
def __init__(self, **kwargs):
    """Validate log-handler options, set up storage and sampling.

    :raises ValueError: if the sampling rate is outside [0, 1].
    """
    self.options = Options(**kwargs)
    utils.validate_instrumentation_key(self.options.instrumentation_key)
    rate = self.options.logging_sampling_rate
    if not 0 <= rate <= 1:
        raise ValueError('Sampling must be in the range: [0,1]')
    self.export_interval = self.options.export_interval
    self.max_batch_size = self.options.max_batch_size
    opts = self.options
    # Buffers telemetry on disk across transient export failures.
    self.storage = LocalFileStorage(
        path=opts.storage_path,
        max_size=opts.storage_max_size,
        maintenance_period=opts.storage_maintenance_period,
        retention_period=opts.storage_retention_period,
    )
    self._telemetry_processors = []
    super(AzureLogHandler, self).__init__()
    self.addFilter(SamplingFilter(rate))
def __init__(self, **kwargs):
    """Validate metrics-exporter options, set up storage and shutdown hook.

    :raises ValueError: if ``max_batch_size`` is not at least 1.
    """
    self.options = Options(**kwargs)
    utils.validate_instrumentation_key(self.options.instrumentation_key)
    if self.options.max_batch_size <= 0:
        raise ValueError('Max batch size must be at least 1.')
    self.export_interval = self.options.export_interval
    self.max_batch_size = self.options.max_batch_size
    self._telemetry_processors = []
    opts = self.options
    # Buffers telemetry on disk across transient export failures.
    self.storage = LocalFileStorage(
        path=opts.storage_path,
        max_size=opts.storage_max_size,
        maintenance_period=opts.storage_maintenance_period,
        retention_period=opts.storage_retention_period,
        source=self.__class__.__name__,
    )
    # Keep the handle so the hook can later be unregistered if needed.
    self._atexit_handler = atexit.register(self.shutdown)
    self.exporter_thread = None
    super(MetricsExporter, self).__init__()
def __init__(self, **kwargs):
    """Configure the log handler: options, sampling, storage, and worker.

    :raises ValueError: if the sampling rate is outside [0, 1].
    """
    super(BaseLogHandler, self).__init__()
    self.options = Options(**kwargs)
    utils.validate_instrumentation_key(self.options.instrumentation_key)
    rate = self.options.logging_sampling_rate
    if not 0 <= rate <= 1:
        raise ValueError('Sampling must be in the range: [0,1]')
    self.export_interval = self.options.export_interval
    self.max_batch_size = self.options.max_batch_size
    opts = self.options
    # Buffers telemetry on disk across transient export failures.
    self.storage = LocalFileStorage(
        path=opts.storage_path,
        max_size=opts.storage_max_size,
        maintenance_period=opts.storage_maintenance_period,
        retention_period=opts.storage_retention_period,
    )
    self._telemetry_processors = []
    self.addFilter(SamplingFilter(rate))
    self._queue = Queue(capacity=8192)  # TODO: make this configurable
    self._worker = Worker(self._queue, self)
    self._worker.start()
def __init__(self, **kwargs):
    """Validate options, set up optional storage, statsbeat, and shutdown."""
    self.options = Options(**kwargs)
    utils.validate_instrumentation_key(self.options.instrumentation_key)
    # Local storage is optional; it buffers telemetry when export fails.
    if self.options.enable_local_storage:
        self.storage = LocalFileStorage(
            path=self.options.storage_path,
            max_size=self.options.storage_max_size,
            maintenance_period=self.options.storage_maintenance_period,
            retention_period=self.options.storage_retention_period,
            source=self.__class__.__name__,
        )
    else:
        self.storage = None
    self._telemetry_processors = []
    super(AzureExporter, self).__init__(**kwargs)
    # Stop the exporter (bounded by grace_period) at interpreter exit.
    atexit.register(self._stop, self.options.grace_period)
    # start statsbeat on exporter instantiation
    if not os.environ.get("APPLICATIONINSIGHTS_STATSBEAT_DISABLED_ALL"):
        statsbeat_metrics.collect_statsbeat_metrics(self.options)
    # For redirects
    self._consecutive_redirects = 0  # To prevent circular redirects
def __init__(self, **kwargs):
    """Configure the log handler: sampling, storage, worker, heartbeat.

    :raises ValueError: if the sampling rate is outside [0, 1].
    """
    super(BaseLogHandler, self).__init__()
    self.options = Options(**kwargs)
    utils.validate_instrumentation_key(self.options.instrumentation_key)
    rate = self.options.logging_sampling_rate
    if not 0 <= rate <= 1:
        raise ValueError('Sampling must be in the range: [0,1]')
    self.export_interval = self.options.export_interval
    self.max_batch_size = self.options.max_batch_size
    opts = self.options
    # Buffers telemetry on disk across transient export failures.
    self.storage = LocalFileStorage(
        path=opts.storage_path,
        max_size=opts.storage_max_size,
        maintenance_period=opts.storage_maintenance_period,
        retention_period=opts.storage_retention_period,
        source=self.__class__.__name__,
    )
    self._telemetry_processors = []
    self.addFilter(SamplingFilter(rate))
    self._queue = Queue(capacity=opts.queue_capacity)
    self._worker = Worker(self._queue, self)
    self._worker.start()
    heartbeat_metrics.enable_heartbeat_metrics(
        opts.connection_string, opts.instrumentation_key)
def test_invalid_key_section4_length(self):
    """A key whose dashed sections have the wrong lengths is rejected."""
    key = '1234abcd-678-4efa-8bc-11234567890ab'
    with self.assertRaises(ValueError):
        utils.validate_instrumentation_key(key)
def test_invalid_key_variant(self):
    """A well-formed UUID with an invalid variant nibble is rejected."""
    key = '1234abcd-5678-4efa-2abc-1234567890ab'
    with self.assertRaises(ValueError):
        utils.validate_instrumentation_key(key)
def test_validate_instrumentation_key(self):
    """A valid key passes validation and the validator returns None."""
    key = '1234abcd-5678-4efa-8abc-1234567890ab'
    result = utils.validate_instrumentation_key(key)
    self.assertIsNone(result)
def test_invalid_key_empty(self):
    """An empty string is not a valid instrumentation key."""
    with self.assertRaises(ValueError):
        utils.validate_instrumentation_key('')
def test_invalid_key_section1_hex(self):
    """A non-hex character in the first section is rejected."""
    key = 'x234abcd-5678-4efa-8abc-1234567890ab'
    with self.assertRaises(ValueError):
        utils.validate_instrumentation_key(key)