def test_get_nothing(self):
    with LocalFileStorage(os.path.join(TEST_FOLDER, 'test', 'a')) as stor:
        pass
    with LocalFileStorage(os.path.join(TEST_FOLDER, 'test')) as stor:
        self.assertIsNone(stor.get())
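
A minimal round-trip sketch of the put/get behavior this test exercises, assuming LocalFileStorage comes from opencensus.ext.azure.common.storage and that get() returns the oldest stored blob or None (the folder path here is illustrative):

import os
import tempfile

from opencensus.ext.azure.common.storage import LocalFileStorage

folder = os.path.join(tempfile.gettempdir(), 'storage-demo')  # hypothetical path
with LocalFileStorage(folder) as stor:
    stor.put([{'name': 'example-envelope'}])  # persist one batch as a blob
    blob = stor.get()                         # oldest blob, or None if empty
    if blob is not None:
        print(blob.get())                     # deserialized batch contents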
Example No. 2
class BaseLogHandler(logging.Handler):
    def __init__(self, **options):
        super(BaseLogHandler, self).__init__()
        self.options = Options(**options)
        utils.validate_instrumentation_key(self.options.instrumentation_key)
        if not 0 <= self.options.logging_sampling_rate <= 1:
            raise ValueError('Sampling must be in the range: [0,1]')
        self.export_interval = self.options.export_interval
        self.max_batch_size = self.options.max_batch_size
        self.storage = None
        if self.options.enable_local_storage:
            self.storage = LocalFileStorage(
                path=self.options.storage_path,
                max_size=self.options.storage_max_size,
                maintenance_period=self.options.storage_maintenance_period,
                retention_period=self.options.storage_retention_period,
                source=self.__class__.__name__,
            )
        self._telemetry_processors = []
        self.addFilter(SamplingFilter(self.options.logging_sampling_rate))
        self._queue = Queue(capacity=self.options.queue_capacity)
        self._worker = Worker(self._queue, self)
        self._worker.start()
        atexit.register(self.close, self.options.grace_period)
        # start statsbeat on exporter instantiation
        if not os.environ.get("APPLICATIONINSIGHTS_STATSBEAT_DISABLED_ALL"):
            statsbeat_metrics.collect_statsbeat_metrics(self.options)
        # For redirects
        self._consecutive_redirects = 0  # To prevent circular redirects

    def _export(self, batch, event=None):  # pragma: NO COVER
        try:
            if batch:
                envelopes = [self.log_record_to_envelope(x) for x in batch]
                envelopes = self.apply_telemetry_processors(envelopes)
                result = self._transmit(envelopes)
                # Only store files if local storage enabled
                if self.storage and result > 0:
                    self.storage.put(envelopes, result)
            if event:
                if isinstance(event, QueueExitEvent):
                    self._transmit_from_storage()  # send files before exit
                return
            if len(batch) < self.options.max_batch_size:
                self._transmit_from_storage()
        finally:
            if event:
                event.set()

    def close(self, timeout=None):
        if self.storage:
            self.storage.close()
        if self._worker:
            self._worker.stop(timeout)

    def createLock(self):
        self.lock = None

    def emit(self, record):
        self._queue.put(record, block=False)

    def log_record_to_envelope(self, record):
        raise NotImplementedError  # pragma: NO COVER

    def flush(self, timeout=None):
        self._queue.flush(timeout=timeout)
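
BaseLogHandler is abstract: log_record_to_envelope must be supplied by a subclass. A minimal illustrative subclass, reusing the Envelope and utils names that appear later in this page (the envelope produced here is deliberately bare; real handlers also populate tags, severity, and exception details):

class EchoLogHandler(BaseLogHandler):
    """Illustrative subclass: turns each LogRecord into a bare envelope."""

    def log_record_to_envelope(self, record):
        # Hypothetical minimal mapping; compare AzureLogHandler below.
        return Envelope(
            iKey=self.options.instrumentation_key,
            time=utils.timestamp_to_iso_str(record.created),
        )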
def test_check_storage_size_not_full(self):
    test_input = (1, 2, 3)
    with LocalFileStorage(os.path.join(TEST_FOLDER, 'asd3'), 1000) as stor:
        stor.put(test_input)
        self.assertTrue(stor._check_storage_size())

def test_check_storage_size_no_files(self):
    with LocalFileStorage(os.path.join(TEST_FOLDER, 'asd3'), 1000) as stor:
        self.assertTrue(stor._check_storage_size())
Example No. 5
class AzureLogHandler(TransportMixin, BaseLogHandler):
    """Handler for logging to Microsoft Azure Monitor.

    :param options: Options for the log handler.
    """
    def __init__(self, **options):
        self.options = Options(**options)
        if not self.options.instrumentation_key:
            raise ValueError('The instrumentation_key is not provided.')
        self.export_interval = self.options.export_interval
        self.max_batch_size = self.options.max_batch_size
        self.storage = LocalFileStorage(
            path=self.options.storage_path,
            max_size=self.options.storage_max_size,
            maintenance_period=self.options.storage_maintenance_period,
            retention_period=self.options.storage_retention_period,
        )
        super(AzureLogHandler, self).__init__()

    def close(self):
        self.storage.close()
        super(AzureLogHandler, self).close()

    def _export(self, batch, event=None):  # pragma: NO COVER
        try:
            if batch:
                envelopes = [self.log_record_to_envelope(x) for x in batch]
                result = self._transmit(envelopes)
                if result > 0:
                    self.storage.put(envelopes, result)
            if event:
                if isinstance(event, QueueExitEvent):
                    self._transmit_from_storage()  # send files before exit
                return
            if len(batch) < self.options.max_batch_size:
                self._transmit_from_storage()
        finally:
            if event:
                event.set()

    def log_record_to_envelope(self, record):
        envelope = Envelope(
            iKey=self.options.instrumentation_key,
            tags=dict(utils.azure_monitor_context),
            time=utils.timestamp_to_iso_str(record.created),
        )
        envelope.tags['ai.operation.id'] = getattr(
            record,
            'traceId',
            '00000000000000000000000000000000',
        )
        envelope.tags['ai.operation.parentId'] = '|{}.{}.'.format(
            envelope.tags['ai.operation.id'],
            getattr(record, 'spanId', '0000000000000000'),
        )
        properties = {
            'process': record.processName,
            'module': record.module,
            'fileName': record.pathname,
            'lineNumber': record.lineno,
            'level': record.levelname,
        }
        if record.exc_info:
            exctype, _value, tb = record.exc_info
            callstack = []
            level = 0
            for fileName, line, method, _text in traceback.extract_tb(tb):
                callstack.append({
                    'level': level,
                    'method': method,
                    'fileName': fileName,
                    'line': line,
                })
                level += 1
            callstack.reverse()

            envelope.name = 'Microsoft.ApplicationInsights.Exception'
            data = ExceptionData(
                exceptions=[{
                    'id': 1,
                    'outerId': 0,
                    'typeName': exctype.__name__,
                    'message': self.format(record),
                    'hasFullStack': True,
                    'parsedStack': callstack,
                }],
                severityLevel=max(0, record.levelno - 1) // 10,
                properties=properties,
            )
            envelope.data = Data(baseData=data, baseType='ExceptionData')
        else:
            envelope.name = 'Microsoft.ApplicationInsights.Message'
            data = Message(
                message=self.format(record),
                severityLevel=max(0, record.levelno - 1) // 10,
                properties=properties,
            )
            envelope.data = Data(baseData=data, baseType='MessageData')
        return envelope
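
A usage sketch for this handler, following the standard pattern of attaching it to a stdlib logger (the instrumentation key is a placeholder):

import logging

logger = logging.getLogger(__name__)
logger.setLevel(logging.INFO)
logger.addHandler(AzureLogHandler(
    instrumentation_key='00000000-0000-0000-0000-000000000000',  # placeholder
))
logger.warning('Hello, Azure Monitor!')   # exported as a Message envelope
try:
    1 / 0
except ZeroDivisionError:
    logger.exception('Captured failure')  # exported as an Exception envelope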
Example No. 6
class AzureExporter(TransportMixin, BaseExporter):
    """An exporter that sends traces to Microsoft Azure Monitor.

    :param options: Options for the exporter.
    """
    def __init__(self, **options):
        self.options = Options(**options)
        if not self.options.instrumentation_key:
            raise ValueError('The instrumentation_key is not provided.')
        self.storage = LocalFileStorage(
            path=self.options.storage_path,
            max_size=self.options.storage_max_size,
            maintenance_period=self.options.storage_maintenance_period,
            retention_period=self.options.storage_retention_period,
        )
        super(AzureExporter, self).__init__(**options)

    def span_data_to_envelope(self, sd):
        envelope = Envelope(
            iKey=self.options.instrumentation_key,
            tags=dict(utils.azure_monitor_context),
            time=sd.start_time,
        )
        envelope.tags['ai.operation.id'] = sd.context.trace_id
        if sd.parent_span_id:
            envelope.tags['ai.operation.parentId'] = '|{}.{}.'.format(
                sd.context.trace_id,
                sd.parent_span_id,
            )
        if sd.span_kind == SpanKind.SERVER:
            envelope.name = 'Microsoft.ApplicationInsights.Request'
            data = Request(
                id='|{}.{}.'.format(sd.context.trace_id, sd.span_id),
                duration=utils.timestamp_to_duration(
                    sd.start_time,
                    sd.end_time,
                ),
                responseCode='0',  # TODO
                success=True,  # TODO
                properties={},
            )
            envelope.data = Data(baseData=data, baseType='RequestData')
            if 'http.method' in sd.attributes:
                data.name = sd.attributes['http.method']
            if 'http.url' in sd.attributes:
                data.name = data.name + ' ' + sd.attributes['http.url']
                data.url = sd.attributes['http.url']
            if 'http.status_code' in sd.attributes:
                data.responseCode = str(sd.attributes['http.status_code'])
        else:
            envelope.name = \
                'Microsoft.ApplicationInsights.RemoteDependency'
            data = RemoteDependency(
                name=sd.name,  # TODO
                id='|{}.{}.'.format(sd.context.trace_id, sd.span_id),
                resultCode='0',  # TODO
                duration=utils.timestamp_to_duration(
                    sd.start_time,
                    sd.end_time,
                ),
                success=True,  # TODO
                properties={},
            )
            envelope.data = Data(
                baseData=data,
                baseType='RemoteDependencyData',
            )
            if sd.span_kind == SpanKind.CLIENT:
                data.type = 'HTTP'  # TODO
                if 'http.url' in sd.attributes:
                    url = sd.attributes['http.url']
                    # TODO: error handling, probably put scheme as well
                    data.name = utils.url_to_dependency_name(url)
                if 'http.status_code' in sd.attributes:
                    data.resultCode = str(sd.attributes['http.status_code'])
            else:
                data.type = 'INPROC'
        # TODO: links, tracestate, tags
        for key in sd.attributes:
            data.properties[key] = sd.attributes[key]
        return envelope

    def emit(self, batch, event=None):
        try:
            if batch:
                envelopes = [self.span_data_to_envelope(sd) for sd in batch]
                result = self._transmit(envelopes)
                if result > 0:
                    self.storage.put(envelopes, result)
            if event:
                if isinstance(event, QueueExitEvent):
                    self._transmit_from_storage()  # send files before exit
                event.set()
                return
            if len(batch) < self.options.max_batch_size:
                self._transmit_from_storage()
        except Exception:
            logger.exception('Exception occurred while exporting the data.')

    def _stop(self, timeout=None):
        self.storage.close()
        return self._worker.stop(timeout)
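
A usage sketch, wiring the exporter into an OpenCensus Tracer with a probability sampler (import paths as in opencensus-python; the key is a placeholder):

from opencensus.trace.samplers import ProbabilitySampler
from opencensus.trace.tracer import Tracer

tracer = Tracer(
    exporter=AzureExporter(
        instrumentation_key='00000000-0000-0000-0000-000000000000',
    ),
    sampler=ProbabilitySampler(1.0),
)
# Spans become Request/RemoteDependency envelopes via span_data_to_envelope.
with tracer.span(name='parent'):
    with tracer.span(name='child'):
        pass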
class MetricsExporter(TransportMixin, ProcessorMixin):
    """Metrics exporter for Microsoft Azure Monitor."""

    def __init__(self, **options):
        self.options = Options(**options)
        utils.validate_instrumentation_key(self.options.instrumentation_key)
        if self.options.max_batch_size <= 0:
            raise ValueError('Max batch size must be at least 1.')
        self.export_interval = self.options.export_interval
        self.max_batch_size = self.options.max_batch_size
        self._telemetry_processors = []
        self.storage = LocalFileStorage(
            path=self.options.storage_path,
            max_size=self.options.storage_max_size,
            maintenance_period=self.options.storage_maintenance_period,
            retention_period=self.options.storage_retention_period,
        )
        super(MetricsExporter, self).__init__()

    def export_metrics(self, metrics):
        envelopes = []
        for metric in metrics:
            envelopes.extend(self.metric_to_envelopes(metric))
        # Send data in batches of max_batch_size
        batched_envelopes = list(common_utils.window(
            envelopes, self.max_batch_size))
        for batch in batched_envelopes:
            batch = self.apply_telemetry_processors(batch)
            result = self._transmit(batch)
            if result > 0:
                self.storage.put(batch, result)

        # If there is still room to transmit envelopes, transmit from storage
        # if available
        if len(envelopes) < self.options.max_batch_size:
            self._transmit_from_storage()

    def metric_to_envelopes(self, metric):
        envelopes = []
        # No support for histogram aggregations
        if (metric.descriptor.type !=
                MetricDescriptorType.CUMULATIVE_DISTRIBUTION):
            md = metric.descriptor
            # Each time series will be uniquely identified by its
            # label values
            for time_series in metric.time_series:
                # Using stats, time_series should only have one
                # point which contains the aggregated value
                data_point = self._create_data_points(
                    time_series, md)[0]
                # The timestamp is when the metric was recorded
                timestamp = time_series.points[0].timestamp
                # Get the properties using label keys from metric
                # and label values of the time series
                properties = self._create_properties(time_series, md)
                envelopes.append(self._create_envelope(data_point,
                                                       timestamp,
                                                       properties))
        return envelopes

    def _create_data_points(self, time_series, metric_descriptor):
        """Convert a metric's OC time series to list of Azure data points."""
        data_points = []
        for point in time_series.points:
            # TODO: Possibly encode namespace in name
            data_point = DataPoint(ns=metric_descriptor.name,
                                   name=metric_descriptor.name,
                                   value=point.value.value)
            data_points.append(data_point)
        return data_points

    def _create_properties(self, time_series, metric_descriptor):
        properties = {}
        # We construct a properties map from the label keys and values. We
        # assume the ordering is already correct
        for i in range(len(metric_descriptor.label_keys)):
            if time_series.label_values[i].value is None:
                value = "null"
            else:
                value = time_series.label_values[i].value
            properties[metric_descriptor.label_keys[i].key] = value
        return properties

    def _create_envelope(self, data_point, timestamp, properties):
        envelope = Envelope(
            iKey=self.options.instrumentation_key,
            tags=dict(utils.azure_monitor_context),
            time=timestamp.isoformat(),
        )
        envelope.name = "Microsoft.ApplicationInsights.Metric"
        data = MetricData(
            metrics=[data_point],
            properties=properties
        )
        envelope.data = Data(baseData=data, baseType="MetricData")
        return envelope
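
export_metrics depends on common_utils.window to chunk the envelope list into batches of at most max_batch_size. A sketch of the assumed semantics (not necessarily the library's exact implementation):

import itertools

def window(iterable, size):
    """Yield successive lists of at most `size` items from `iterable`."""
    it = iter(iterable)
    while True:
        chunk = list(itertools.islice(it, size))
        if not chunk:
            return
        yield chunk

# e.g. list(window(range(5), 2)) == [[0, 1], [2, 3], [4]]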
Example No. 8
class AzureExporter(BaseExporter, ProcessorMixin, TransportMixin):
    """An exporter that sends traces to Microsoft Azure Monitor.

    :param options: Options for the exporter.
    """

    def __init__(self, **options):
        self.options = Options(**options)
        utils.validate_instrumentation_key(self.options.instrumentation_key)
        self.storage = None
        if self.options.enable_local_storage:
            self.storage = LocalFileStorage(
                path=self.options.storage_path,
                max_size=self.options.storage_max_size,
                maintenance_period=self.options.storage_maintenance_period,
                retention_period=self.options.storage_retention_period,
                source=self.__class__.__name__,
            )
        self._telemetry_processors = []
        super(AzureExporter, self).__init__(**options)
        atexit.register(self._stop, self.options.grace_period)

    def span_data_to_envelope(self, sd):
        envelope = Envelope(
            iKey=self.options.instrumentation_key,
            tags=dict(utils.azure_monitor_context),
            time=sd.start_time,
        )

        envelope.tags['ai.operation.id'] = sd.context.trace_id
        if sd.parent_span_id:
            envelope.tags['ai.operation.parentId'] = '{}'.format(
                sd.parent_span_id,
            )
        if sd.span_kind == SpanKind.SERVER:
            envelope.name = 'Microsoft.ApplicationInsights.Request'
            data = Request(
                id='{}'.format(sd.span_id),
                duration=utils.timestamp_to_duration(
                    sd.start_time,
                    sd.end_time,
                ),
                responseCode=str(sd.status.code),
                success=False,  # modified below based on attributes or status
                properties={},
            )
            envelope.data = Data(baseData=data, baseType='RequestData')
            data.name = ''
            if 'http.method' in sd.attributes:
                data.name = sd.attributes['http.method']
            if 'http.route' in sd.attributes:
                data.name = data.name + ' ' + sd.attributes['http.route']
                envelope.tags['ai.operation.name'] = data.name
                data.properties['request.name'] = data.name
            elif 'http.path' in sd.attributes:
                data.properties['request.name'] = data.name + \
                    ' ' + sd.attributes['http.path']
            if 'http.url' in sd.attributes:
                data.url = sd.attributes['http.url']
                data.properties['request.url'] = sd.attributes['http.url']
            if 'http.status_code' in sd.attributes:
                status_code = sd.attributes['http.status_code']
                data.responseCode = str(status_code)
                data.success = status_code >= 200 and status_code <= 399
            elif sd.status.code == 0:
                data.success = True
        else:
            envelope.name = \
                'Microsoft.ApplicationInsights.RemoteDependency'
            data = RemoteDependency(
                name=sd.name,  # TODO
                id='{}'.format(sd.span_id),
                resultCode=str(sd.status.code),
                duration=utils.timestamp_to_duration(
                    sd.start_time,
                    sd.end_time,
                ),
                success=False,  # modified below based on attributes or status
                properties={},
            )
            envelope.data = Data(
                baseData=data,
                baseType='RemoteDependencyData',
            )
            if sd.span_kind == SpanKind.CLIENT:
                data.type = sd.attributes.get('component')
                if 'http.url' in sd.attributes:
                    url = sd.attributes['http.url']
                    # TODO: error handling, probably put scheme as well
                    data.data = url
                    parse_url = urlparse(url)
                    # target matches authority (host:port)
                    data.target = parse_url.netloc
                    if 'http.method' in sd.attributes:
                        # name is METHOD/path
                        data.name = sd.attributes['http.method'] \
                            + ' ' + parse_url.path
                if 'http.status_code' in sd.attributes:
                    status_code = sd.attributes["http.status_code"]
                    data.resultCode = str(status_code)
                    try:
                        data.success = 200 <= int(status_code) < 400
                    except (ValueError, TypeError):
                        data.success = status_code

                elif sd.status.code == 0:
                    data.success = True
            else:
                data.type = 'INPROC'
                data.success = True
        if sd.links:
            links = []
            for link in sd.links:
                links.append(
                    {"operation_Id": link.trace_id, "id": link.span_id})
            data.properties["_MS.links"] = json.dumps(links)
        # TODO: tracestate, tags
        for key in sd.attributes:
            # This removes redundant data from ApplicationInsights
            if key.startswith('http.'):
                continue
            data.properties[key] = sd.attributes[key]
        return envelope

    def emit(self, batch, event=None):
        try:
            if batch:
                envelopes = [self.span_data_to_envelope(sd) for sd in batch]
                envelopes = self.apply_telemetry_processors(envelopes)
                result = self._transmit(envelopes)
                # Only store files if local storage enabled
                if self.storage and result > 0:
                    self.storage.put(envelopes, result)
            if event:
                if isinstance(event, QueueExitEvent):
                    self._transmit_from_storage()  # send files before exit
                event.set()
                return
            if len(batch) < self.options.max_batch_size:
                self._transmit_from_storage()
        except Exception:
            logger.exception('Exception occurred while exporting the data.')

    def _stop(self, timeout=None):
        if self.storage:
            self.storage.close()
        if self._worker:
            self._worker.stop(timeout)
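
This exporter runs apply_telemetry_processors (from ProcessorMixin) over each batch before transmission; by convention a processor that returns False drops the envelope. A hedged sketch of registering one, assuming the add_telemetry_processor method that opencensus-ext-azure exposes for this:

def scrub_url_processor(envelope):
    """Illustrative processor: redact the request URL before export."""
    base_data = envelope.data.baseData
    if hasattr(base_data, 'url') and base_data.url:
        base_data.url = '<redacted>'
    return True  # keep the envelope; returning False would drop it

exporter = AzureExporter(
    instrumentation_key='00000000-0000-0000-0000-000000000000',  # placeholder
)
exporter.add_telemetry_processor(scrub_url_processor)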
class BaseLogHandler(logging.Handler):
    def __init__(self, **options):
        super(BaseLogHandler, self).__init__()
        self.options = Options(**options)
        utils.validate_instrumentation_key(self.options.instrumentation_key)
        if not 0 <= self.options.logging_sampling_rate <= 1:
            raise ValueError('Sampling must be in the range: [0,1]')
        self.export_interval = self.options.export_interval
        self.max_batch_size = self.options.max_batch_size
        self.storage = None
        if self.options.enable_local_storage:
            self.storage = LocalFileStorage(
                path=self.options.storage_path,
                max_size=self.options.storage_max_size,
                maintenance_period=self.options.storage_maintenance_period,
                retention_period=self.options.storage_retention_period,
                source=self.__class__.__name__,
            )
        self._telemetry_processors = []
        self.addFilter(SamplingFilter(self.options.logging_sampling_rate))
        self._queue = Queue(capacity=self.options.queue_capacity)
        self._worker = Worker(self._queue, self)
        self._worker.start()
        atexit.register(self.close, self.options.grace_period)
        # start statsbeat on exporter instantiation
        if not os.environ.get("APPLICATIONINSIGHTS_STATSBEAT_DISABLED_ALL"):
            statsbeat_metrics.collect_statsbeat_metrics(self.options)
        # For redirects
        self._consecutive_redirects = 0  # To prevent circular redirects

    def _export(self, batch, event=None):  # pragma: NO COVER
        try:
            if batch:
                envelopes = [self.log_record_to_envelope(x) for x in batch]
                envelopes = self.apply_telemetry_processors(envelopes)
                result = self._transmit(envelopes)
                # Only store files if local storage enabled
                if self.storage and result > 0:
                    self.storage.put(envelopes, result)
            if event:
                if isinstance(event, QueueExitEvent):
                    self._transmit_from_storage()  # send files before exit
                return
            if len(batch) < self.options.max_batch_size:
                self._transmit_from_storage()
        finally:
            if event:
                event.set()

    def close(self, timeout=None):
        if self.storage:
            self.storage.close()
        if self._worker:
            self._worker.stop(timeout)

    def createLock(self):
        self.lock = None

    def emit(self, record):
        self._queue.put(record, block=False)

    def log_record_to_envelope(self, record):
        raise NotImplementedError  # pragma: NO COVER

    def flush(self, timeout=None):
        if self._queue.is_empty():
            return

        # We must check that the worker thread is alive, because otherwise
        # flush is useless; worse, it would deadlock if no timeout is given
        # and the queue isn't empty.
        # This is a likely scenario during process termination, when atexit
        # first calls handler.close() and then logging.shutdown(), which in
        # turn calls handler.flush() without arguments.
        if not self._worker.is_alive():
            logger.warning(
                "Can't flush %s, worker thread is dead. "
                "Any pending messages will be lost.", self)
            return

        self._queue.flush(timeout=timeout)
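
Because flush only drains the queue while the worker thread is alive, an orderly shutdown should flush before closing. A usage sketch (the handler construction and key are placeholders):

import logging

handler = AzureLogHandler(  # any concrete BaseLogHandler subclass works here
    instrumentation_key='00000000-0000-0000-0000-000000000000',  # placeholder
)
logger = logging.getLogger('shutdown-demo')
logger.addHandler(handler)
logger.info('final message')

handler.flush(timeout=5.0)  # drain the queue while the worker is alive
handler.close()             # stop the worker; a later flush only logs a warning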
Example No. 10
class AzureExporter(base_exporter.Exporter):
    """An exporter that sends traces to Microsoft Azure Monitor.

    :param options: Options for the exporter.
    """

    def __init__(self, **options):
        self.options = Options(**options)
        if not self.options.instrumentation_key:
            raise ValueError('The instrumentation_key is not provided.')
        self.storage = LocalFileStorage(
            path=self.options.storage_path,
            max_size=self.options.storage_max_size,
            maintenance_period=self.options.storage_maintenance_period,
            retention_period=self.options.storage_retention_period,
        )
        self.transport = AsyncTransport(
            self,
            max_batch_size=100,
            wait_period=self.options.export_interval,
        )
        self._transmission_task = PeriodicTask(
            interval=self.options.storage_maintenance_period,
            function=self._transmission_routine,
        )
        self._transmission_task.daemon = True
        self._transmission_task.start()

    def span_data_to_envelope(self, sd):
        envelope = Envelope(
            iKey=self.options.instrumentation_key,
            tags=dict(utils.azure_monitor_context),
            time=sd.start_time,
        )
        envelope.tags['ai.operation.id'] = sd.context.trace_id
        if sd.parent_span_id:
            envelope.tags['ai.operation.parentId'] = '|{}.{}.'.format(
                sd.context.trace_id,
                sd.parent_span_id,
            )
        if sd.span_kind == SpanKind.SERVER:
            envelope.name = 'Microsoft.ApplicationInsights.Request'
            data = Request(
                id='|{}.{}.'.format(sd.context.trace_id, sd.span_id),
                duration=utils.timestamp_to_duration(
                    sd.start_time,
                    sd.end_time,
                ),
                responseCode='0',  # TODO
                success=True,  # TODO
            )
            envelope.data = Data(baseData=data, baseType='RequestData')
            if 'http.method' in sd.attributes:
                data.name = sd.attributes['http.method']
            if 'http.url' in sd.attributes:
                data.name = data.name + ' ' + sd.attributes['http.url']
                data.url = sd.attributes['http.url']
            if 'http.status_code' in sd.attributes:
                data.responseCode = str(sd.attributes['http.status_code'])
        else:
            envelope.name = \
                'Microsoft.ApplicationInsights.RemoteDependency'
            data = RemoteDependency(
                name=sd.name,  # TODO
                id='|{}.{}.'.format(sd.context.trace_id, sd.span_id),
                resultCode='0',  # TODO
                duration=utils.timestamp_to_duration(
                    sd.start_time,
                    sd.end_time,
                ),
                success=True,  # TODO
            )
            envelope.data = Data(
                baseData=data,
                baseType='RemoteDependencyData',
            )
            if sd.span_kind == SpanKind.CLIENT:
                data.type = 'HTTP'  # TODO
                if 'http.url' in sd.attributes:
                    url = sd.attributes['http.url']
                    # TODO: error handling, probably put scheme as well
                    data.name = utils.url_to_dependency_name(url)
                if 'http.status_code' in sd.attributes:
                    data.resultCode = str(sd.attributes['http.status_code'])
            else:
                data.type = 'INPROC'
        # TODO: links, tracestate, tags, attrs
        return envelope

    def _transmission_routine(self):
        for blob in self.storage.gets():
            if blob.lease(self.options.timeout + 5):
                envelopes = blob.get()  # TODO: handle error
                result = self._transmit(envelopes)
                if result > 0:
                    blob.lease(result)
                else:
                    blob.delete(silent=True)

    def _transmit(self, envelopes):
        """
        Transmit the data envelopes to the ingestion service.
        Return a negative value for partial success or non-retryable failure.
        Return 0 if all envelopes have been successfully ingested.
        Return the next retry time in seconds for retryable failure.
        This function should never throw exception.
        """
        if not envelopes:
            return 0
        # TODO: prevent requests being tracked
        blacklist_hostnames = execution_context.get_opencensus_attr(
            'blacklist_hostnames',
        )
        execution_context.set_opencensus_attr(
            'blacklist_hostnames',
            ['dc.services.visualstudio.com'],
        )
        try:
            response = requests.post(
                url=self.options.endpoint,
                data=json.dumps(envelopes),
                headers={
                    'Accept': 'application/json',
                    'Content-Type': 'application/json; charset=utf-8',
                },
                timeout=self.options.timeout,
            )
        except Exception as ex:  # TODO: consider RequestException
            logger.warning('Transient client side error %s.', ex)
            # client side error (retryable)
            return self.options.minimum_retry_interval
        finally:
            execution_context.set_opencensus_attr(
                'blacklist_hostnames',
                blacklist_hostnames,
            )
        text = 'N/A'
        data = None
        try:
            text = response.text
        except Exception as ex:
            logger.warning('Error while reading response body %s.', ex)
        else:
            try:
                data = json.loads(text)
            except Exception:
                pass
        if response.status_code == 200:
            logger.info('Transmission succeeded: %s.', text)
            return 0
        if response.status_code == 206:  # Partial Content
            # TODO: store the unsent data
            if data:
                try:
                    resend_envelopes = []
                    for error in data['errors']:
                        if error['statusCode'] in (
                                429,  # Too Many Requests
                                500,  # Internal Server Error
                                503,  # Service Unavailable
                        ):
                            resend_envelopes.append(envelopes[error['index']])
                        else:
                            logger.error(
                                'Data drop %s: %s %s.',
                                error['statusCode'],
                                error['message'],
                                envelopes[error['index']],
                            )
                    if resend_envelopes:
                        self.storage.put(resend_envelopes)
                except Exception as ex:
                    logger.error(
                        'Error while processing %s: %s %s.',
                        response.status_code,
                        text,
                        ex,
                    )
                return -response.status_code
            # cannot parse response body, fallback to retry
        if response.status_code in (
                206,  # Partial Content
                429,  # Too Many Requests
                500,  # Internal Server Error
                503,  # Service Unavailable
        ):
            logger.warning(
                'Transient server side error %s: %s.',
                response.status_code,
                text,
            )
            # server side error (retryable)
            return self.options.minimum_retry_interval
        logger.error(
            'Non-retryable server side error %s: %s.',
            response.status_code,
            text,
        )
        # server side error (non-retryable)
        return -response.status_code

    def emit(self, span_datas):
        """
        :type span_datas: list of
            :class:`~opencensus.trace.span_data.SpanData`
        :param span_datas: SpanData tuples to emit
        """
        envelopes = [self.span_data_to_envelope(sd) for sd in span_datas]
        result = self._transmit(envelopes)
        if result > 0:
            self.storage.put(envelopes, result)

    def export(self, span_datas):
        """
        :type span_datas: list of
            :class:`~opencensus.trace.span_data.SpanData`
        :param span_datas: SpanData tuples to export
        """
        self.transport.export(span_datas)
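
Per the _transmit docstring above, 0 means every envelope was ingested, a positive value is the retry interval in seconds, and a negative value marks partial success or a non-retryable failure. A sketch of routing on that contract (the helper name is hypothetical):

def route_transmit_result(storage, envelopes, result):
    """Illustrative only: persist, retry, or drop per _transmit's contract."""
    if result == 0:
        return                          # fully ingested; nothing to persist
    if result > 0:
        storage.put(envelopes, result)  # lease and retry after `result` seconds
    # result < 0: partial success (retryable items were re-stored inside
    # _transmit) or a non-retryable failure; remaining envelopes are dropped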
Example No. 11
class BaseLogHandler(logging.Handler):

    def __init__(self, **options):
        super(BaseLogHandler, self).__init__()
        self.options = Options(**options)

        logger.debug("Setting up AzureLogHandler")
        if not 0 <= self.options.logging_sampling_rate <= 1:
            raise ValueError('Sampling must be in the range: [0,1]')
        self.export_interval = self.options.export_interval
        self.max_batch_size = self.options.max_batch_size
        self.storage = LocalFileStorage(
            path=self.options.storage_path,
            max_size=self.options.storage_max_size,
            maintenance_period=self.options.storage_maintenance_period,
            retention_period=self.options.storage_retention_period,
            source=self.__class__.__name__,
        )
        self._telemetry_processors = []
        self.addFilter(SamplingFilter(self.options.logging_sampling_rate))
        self._queue = Queue(capacity=8192)  # TODO: make this configurable
        self._worker = Worker(self._queue, self)
        self._worker.start()

        # TODO: Make enable/disable heartbeat configurable. Disabling it for now.
        # heartbeat_metrics.enable_heartbeat_metrics(
        #    self.options.connection_string, self.options.instrumentation_key)

    def _export(self, batch, event=None):  # pragma: NO COVER
        try:
            if batch:
                envelopes = [self.log_record_to_envelope(x) for x in batch]
                envelopes = self.apply_telemetry_processors(envelopes)
                result = self._transmit(envelopes)
                if result > 0:
                    self.storage.put(envelopes, result)
            if event:
                if isinstance(event, QueueExitEvent):
                    self._transmit_from_storage()  # send files before exit
                return
            if len(batch) < self.options.max_batch_size:
                self._transmit_from_storage()
        finally:
            if event:
                event.set()

    def close(self):
        self.storage.close()
        self._worker.stop()

    def createLock(self):
        self.lock = None

    def emit(self, record):
        self._queue.put(record, block=False)

    def log_record_to_envelope(self, record):
        raise NotImplementedError  # pragma: NO COVER

    def flush(self, timeout=None):
        self._queue.flush(timeout=timeout)