Example #1
def __init__(
        self,
        path,
        max_size=100 * 1024 * 1024,  # 100MiB
        maintenance_period=60,  # 1 minute
        retention_period=7 * 24 * 60 * 60,  # 7 days
        write_timeout=60,  # 1 minute
):
    self.path = os.path.abspath(path)
    self.max_size = max_size
    self.maintenance_period = maintenance_period
    self.retention_period = retention_period
    self.write_timeout = write_timeout
    # Run the maintenance routine once at construction time (raising on
    # failure), then keep re-running it silently in the background.
    self._maintenance_routine(silent=False)
    self._maintenance_task = PeriodicTask(
        interval=self.maintenance_period,
        function=self._maintenance_routine,
        kwargs={'silent': True},
    )
    self._maintenance_task.daemon = True
    self._maintenance_task.start()
Example #2

def __init__(
        self,
        path,
        max_size=50 * 1024 * 1024,  # 50MiB
        maintenance_period=60,  # 1 minute
        retention_period=7 * 24 * 60 * 60,  # 7 days
        write_timeout=60,  # 1 minute
):
    self.path = os.path.abspath(path)
    self.max_size = max_size
    self.maintenance_period = maintenance_period
    self.retention_period = retention_period
    self.write_timeout = write_timeout
    # Run maintenance routine once upon instantiating
    self._maintenance_routine()
    self._maintenance_task = PeriodicTask(
        interval=self.maintenance_period,
        function=self._maintenance_routine,
    )
    self._maintenance_task.daemon = True
    self._maintenance_task.start()
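
Every snippet on this page drives its background work through PeriodicTask, which is never shown here. The following is a minimal sketch, assuming it is a daemon-capable threading.Thread that calls `function` every `interval` seconds until cancel(); the real class may differ, but it must support the interval/function/kwargs arguments plus daemon, start(), cancel(), and join() as used above.

import threading


class PeriodicTask(threading.Thread):
    """Minimal sketch: call `function(*args, **kwargs)` every
    `interval` seconds on a background thread until cancel()."""

    def __init__(self, interval, function, args=None, kwargs=None):
        super(PeriodicTask, self).__init__()
        self.interval = interval
        self.function = function
        self.args = args or []
        self.kwargs = kwargs or {}
        self.finished = threading.Event()  # set by cancel()

    def run(self):
        # wait() doubles as a cancellable sleep between invocations.
        while not self.finished.wait(self.interval):
            self.function(*self.args, **self.kwargs)

    def cancel(self):
        self.finished.set()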
Example #3
def __init__(self, **options):
    self.options = Options(**options)
    if not self.options.instrumentation_key:
        raise ValueError('The instrumentation_key is not provided.')
    self.storage = LocalFileStorage(
        path=self.options.storage_path,
        max_size=self.options.storage_max_size,
        maintenance_period=self.options.storage_maintenance_period,
        retention_period=self.options.storage_retention_period,
    )
    self.transport = AsyncTransport(
        self,
        max_batch_size=100,
        wait_period=self.options.export_interval,
    )
    self._transmission_task = PeriodicTask(
        interval=self.options.storage_maintenance_period,
        function=self._transmission_routine,
    )
    self._transmission_task.daemon = True
    self._transmission_task.start()
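
The constructor reads its configuration off an Options bag that is not shown on this page. Below is a hypothetical sketch of its shape, inferred purely from the attributes read here and in Example #8; the default values are illustrative guesses, not the library's real defaults.

class Options(object):
    """Hypothetical options bag; names match the attributes used in
    Examples #3 and #8, defaults are illustrative only."""

    _defaults = {
        'instrumentation_key': None,
        'endpoint': 'https://dc.services.visualstudio.com/v2/track',
        'export_interval': 15.0,               # seconds between batches
        'timeout': 10.0,                       # HTTP timeout, seconds
        'minimum_retry_interval': 60,          # seconds before retrying
        'storage_path': '/tmp/telemetry',      # placeholder path
        'storage_max_size': 50 * 1024 * 1024,  # 50MiB
        'storage_maintenance_period': 60,      # 1 minute
        'storage_retention_period': 7 * 24 * 60 * 60,  # 7 days
    }

    def __init__(self, **kwargs):
        for key, default in self._defaults.items():
            setattr(self, key, kwargs.pop(key, default))
        if kwargs:
            raise TypeError('unexpected options: {}'.format(sorted(kwargs)))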
Example #4
    def test_gets_event(self):
        queue = Queue(capacity=10)
        event = QueueEvent('test')
        queue.puts((event, 1, 2, 3, event))
        # gets() returns at most `count` items and stops early right
        # after yielding a QueueEvent.
        result = queue.gets(count=5, timeout=TIMEOUT)
        self.assertEqual(result, (event, ))
        result = queue.gets(count=5, timeout=TIMEOUT)
        self.assertEqual(result, (1, 2, 3, event))

        # With a producer running, gets() keeps waiting until `count`
        # items have arrived or the timeout expires.
        task = PeriodicTask(TIMEOUT / 10, lambda: queue.put(1))
        task.start()
        try:
            result = queue.gets(count=5, timeout=TIMEOUT)
            self.assertEqual(result, (1, 1, 1, 1, 1))
        finally:
            task.cancel()
            task.join()
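
These tests also depend on QueueEvent, which is not defined on this page. A minimal sketch consistent with its use as both a queue item and a waitable marker, assuming it simply subclasses threading.Event with a name:

import threading


class QueueEvent(threading.Event):
    """Minimal sketch: a named Event that can travel through the queue
    as a marker; consumers call set() on it, flushers wait() on it."""

    def __init__(self, name):
        super(QueueEvent, self).__init__()
        self.name = name

    def __repr__(self):
        return 'QueueEvent({!r})'.format(self.name)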
Example #5
    def test_flush_timeout(self):
        queue = Queue(capacity=10)
        # An empty queue flushes trivially.
        self.assertEqual(queue.flush(timeout=TIMEOUT), 0)
        queue.put('test', timeout=TIMEOUT)
        # With pending items and no consumer, flush() times out (None).
        self.assertIsNone(queue.flush(timeout=TIMEOUT))
        queue.puts(range(100), timeout=TIMEOUT)
        self.assertIsNone(queue.flush(timeout=TIMEOUT))

        def proc():
            # Consumer: acknowledge flush markers by setting the event.
            for item in queue.gets(count=1, timeout=TIMEOUT):
                if isinstance(item, QueueEvent):
                    item.set()

        task = PeriodicTask(TIMEOUT / 10, proc)
        task.start()
        try:
            # Once a consumer acknowledges the marker, flush() succeeds.
            self.assertIsNotNone(queue.flush())
        finally:
            task.cancel()
            task.join()
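
Read together, the two tests pin down a flush protocol: flush() enqueues a QueueEvent and reports success only once a consumer has set it. A hypothetical implementation of that contract (the internal `_queue` attribute and the empty-queue short-circuit are inferred from the assertions, not from the library):

import queue
import time


class Queue(object):
    """Hypothetical sketch of the flush contract; only the pieces
    needed by flush() are shown. QueueEvent is the marker class
    sketched after Example #4."""

    def __init__(self, capacity):
        self._queue = queue.Queue(maxsize=capacity)

    def put(self, item, block=True, timeout=None):
        self._queue.put(item, block, timeout)

    def flush(self, timeout=None):
        if self._queue.empty():
            return 0  # nothing to flush
        start = time.time()
        event = QueueEvent('flush')
        try:
            self.put(event, timeout=timeout)  # enqueue a marker
        except queue.Full:
            return None  # queue stayed full; nothing was flushed
        remaining = None if timeout is None else max(
            timeout - (time.time() - start), 0)
        if not event.wait(remaining):
            return None  # no consumer acknowledged the marker in time
        return time.time() - start  # seconds the flush took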
Example #6
class LocalFileStorage(object):
    def __init__(
            self,
            path,
            max_size=100 * 1024 * 1024,  # 100MiB
            maintenance_period=60,  # 1 minute
            retention_period=7 * 24 * 60 * 60,  # 7 days
            write_timeout=60,  # 1 minute
    ):
        self.path = os.path.abspath(path)
        self.max_size = max_size
        self.maintenance_period = maintenance_period
        self.retention_period = retention_period
        self.write_timeout = write_timeout
        self._maintenance_routine(silent=False)
        self._maintenance_task = PeriodicTask(
            interval=self.maintenance_period,
            function=self._maintenance_routine,
            kwargs={'silent': True},
        )
        self._maintenance_task.daemon = True
        self._maintenance_task.start()

    def close(self):
        self._maintenance_task.cancel()
        self._maintenance_task.join()

    def __enter__(self):
        return self

    def __exit__(self, type, value, traceback):
        self.close()

    def _maintenance_routine(self, silent=False):
        try:
            if not os.path.isdir(self.path):
                os.makedirs(self.path)
        except Exception:
            if not silent:
                raise
        try:
            # Iterating gets() prunes stale .tmp/.lock files and expired
            # blobs as a side effect.
            for blob in self.gets():
                pass
        except Exception:
            if not silent:
                raise

    def gets(self):
        now = _now()
        lease_deadline = _fmt(now)
        retention_deadline = _fmt(now - _seconds(self.retention_period))
        timeout_deadline = _fmt(now - _seconds(self.write_timeout))
        # File names begin with a formatted timestamp, so lexicographic
        # order is chronological order.
        for name in sorted(os.listdir(self.path)):
            path = os.path.join(self.path, name)
            if not os.path.isfile(path):
                continue  # skip if not a file
            if path.endswith('.tmp'):
                if name < timeout_deadline:
                    try:
                        os.remove(path)  # TODO: log data loss
                    except Exception:
                        pass  # keep silent
            if path.endswith('.lock'):
                if path[path.rindex('@') + 1:-5] > lease_deadline:
                    continue  # under lease
                new_path = path[:path.rindex('@')]
                try:
                    os.rename(path, new_path)
                except Exception:
                    continue  # keep silent
                path = new_path
            if path.endswith('.blob'):
                if name < retention_deadline:
                    try:
                        os.remove(path)  # TODO: log data loss
                    except Exception:
                        pass  # keep silent
                else:
                    yield LocalFileBlob(path)

    def get(self):
        cursor = self.gets()
        try:
            return next(cursor)
        except StopIteration:
            pass
        return None

    def put(self, data, lease_period=0, silent=False):
        blob = LocalFileBlob(
            os.path.join(
                self.path,
                '{}-{}.blob'.format(
                    _fmt(_now()),
                    '{:08x}'.format(
                        random.getrandbits(32)),  # thread-safe random
                ),
            ))
        return blob.put(data, lease_period=lease_period, silent=silent)
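
A brief usage sketch for the class above; the LocalFileBlob methods (get, lease, delete) are assumed from how Example #8 consumes blobs rather than shown on this page, and the directory is a placeholder.

import tempfile

with LocalFileStorage(path=tempfile.mkdtemp()) as storage:
    storage.put(['telemetry-item-1', 'telemetry-item-2'])  # persist a batch
    blob = storage.get()            # oldest blob that is not under lease
    if blob is not None and blob.lease(60):  # reserve it for 60 seconds
        items = blob.get()          # read the batch back
        blob.delete(silent=True)    # transmitted successfully: remove it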
Example #7
class LocalFileStorage(object):
    def __init__(
            self,
            path,
            max_size=50 * 1024 * 1024,  # 50MiB
            maintenance_period=60,  # 1 minute
            retention_period=7 * 24 * 60 * 60,  # 7 days
            write_timeout=60,  # 1 minute
    ):
        self.path = os.path.abspath(path)
        self.max_size = max_size
        self.maintenance_period = maintenance_period
        self.retention_period = retention_period
        self.write_timeout = write_timeout
        self._maintenance_routine(silent=False)
        self._maintenance_task = PeriodicTask(
            interval=self.maintenance_period,
            function=self._maintenance_routine,
            kwargs={'silent': True},
        )
        self._maintenance_task.daemon = True
        self._maintenance_task.start()

    def close(self):
        self._maintenance_task.cancel()
        self._maintenance_task.join()

    def __enter__(self):
        return self

    def __exit__(self, type, value, traceback):
        self.close()

    def _maintenance_routine(self, silent=False):
        try:
            if not os.path.isdir(self.path):
                os.makedirs(self.path)
        except Exception:
            if not silent:
                raise
        try:
            # Iterating gets() prunes stale .tmp/.lock files and expired
            # blobs as a side effect.
            for blob in self.gets():
                pass
        except Exception:
            if not silent:
                raise

    def gets(self):
        now = _now()
        lease_deadline = _fmt(now)
        retention_deadline = _fmt(now - _seconds(self.retention_period))
        timeout_deadline = _fmt(now - _seconds(self.write_timeout))
        # File names begin with a formatted timestamp, so lexicographic
        # order is chronological order.
        for name in sorted(os.listdir(self.path)):
            path = os.path.join(self.path, name)
            if not os.path.isfile(path):
                continue  # skip if not a file
            if path.endswith('.tmp'):
                if name < timeout_deadline:
                    try:
                        os.remove(path)  # TODO: log data loss
                    except Exception:
                        pass  # keep silent
            if path.endswith('.lock'):
                if path[path.rindex('@') + 1:-5] > lease_deadline:
                    continue  # under lease
                new_path = path[:path.rindex('@')]
                try:
                    os.rename(path, new_path)
                except Exception:
                    continue  # keep silent
                path = new_path
            if path.endswith('.blob'):
                if name < retention_deadline:
                    try:
                        os.remove(path)  # TODO: log data loss
                    except Exception:
                        pass  # keep silent
                else:
                    yield LocalFileBlob(path)

    def get(self):
        cursor = self.gets()
        try:
            return next(cursor)
        except StopIteration:
            pass
        return None

    def put(self, data, lease_period=0, silent=False):
        if not self._check_storage_size():
            return None
        blob = LocalFileBlob(
            os.path.join(
                self.path,
                '{}-{}.blob'.format(
                    _fmt(_now()),
                    '{:08x}'.format(
                        random.getrandbits(32)),  # thread-safe random
                ),
            ))
        return blob.put(data, lease_period=lease_period, silent=silent)

    def _check_storage_size(self):
        size = 0
        for dirpath, dirnames, filenames in os.walk(self.path):
            for f in filenames:
                fp = os.path.join(dirpath, f)
                # skip if it is symbolic link
                if not os.path.islink(fp):
                    try:
                        size += os.path.getsize(fp)
                    except OSError:
                        logger.error(
                            "Path %s does not exist or is inaccessible.", fp)
                        continue
                    if size >= self.max_size:
                        logger.warning(
                            "Persistent storage max capacity has been "
                            "reached. Currently at %.1fKB. Telemetry will be "
                            "lost. Please consider increasing the value of "
                            "'storage_max_size' in exporter config.",
                            size / 1024.0)
                        return False
        return True
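
Compared with Example #6, this variant adds _check_storage_size so that put() refuses new writes once the directory is over budget. A hypothetical caller-side check follows (the path is a placeholder); note the size check runs against what is already on disk, so writes only start failing after earlier blobs have filled the directory.

storage = LocalFileStorage(path='/tmp/telemetry', max_size=1024)  # 1KiB cap
for batch in range(100):
    if storage.put(['x' * 100] * 10) is None:
        print('batch {} dropped: persistent storage is full'.format(batch))
        break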
Example #8
class AzureExporter(base_exporter.Exporter):
    """An exporter that sends traces to Microsoft Azure Monitor.

    :type options: dict
    :param options: Options for the exporter. Defaults to None.
    """

    def __init__(self, **options):
        self.options = Options(**options)
        if not self.options.instrumentation_key:
            raise ValueError('The instrumentation_key is not provided.')
        self.storage = LocalFileStorage(
            path=self.options.storage_path,
            max_size=self.options.storage_max_size,
            maintenance_period=self.options.storage_maintenance_period,
            retention_period=self.options.storage_retention_period,
        )
        self.transport = AsyncTransport(
            self,
            max_batch_size=100,
            wait_period=self.options.export_interval,
        )
        self._transmission_task = PeriodicTask(
            interval=self.options.storage_maintenance_period,
            function=self._transmission_routine,
        )
        self._transmission_task.daemon = True
        self._transmission_task.start()

    def span_data_to_envelope(self, sd):
        envelope = Envelope(
            iKey=self.options.instrumentation_key,
            tags=dict(utils.azure_monitor_context),
            time=sd.start_time,
        )
        envelope.tags['ai.operation.id'] = sd.context.trace_id
        if sd.parent_span_id:
            envelope.tags['ai.operation.parentId'] = '|{}.{}.'.format(
                sd.context.trace_id,
                sd.parent_span_id,
            )
        if sd.span_kind == SpanKind.SERVER:
            envelope.name = 'Microsoft.ApplicationInsights.Request'
            data = Request(
                id='|{}.{}.'.format(sd.context.trace_id, sd.span_id),
                duration=utils.timestamp_to_duration(
                    sd.start_time,
                    sd.end_time,
                ),
                responseCode='0',  # TODO
                success=True,  # TODO
            )
            envelope.data = Data(baseData=data, baseType='RequestData')
            if 'http.method' in sd.attributes:
                data.name = sd.attributes['http.method']
            if 'http.url' in sd.attributes:
                data.name = data.name + ' ' + sd.attributes['http.url']
                data.url = sd.attributes['http.url']
            if 'http.status_code' in sd.attributes:
                data.responseCode = str(sd.attributes['http.status_code'])
        else:
            envelope.name = \
                'Microsoft.ApplicationInsights.RemoteDependency'
            data = RemoteDependency(
                name=sd.name,  # TODO
                id='|{}.{}.'.format(sd.context.trace_id, sd.span_id),
                resultCode='0',  # TODO
                duration=utils.timestamp_to_duration(
                    sd.start_time,
                    sd.end_time,
                ),
                success=True,  # TODO
            )
            envelope.data = Data(
                baseData=data,
                baseType='RemoteDependencyData',
            )
            if sd.span_kind == SpanKind.CLIENT:
                data.type = 'HTTP'  # TODO
                if 'http.url' in sd.attributes:
                    url = sd.attributes['http.url']
                    # TODO: error handling, probably put scheme as well
                    data.name = utils.url_to_dependency_name(url)
                if 'http.status_code' in sd.attributes:
                    data.resultCode = str(sd.attributes['http.status_code'])
            else:
                data.type = 'INPROC'
        # TODO: links, tracestate, tags, attrs
        return envelope

    def _transmission_routine(self):
        for blob in self.storage.gets():
            # Lease for longer than the transmission timeout so the blob
            # stays reserved while the request is in flight.
            if blob.lease(self.options.timeout + 5):
                envelopes = blob.get()  # TODO: handle error
                result = self._transmit(envelopes)
                if result > 0:
                    # Retryable failure: keep the blob leased until the
                    # suggested retry time.
                    blob.lease(result)
                else:
                    blob.delete(silent=True)

    def _transmit(self, envelopes):
        """
        Transmit the data envelopes to the ingestion service.
        Return a negative value for partial success or non-retryable failure.
        Return 0 if all envelopes have been successfully ingested.
        Return the next retry time in seconds for retryable failure.
        This function should never throw an exception.
        """
        if not envelopes:
            return 0
        # TODO: prevent requests being tracked
        blacklist_hostnames = execution_context.get_opencensus_attr(
            'blacklist_hostnames',
        )
        execution_context.set_opencensus_attr(
            'blacklist_hostnames',
            ['dc.services.visualstudio.com'],
        )
        try:
            response = requests.post(
                url=self.options.endpoint,
                data=json.dumps(envelopes),
                headers={
                    'Accept': 'application/json',
                    'Content-Type': 'application/json; charset=utf-8',
                },
                timeout=self.options.timeout,
            )
        except Exception as ex:  # TODO: consider RequestException
            logger.warning('Transient client side error %s.', ex)
            # client side error (retryable)
            return self.options.minimum_retry_interval
        finally:
            execution_context.set_opencensus_attr(
                'blacklist_hostnames',
                blacklist_hostnames,
            )
        text = 'N/A'
        data = None
        try:
            text = response.text
        except Exception as ex:
            logger.warning('Error while reading response body %s.', ex)
        else:
            try:
                data = json.loads(text)
            except Exception:
                pass
        if response.status_code == 200:
            logger.info('Transmission succeeded: %s.', text)
            return 0
        if response.status_code == 206:  # Partial Content
            # TODO: store the unsent data
            if data:
                try:
                    resend_envelopes = []
                    for error in data['errors']:
                        if error['statusCode'] in (
                                429,  # Too Many Requests
                                500,  # Internal Server Error
                                503,  # Service Unavailable
                        ):
                            resend_envelopes.append(envelopes[error['index']])
                        else:
                            logger.error(
                                'Data drop %s: %s %s.',
                                error['statusCode'],
                                error['message'],
                                envelopes[error['index']],
                            )
                    if resend_envelopes:
                        self.storage.put(resend_envelopes)
                except Exception as ex:
                    logger.error(
                        'Error while processing %s: %s %s.',
                        response.status_code,
                        text,
                        ex,
                    )
                return -response.status_code
            # cannot parse the response body; fall back to retry
        if response.status_code in (
                206,  # Partial Content
                429,  # Too Many Requests
                500,  # Internal Server Error
                503,  # Service Unavailable
        ):
            logger.warning(
                'Transient server side error %s: %s.',
                response.status_code,
                text,
            )
            # server side error (retryable)
            return self.options.minimum_retry_interval
        logger.error(
            'Non-retryable server side error %s: %s.',
            response.status_code,
            text,
        )
        # server side error (non-retryable)
        return -response.status_code

    def emit(self, span_datas):
        """
        :type span_datas: list of :class:
            `~opencensus.trace.span_data.SpanData`
        :param list of opencensus.trace.span_data.SpanData span_datas:
            SpanData tuples to emit
        """
        envelopes = [self.span_data_to_envelope(sd) for sd in span_datas]
        result = self._transmit(envelopes)
        if result > 0:
            # Retryable failure: persist the batch, leased until the
            # suggested retry time.
            self.storage.put(envelopes, result)

    def export(self, span_datas):
        """
        :type span_datas: list of :class:
            `~opencensus.trace.span_data.SpanData`
        :param list of opencensus.trace.span_data.SpanData span_datas:
            SpanData tuples to export
        """
        self.transport.export(span_datas)
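
Typical wiring of this exporter into an OpenCensus tracer looks roughly like the following sketch; the instrumentation key is a placeholder, and the import paths assume the opencensus-ext-azure distribution.

from opencensus.ext.azure.trace_exporter import AzureExporter
from opencensus.trace.samplers import ProbabilitySampler
from opencensus.trace.tracer import Tracer

tracer = Tracer(
    exporter=AzureExporter(instrumentation_key='<your-ikey>'),
    sampler=ProbabilitySampler(1.0),
)
with tracer.span(name='example-span'):
    pass  # spans created here are batched and exported to Azure Monitor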