def get_tracer(self, component_name="AppLogger", parent_tracer=None):
        """Get Tracer Object.

        Args:
            component_name (str, optional): Name of logger. Defaults to "AppLogger".
            parent_tracer (opencensus.trace.tracer.Tracer, optional):
                Parent tracer, required for setting up correlation.

        Returns:
            opencensus.trace.tracer: A Tracer.
        """
        self.update_config(self.config)
        sampler = AlwaysOnSampler()
        exporter = self.get_log_exporter(component_name)
        if self.config.get("logging_enabled") != "true":
            sampler = AlwaysOffSampler()
        if parent_tracer is None:
            tracer = Tracer(exporter=exporter, sampler=sampler)
        else:
            tracer = Tracer(
                span_context=parent_tracer.span_context,
                exporter=exporter,
                sampler=sampler,
            )
        return tracer
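# A minimal usage sketch for the method above (hypothetical: an `app_logger`
# instance of this class and a `do_work()` callable). The child tracer reuses
# the parent's span_context, so both spans report under the same operation id.
parent_tracer = app_logger.get_tracer(component_name="Orchestrator")
with parent_tracer.span(name="orchestrate"):
    child_tracer = app_logger.get_tracer(component_name="Worker",
                                         parent_tracer=parent_tracer)
    with child_tracer.span(name="work"):
        do_work()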
    def __init__(self, run):
        print('Initializing the AppInsightsLogger')
        self.env = Env()
        self.run_id = self.get_run_id_and_set_context(run)

        # Prepare integrations and initialize tracer
        config_integration.trace_integrations(['httplib', 'logging'])
        texporter = AzureExporter(
            connection_string=self.env.app_insights_connection_string)
        texporter.add_telemetry_processor(self.callback_function)
        self.tracer = Tracer(exporter=texporter,
                             sampler=ProbabilitySampler(
                                 self.env.trace_sampling_rate))

        # Create AppInsights Handler and set log format
        self.logger = logging.getLogger(__name__)
        self.logger.setLevel(
            getattr(logging, self.env.log_level.upper(), logging.WARNING))
        handler = AzureLogHandler(
            connection_string=self.env.app_insights_connection_string,
            logging_sampling_rate=self.env.log_sampling_rate,
        )
        handler.add_telemetry_processor(self.callback_function)
        self.logger.addHandler(handler)

        # initializes metric exporter
        mexporter = metrics_exporter.new_metrics_exporter(
            enable_standard_metrics=self.env.enable_standard_metrics,
            export_interval=self.env.metrics_export_interval,
            connection_string=self.env.app_insights_connection_string,
        )
        mexporter.add_telemetry_processor(self.callback_function)
        stats_module.stats.view_manager.register_exporter(mexporter)
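# A hedged sketch of recording a custom metric that flows to the exporter
# registered above; it reuses the stats_module import from this snippet, and
# the measure/view names are illustrative, not part of the original class.
from opencensus.stats import aggregation as aggregation_module
from opencensus.stats import measure as measure_module
from opencensus.stats import view as view_module
from opencensus.tags import tag_map as tag_map_module

batch_latency = measure_module.MeasureFloat("batch_latency",
                                            "Latency of one batch", "ms")
latency_view = view_module.View("batch_latency_view", "Last batch latency",
                                [], batch_latency,
                                aggregation_module.LastValueAggregation())
stats_module.stats.view_manager.register_view(latency_view)

measurement_map = stats_module.stats.stats_recorder.new_measurement_map()
measurement_map.measure_float_put(batch_latency, 42.0)
measurement_map.record(tag_map_module.TagMap())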
Example #3
def main():
    sampler = always_on.AlwaysOnSampler()
    exporter = print_exporter.PrintExporter()
    #tracer = Tracer(sampler=sampler, exporter=exporter)
    je = JaegerExporter(service_name="pitoncito",
                        host_name='jaeger-server',
                        port=9411,
                        endpoint='/api/traces')
    tracer = Tracer(exporter=je, sampler=sampler)

    with tracer.span(name='root'):
        tracer.add_attribute_to_current_span(attribute_key='miclave',
                                             attribute_value='mivalor')
        function_to_trace()
        with tracer.span(name='child'):
            function_to_trace()

    # Get the current tracer
    tracer = execution_context.get_opencensus_tracer()

    # Explicitly create spans
    tracer.start_span()

    # Get current span
    execution_context.get_current_span()

    # Explicitly end span
    tracer.end_span()
Example #4
def process(auth_context, form):
    """
    View function for processing charges.

    Parameters:
       auth_context (dict): The authentication context of the request.
                            See middlewares/auth.py for more information.
       form (CheckOutForm): A validated checkout form.
                            See middlewares/form_validation.py for more
                            information.
    Output:
       Rendered HTML page.
    """

    # Create an OpenCensus tracer to trace each payment process, and export
    # the data to Stackdriver Tracing.
    tracer = Tracer(exporter=sde)
    trace_id = tracer.span_context.trace_id

    # Prepare the order
    with tracer.span(name="prepare_order_info"):
        product_ids = form.product_ids.data
        stripe_token = form.stripeToken.data
        shipping = orders.Shipping(address_1=form.address_1.data,
                                   address_2=form.address_2.data,
                                   city=form.city.data,
                                   state=form.state.data,
                                   zip_code=form.zip_code.data,
                                   email=form.email.data,
                                   mobile=form.mobile.data)
        amount = product_catalog.calculate_total_price(product_ids)
        order = orders.Order(amount=amount,
                             shipping=shipping,
                             status="order_created",
                             items=product_ids)
        order_id = orders.add_order(order)

    # Stream a Payment event
    with tracer.span(name="send_payment_event"):
        if stripe_token:
            # Publish an event to the topic for new payments.
            # Cloud Function pay_with_stripe subscribes to the topic and
            # processes the payment using the Stripe API upon arrival of new
            # events.
            # Cloud Function streamEvents (or App Engine service stream-event)
            # subscribes to the topic and saves the event to BigQuery for
            # data analytics upon arrival of new events.
            eventing.stream_event(
                topic_name=PUBSUB_TOPIC_PAYMENT_PROCESS,
                event_type='order_created',
                event_context={
                    'order_id': order_id,
                    'token': stripe_token,
                    # Pass the trace ID in the event so that Cloud Function
                    # pay_with_stripe can continue the trace.
                    'trace_id': trace_id
                })

    return render_template("charge.html", auth_context=auth_context)
Example #5
def sleep(request: InvokeMethodRequest) -> InvokeMethodResponse:
    tracer = Tracer(sampler=AlwaysOnSampler())
    with tracer.span(name='sleep') as _:
        time.sleep(2)
        print(request.metadata, flush=True)
        print(request.text(), flush=True)

        return InvokeMethodResponse(b'SLEEP', "text/plain; charset=UTF-8")
Example #6
def say(request: InvokeMethodRequest) -> InvokeMethodResponse:
    tracer = Tracer(sampler=AlwaysOnSampler())
    with tracer.span(name='say') as span:
        data = request.text()
        span.add_annotation('Request length', len=len(data))
        print(request.metadata, flush=True)
        print(request.text(), flush=True)

        return InvokeMethodResponse(b'SAY', "text/plain; charset=UTF-8")
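# A hedged wiring sketch for the two Dapr handlers above, assuming the
# dapr-ext-grpc App from the Dapr Python SDK; the port is illustrative.
from dapr.ext.grpc import App

app = App()
app.method(name='sleep')(sleep)
app.method(name='say')(say)
app.run(50051)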
    def __init__(self, flask_app, logger):
        self.app = flask_app
        self.log = logger
        self.api = Api(self.app)
        self.is_terminating = False
        self.func_properties = {}
        self.func_request_counts = {}
        self.api_prefix = getenv('API_PREFIX')
        self.tracer = None

        self.api_task_manager = TaskManager()
        signal.signal(signal.SIGINT, self.initialize_term)

        # Add health check endpoint
        self.app.add_url_rule(self.api_prefix + '/',
                              view_func=self.health_check,
                              methods=['GET'])
        print("Adding url rule: " + self.api_prefix + '/')
        # Add task endpoint
        self.api.add_resource(
            Task,
            self.api_prefix + '/task/<id>',
            resource_class_kwargs={'task_manager': self.api_task_manager})
        print("Adding url rule: " + self.api_prefix + '/task/<int:taskId>')

        if getenv('APPINSIGHTS_INSTRUMENTATIONKEY', None):
            azure_exporter = AzureExporter(
                connection_string='InstrumentationKey=' +
                str(getenv('APPINSIGHTS_INSTRUMENTATIONKEY')),
                timeout=float(getenv('APPINSIGHTS_TIMEOUT', 30.0)))

            sampling_rate = getenv('TRACE_SAMPLING_RATE', None)
            if not sampling_rate:
                sampling_rate = 1.0

            self.middleware = FlaskMiddleware(
                self.app,
                exporter=azure_exporter,
                sampler=ProbabilitySampler(rate=float(sampling_rate)),
            )

            self.tracer = Tracer(
                exporter=AzureExporter(
                    connection_string='InstrumentationKey=' +
                    str(getenv('APPINSIGHTS_INSTRUMENTATIONKEY')),
                    timeout=float(getenv('APPINSIGHTS_TIMEOUT', 30.0))),
                sampler=ProbabilitySampler(rate=float(sampling_rate)),
            )

        self.app.before_request(self.before_request)
Example #8
from contextlib import contextmanager


@contextmanager
def trace(name, service_name='snoop'):
    """Context manager to run a trace.

    Sets the required execution-context parents."""

    tracer = Tracer(exporter=get_exporter(service_name), sampler=sampler)
    try:
        # if there is another trace on the stack, mask it, so this trace's
        # spans don't interfere with it
        with set_parent(None):
            with set_parent(tracer):
                with span(name):
                    yield
    finally:
        tracer.finish()
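# Hypothetical usage sketch: the helper above relies on module-level
# get_exporter(), sampler, set_parent() and span() defined elsewhere in the
# same module; a call site simply wraps its work in the context manager.
with trace('index_document', service_name='snoop'):
    index_document(doc)  # hypothetical workload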
Example #9
    def pre_invocation_app_level(cls,
                                 logger,
                                 context,
                                 func_args={},
                                 *args,
                                 **kwargs):
        """An implementation of pre invocation hooks on Function App's level.
        The Python Worker Extension Interface is defined in
        https://github.com/Azure/azure-functions-python-library/
        blob/dev/azure/functions/extension/app_extension_base.py
        """
        if not cls._exporter:
            logger.warning(
                'Please call OpenCensusExtension.configure() after the import '
                'statement to ensure AzureExporter is set up correctly.')
            return

        span_context = TraceContextPropagator().from_headers({
            "traceparent":
            context.trace_context.Traceparent,
            "tracestate":
            context.trace_context.Tracestate
        })

        tracer = Tracer(span_context=span_context,
                        exporter=cls._exporter,
                        sampler=ProbabilitySampler(1.0))

        setattr(context, 'tracer', tracer)
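# A hedged usage sketch inside a function body, assuming the extension above
# has attached a tracer to the invocation context:
import azure.functions as func

def main(req: func.HttpRequest, context: func.Context) -> func.HttpResponse:
    with context.tracer.span(name='business_logic'):
        result = 'ok'  # placeholder work
    return func.HttpResponse(result)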
Example #10
def initialize_logging(logging_level: int, correlation_id: str = None) -> logging.LoggerAdapter:
    """
    Adds the Application Insights handler for the root logger and sets the given logging level.
    Creates and returns a logger adapter that integrates the correlation ID, if given, to the log messages.

    :param logging_level: The logging level to set, e.g. logging.WARNING.
    :param correlation_id: Optional. The correlation ID that is passed on to the operation_Id in App Insights.
    :returns: A newly created logger adapter.
    """
    logger = logging.getLogger()
    logger.addHandler(logging.StreamHandler())  # For logging into console
    app_insights_connection_string = os.getenv("APPLICATIONINSIGHTS_CONNECTION_STRING")

    try:
        logger.addHandler(AzureLogHandler(connection_string=app_insights_connection_string))
    except ValueError as e:
        logger.error(f"Failed to set Application Insights logger handler: {e}")

    config_integration.trace_integrations(['logging'])
    logging.basicConfig(level=logging_level, format='%(asctime)s traceId=%(traceId)s spanId=%(spanId)s %(message)s')
    # Creating a Tracer stores it in the OpenCensus execution context so the
    # logging integration can inject traceId/spanId into log records.
    Tracer(sampler=AlwaysOnSampler())
    logger.setLevel(logging_level)

    extra = None

    if correlation_id:
        extra = {'traceId': correlation_id}

    adapter = logging.LoggerAdapter(logger, extra)
    adapter.debug(f"Logger adapter initialized with extra: {extra}")

    return adapter
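# Minimal usage sketch, assuming APPLICATIONINSIGHTS_CONNECTION_STRING is set
# in the environment; the correlation id is illustrative.
logger = initialize_logging(logging.INFO,
                            correlation_id='0af7651916cd43dd8448eb211c80319c')
logger.info('processing started')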
Example #11
async def middlewareOpencensus(
    request: Request, call_next
):  # The middleware function receives the request and a call_next function that will receive the request as a parameter
    tracer = Tracer(exporter=AzureExporter(
        connection_string=f'InstrumentationKey={APPINSIGHTS_INSTRUMENTATIONKEY}'
    ),
                    sampler=ProbabilitySampler(1.0))
    # https://opencensus.io/tracing/span/kind/#2
    # SpanKind details the relationships between spans in addition to the parent/child relationship.
    # Span kind: 0 = UNSPECIFIED, 1 = SERVER, 2 = CLIENT
    # Detailed explanation of Span: https://opencensus.io/tracing/span/
    with tracer.span("main") as span:
        span.span_kind = SpanKind.SERVER

        response = await call_next(
            request
        )  # call_next passes the request to the matching path operation and returns the response it generates

        tracer.add_attribute_to_current_span(  # current span is a SERVER
            attribute_key=HTTP_STATUS_CODE,  # E.g. 202, 201, 404, 405, 505
            attribute_value=response.status_code)
        tracer.add_attribute_to_current_span(attribute_key=HTTP_URL,
                                             attribute_value=str(request.url))

    return response
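# A hedged registration sketch (assumes a FastAPI application object); the
# coroutine above then runs for every incoming request.
from fastapi import FastAPI

app = FastAPI()
app.middleware('http')(middlewareOpencensus)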
Example #12
    def __init__(self, trace_name):
        sampler = samplers.AlwaysOnSampler()

        je = JaegerExporter(service_name=trace_name,
                            host_name="localhost",
                            agent_port=6831,
                            endpoint="/api/traces")

        self.tracer = Tracer(sampler=sampler, exporter=je)
Example #13
    async def dispatch(self, request: Request, call_next):
        propagator = TraceContextPropagator()
        span_context = propagator.from_headers(dict(request.headers))

        tracer = Tracer(exporter=self.exporter,
                        sampler=self.sampler,
                        span_context=span_context,
                        propagator=propagator)

        try:
            # tracer.span_context.trace_options.set_enabled(True)

            with tracer.span(f"[{request.method}] {request.url}") as span:
                span.span_kind = SpanKind.SERVER
                # if "traceparent" not in request.headers:
                #     trace_ctx = span.context_tracer
                #     trace_options = tracer.span_context.trace_options.trace_options_byte
                #     trace_id = trace_ctx.trace_id
                #     trace_parent = f"00-{trace_id}-{span.span_id}-0{trace_options}"
                # else:
                #     trace_parent = request.headers['traceparent']

                span.add_attribute(HTTP_URL, str(request.url))
                span.add_attribute(HTTP_HOST, request.url.hostname)
                span.add_attribute(HTTP_METHOD, request.method)
                span.add_attribute(HTTP_PATH, request.url.path)
                span.add_attribute(HTTP_ROUTE, request.url.path)
                span.add_attribute("x_forwarded_host",
                                   request.headers.get("x_forwarded_host"))

                for key, value in self.extra_attrs.items():
                    span.add_attribute(key, value)

                response = await call_next(request)
                # response.headers['traceparent'] = trace_parent

                span.add_attribute(HTTP_STATUS_CODE, response.status_code)

            return response

        except Exception as err:
            logger.error(err, exc_info=True)
        finally:
            tracer.finish()
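# Hypothetical wiring sketch: assuming the dispatch above lives on a Starlette
# BaseHTTPMiddleware subclass (called OpenCensusMiddleware here) whose __init__
# stores the exporter, sampler, propagator and extra_attrs used above, it can
# be installed on a Starlette/FastAPI application like this:
app.add_middleware(OpenCensusMiddleware)  # app and constructor kwargs assumed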
Example #14
def main():
    exporter = stackdriver_exporter.StackdriverExporter()
    tracer = Tracer(exporter=exporter)
    tracer_interceptor = client_interceptor.OpenCensusClientInterceptor(
        tracer, host_port=HOST_PORT)
    channel = grpc.insecure_channel(HOST_PORT)
    channel = grpc.intercept_channel(channel, tracer_interceptor)
    stub = hello_world_pb2_grpc.GreeterStub(channel)
    response = stub.SayHello(hello_world_pb2.HelloRequest(name='you'))
    print("Message received: " + response.message)
Example #15
    def Echo(self, request, context):
        metadata = context.invocation_metadata()
        logging.debug("Echo metadata: " + str(metadata))
        metadata_dict = GetTracingMetadata(context)
        logging.debug("Metadata dict: " + str(metadata_dict))

        if 'x-b3-traceid' in metadata_dict:
            trace_id = metadata_dict['x-b3-traceid']
            logging.debug("Trace ID: " + trace_id)
            span_context = SpanContext(trace_id=trace_id,
                                       span_id=metadata_dict['x-b3-spanid'])
            tracer = Tracer(span_context=span_context,
                            exporter=exporter,
                            sampler=always_on.AlwaysOnSampler())
            with tracer.span(name='echo') as span:
                logging.debug("Processing Echo: " + str(request))
                span.add_attribute("message", request.message)
                time.sleep(0.2)
        return echo_pb2.EchoResponse(message=request.message)
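# Hypothetical client-side sketch: send the same B3 headers as gRPC metadata so
# the Echo handler above can join the trace (the stub and request type are
# assumed, not shown in the original snippet).
client_tracer = Tracer(sampler=always_on.AlwaysOnSampler())
with client_tracer.span(name='call_echo') as client_span:
    metadata = (
        ('x-b3-traceid', client_tracer.span_context.trace_id),
        ('x-b3-spanid', client_span.span_id),
    )
    response = stub.Echo(echo_pb2.EchoRequest(message='hello'),
                         metadata=metadata)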
Example #16
def pay_with_stripe(data, context):
    tracer = Tracer(exporter=sde)

    if 'data' in data:
        payment_request_json = base64.b64decode(data.get('data')).decode()
        payment_request = json.loads(payment_request_json)
        token = payment_request.get('event_context').get('token')
        order_id = payment_request.get('event_context').get('order_id')
        trace_id = payment_request.get('event_context').get('trace_id')
        tracer.span_context.trace_id = trace_id

        with tracer.span(name="process_payment"):
            order_data = firestore.collection('orders').document(
                order_id).get().to_dict()
            amount = order_data.get('amount')
            email = order_data.get('shipping').get('email')

            try:
                charge = stripe.Charge.create(
                    # For US Dollars, Stripe use Cent as the unit
                    amount=int(amount * 100),
                    currency='usd',
                    description='Example charge',
                    source=token)
                order_data['status'] = 'payment_processed'
                event_type = 'payment_processed'

            except stripe.error.StripeError as err:
                print(err)
                order_data['status'] = 'payment_failed'
                event_type = 'payment_failed'

            firestore.collection('orders').document(order_id).set(order_data)
            stream_event(topic_name=PUBSUB_TOPIC_PAYMENT_COMPLETION,
                         event_type=event_type,
                         event_context={
                             'order_id': order_id,
                             'email': email,
                             'order': order_data
                         })

    return ''
Example #17
def test_get_span_from_thread():

    result = []
    def get_span_from_thread(output):
        current_span = OpenCensusSpan.get_current_span()
        output.append(current_span)

    tracer = Tracer(sampler=AlwaysOnSampler())
    with tracer.span(name="TestSpan") as span:

        thread = threading.Thread(
            target=get_span_from_thread,
            args=(result,)
        )
        thread.start()
        thread.join()

        assert span is result[0]

    execution_context.clear()
    def test_invoke_method_with_tracer(self):
        tracer = Tracer(sampler=samplers.AlwaysOnSampler(),
                        exporter=print_exporter.PrintExporter())

        self.client = DaprClient(headers_callback=lambda: tracer.propagator.
                                 to_headers(tracer.span_context))
        self.server.set_response(b"FOO")

        with tracer.span(name="test"):
            req = common_v1.StateItem(key='test')
            resp = self.client.invoke_method(
                self.app_id,
                self.method_name,
                http_verb='PUT',
                data=req,
            )

        request_headers = self.server.get_request_headers()

        self.assertIn('Traceparent', request_headers)
        self.assertEqual(b'FOO', resp.data)
Example #19
    async def dispatch(self, request: Request, call_next):
        if (request.app.extra.get('extra', {}).get('open-census-settings',
                                                   {})):
            settings = request.app.extra['extra']['open-census-settings']
            self.load_config(settings=settings)

        if hasattr(request.app, 'trace_exporter'):
            self.exporter = request.app.trace_exporter

        span_context = self.propagator.from_headers(request.headers)
        tracer = Tracer(span_context=span_context,
                        sampler=self.sampler,
                        propagator=self.propagator,
                        exporter=self.exporter)
        with tracer.span("main") as span:
            span.span_kind = SpanKind.SERVER

            self._before_request(request, tracer)
            response = await call_next(request)
            self._after_request(response, tracer)

        return response
def create_trace_dependency(name=name,
                            message=message,
                            message_before=message_before,
                            message_after=message_after):
    config_integration.trace_integrations(['logging'])

    logger = logging.getLogger(__name__)

    handler = AzureLogHandler(connection_string='InstrumentationKey=%s' %
                              (instrument_key))
    handler.setFormatter(
        logging.Formatter('%(traceId)s %(spanId)s %(message)s'))
    logger.addHandler(handler)

    tracer = Tracer(exporter=AzureExporter(
        connection_string='InstrumentationKey=%s' % (instrument_key)),
                    sampler=ProbabilitySampler(1.0))

    logger.warning(message_before)
    with tracer.span(name=name):
        logger.warning(message)
    logger.warning(message_after)
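# Minimal usage sketch, assuming instrument_key is defined at module level as
# the function above expects; the messages are illustrative.
create_trace_dependency(name='dependency-span',
                        message='inside the span',
                        message_before='before the span',
                        message_after='after the span')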
Example #21
def main():
    sampler = always_on.AlwaysOnSampler()
    exporter = print_exporter.PrintExporter()
    tracer = Tracer(sampler=sampler, exporter=exporter)

    with tracer.span(name='root'):
        tracer.add_attribute_to_current_span(attribute_key='example key',
                                             attribute_value='example value')
        function_to_trace()
        with tracer.span(name='child'):
            function_to_trace()

    # Get the current tracer
    tracer = execution_context.get_opencensus_tracer()

    # Explicitly create spans
    tracer.start_span()

    # Get current span
    execution_context.get_current_span()

    # Explicitly end span
    tracer.end_span()
Example #22
def create_order(product_ids, stripe_token, shipping):
    # Create an OpenCensus tracer to trace each payment process, and export
    # the data to Stackdriver Tracing.
    tracer = Tracer(exporter=sde)
    trace_id = tracer.span_context.trace_id

    # Prepare the order
    with tracer.span(name="prepare_order_info"):
        amount = product_catalog.calculate_total_price(product_ids)
        order = orders.Order(amount=amount,
                             shipping=shipping,
                             status="order_created",
                             items=product_ids)
        order_id = orders.add_order(order)

    # Stream a Payment event
    with tracer.span(name="send_payment_event"):
        if stripe_token:
            # Publish an event to the topic for new payments.
            # Cloud Function pay_with_stripe subscribes to the topic and
            # processes the payment using the Stripe API upon arrival of new
            # events.
            # Cloud Function streamEvents (or App Engine service stream-event)
            # subscribes to the topic and saves the event to BigQuery for
            # data analytics upon arrival of new events.
            eventing.stream_event(
                topic_name=PUBSUB_TOPIC_PAYMENT_PROCESS,
                event_type='order_created',
                event_context={
                    'order_id': order_id,
                    'token': stripe_token,
                    # Pass the trace ID in the event so that Cloud Function
                    # pay_with_stripe can continue the trace.
                    'trace_id': trace_id,
                    'email': shipping.email
                }
            )
Example #23
def initialize_logging(logging_level: int, correlation_id: str, add_console_handler: bool = False) -> logging.LoggerAdapter:
    """
    Adds the Application Insights handler for the root logger and sets the given logging level.
    Creates and returns a logger adapter that integrates the correlation ID, if given, to the log messages.
    Note: This should be called only once, otherwise duplicate log entries could be produced.

    :param logging_level: The logging level to set, e.g. logging.WARNING.
    :param correlation_id: The correlation ID that is passed on to the operation_Id in App Insights.
    :param add_console_handler: Optional. Whether to also attach a console (stream) handler. Defaults to False.
    :returns: A newly created logger adapter.
    """
    logger = logging.getLogger()

    # When using sessions and NEXT_AVAILABLE_SESSION we see regular exceptions which are actually expected
    # See https://github.com/Azure/azure-sdk-for-python/issues/9402
    # Other log entries such as 'link detach' also confuse the logs, and are expected.
    # We don't want these making the logs any noisier so we raise the logging level for that logger here
    # To inspect all the loggers, use -> loggers = [logging.getLogger(name) for name in logging.root.manager.loggerDict]
    for logger_name in LOGGERS_FOR_ERRORS_ONLY:
        logging.getLogger(logger_name).setLevel(logging.ERROR)

    if add_console_handler:
        console_formatter = logging.Formatter(fmt='%(module)-7s %(name)-7s %(process)-7s %(asctime)s %(levelname)-7s %(message)s')
        console_handler = logging.StreamHandler()
        console_handler.setFormatter(console_formatter)
        logger.addHandler(console_handler)

    try:
        azurelog_formatter = AzureLogFormatter()
        # picks up APPLICATIONINSIGHTS_CONNECTION_STRING automatically
        azurelog_handler = AzureLogHandler()
        azurelog_handler.setFormatter(azurelog_formatter)
        logger.addHandler(azurelog_handler)
    except ValueError as e:
        logger.error(f"Failed to set Application Insights logger handler: {e}")

    config_integration.trace_integrations(['logging'])
    logging.basicConfig(level=logging_level, format='%(asctime)s traceId=%(traceId)s spanId=%(spanId)s %(message)s')
    # Creating a Tracer stores it in the OpenCensus execution context so the
    # logging integration can inject traceId/spanId into log records.
    Tracer(sampler=AlwaysOnSampler())
    logger.setLevel(logging_level)

    extra = None

    if correlation_id:
        extra = {'traceId': correlation_id}

    adapter = logging.LoggerAdapter(logger, extra)
    adapter.debug(f"Logger adapter initialized with extra: {extra}")

    return adapter
Example #24
    def __init__(
        self,
        study_id: str,
        study_file_id: str,
        matrix_file: str = None,
        matrix_file_type: str = None,
        cell_metadata_file: str = None,
        cluster_file: str = None,
        subsample=False,
        ingest_cell_metadata=False,
        ingest_cluster=False,
        **kwargs,
    ):
        """Initializes variables in Ingest Pipeline"""
        self.study_id = study_id
        self.study_file_id = study_file_id
        self.matrix_file = matrix_file
        self.matrix_file_type = matrix_file_type
        if os.environ.get("DATABASE_HOST") is not None:
            # Needed to run tests in CircleCI.
            # TODO (SCP-2000): Integrate MongoDB emulator to test_ingest.py, then remove this
            self.db = self.get_mongo_db()
        else:
            self.db = None
        self.cluster_file = cluster_file
        self.kwargs = kwargs
        self.cell_metadata_file = cell_metadata_file
        if "GOOGLE_CLOUD_PROJECT" in os.environ:
            # instantiate trace exporter
            exporter = StackdriverExporter(
                project_id=os.environ["GOOGLE_CLOUD_PROJECT"])
            self.tracer = Tracer(exporter=exporter, sampler=AlwaysOnSampler())

        else:
            self.tracer = nullcontext()
        if ingest_cell_metadata:
            self.cell_metadata = self.initialize_file_connection(
                "cell_metadata", cell_metadata_file)
        if ingest_cluster:
            self.cluster = self.initialize_file_connection(
                "cluster", cluster_file)
        if subsample:
            self.cluster_file = cluster_file
            self.cell_metadata_file = cell_metadata_file
Example #25
    def pre_invocation_app_level(cls,
                                 logger: Logger,
                                 context: Context,
                                 func_args: Dict[str, object] = {},
                                 *args,
                                 **kwargs) -> None:
        if not cls._has_configure_called:
            raise FunctionExtensionException(
                'Please ensure OpenCensusExtension.configure() is called '
                'after the import OpenCensusExtension statement.')

        span_context = TraceContextPropagator().from_headers({
            "traceparent":
            context.trace_context.Traceparent,
            "tracestate":
            context.trace_context.Tracestate
        })

        tracer = Tracer(span_context=span_context,
                        exporter=cls._exporter,
                        sampler=ProbabilitySampler(1.0))

        cls._tracers[context.function_name] = tracer
        setattr(context, 'tracer', tracer)
# (Head of this snippet truncated in the source; the handler type below is an
# assumption, mirroring the other Flask samples in this collection.)
logger = logging.getLogger(__name__)
logger.addHandler(
    AzureLogHandler(
        connection_string=
        'InstrumentationKey=bdba70b7-8e6e-4adb-adb5-8fd02df32aea;IngestionEndpoint=https://westus2-1.in.applicationinsights.azure.com/'
    ))
logger.setLevel(logging.INFO)

# Metrics
exporter = metrics_exporter.new_metrics_exporter(
    enable_standard_metrics=True,
    connection_string=
    'InstrumentationKey=bdba70b7-8e6e-4adb-adb5-8fd02df32aea;IngestionEndpoint=https://westus2-1.in.applicationinsights.azure.com/'
)

# Tracing
tracer = Tracer(
    exporter=AzureExporter(
        connection_string=
        'InstrumentationKey=bdba70b7-8e6e-4adb-adb5-8fd02df32aea;IngestionEndpoint=https://westus2-1.in.applicationinsights.azure.com/'
    ),
    sampler=ProbabilitySampler(1.0),
)

app = Flask(__name__)

# Requests
middleware = FlaskMiddleware(
    app,
    exporter=AzureExporter(
        connection_string=
        "InstrumentationKey=bdba70b7-8e6e-4adb-adb5-8fd02df32aea;IngestionEndpoint=https://westus2-1.in.applicationinsights.azure.com/"
    ),
    sampler=ProbabilitySampler(rate=1.0),
)
import sys
import time

from opencensus.trace.tracer import Tracer
from opencensus.trace import time_event as time_event_module
from opencensus.ext.zipkin.trace_exporter import ZipkinExporter
from opencensus.trace.samplers import always_on

# 1a. Setup the exporter
ze = ZipkinExporter(service_name="python-quickstart",
                    host_name='localhost',
                    port=9411,
                    endpoint='/api/v2/spans')
# 1b. Set the tracer to use the exporter
# 2. Configure 100% sample rate, otherwise, few traces will be sampled.
# 3. Get the global singleton Tracer object
tracer = Tracer(exporter=ze, sampler=always_on.AlwaysOnSampler())

def main():
    # 4. Create a scoped span. The span will close at the end of the block.
    with tracer.span(name="main") as span:
        for i in range(0, 10):
            doWork()

def doWork():
    # 5. Start another span. Because this is within the scope of the "main" span,
    # this will automatically be a child span.
    with tracer.span(name="doWork") as span:
        print("doing busy work")
        try:
            time.sleep(0.1)
        except:
            pass
Example #28
    except KeyError:
        logger.info("Profiler disabled.")

    tracer = None
    try:
        if "DISABLE_TRACING" in os.environ:
            raise KeyError()
        else:
            logger.info("Tracing enabled.")
            sampler = always_on.AlwaysOnSampler()
            exporter = stackdriver_exporter.StackdriverExporter(
                project_id=os.environ.get('GCP_PROJECT_ID'),
                transport=AsyncTransport)
            tracer_interceptor = server_interceptor.OpenCensusServerInterceptor(
                sampler, exporter)
            tracer = Tracer(exporter=exporter)

    except (KeyError, DefaultCredentialsError):
        logger.info("Tracing disabled.")
        tracer_interceptor = server_interceptor.OpenCensusServerInterceptor()

    try:
        if "DISABLE_DEBUGGER" in os.environ:
            raise KeyError()
        else:
            logger.info("Debugger enabled.")
            try:
                googleclouddebugger.enable(module='recommendationserver',
                                           version='1.0.0')
            except Exception as err:
                logger.error("Could not enable debugger")
Example #29
# (Head of this snippet truncated in the source; the handler type below is an
# assumption, mirroring the other Flask samples in this collection.)
logger = logging.getLogger(__name__)
logger.addHandler(
    AzureLogHandler(
        connection_string=
        'InstrumentationKey=15d02af3-bb8b-48ab-937e-7f84588dfce0;IngestionEndpoint=https://westus-0.in.applicationinsights.azure.com/'
    ))

# Metrics
exporter = metrics_exporter.new_metrics_exporter(
    enable_standard_metrics=True,
    connection_string=
    'InstrumentationKey=15d02af3-bb8b-48ab-937e-7f84588dfce0;IngestionEndpoint=https://westus-0.in.applicationinsights.azure.com/'
)

# Tracing
tracer = Tracer(
    exporter=AzureExporter(
        connection_string=
        'InstrumentationKey=15d02af3-bb8b-48ab-937e-7f84588dfce0;IngestionEndpoint=https://westus-0.in.applicationinsights.azure.com/'
    ),
    sampler=ProbabilitySampler(1.0),
)

app = Flask(__name__)

# Requests
middleware = FlaskMiddleware(
    app,
    exporter=AzureExporter(
        connection_string=
        "InstrumentationKey=15d02af3-bb8b-48ab-937e-7f84588dfce0;IngestionEndpoint=https://westus-0.in.applicationinsights.azure.com/"
    ),
    sampler=ProbabilitySampler(rate=1.0),
)
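# A hedged sketch of a route using the tracer configured above; FlaskMiddleware
# already traces incoming requests, and this adds an explicit child span
# (route path and span name are illustrative).
@app.route('/work')
def do_work():
    with tracer.span(name='do_work'):
        return 'done'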
Example #30
logger.addHandler(
    AzureEventHandler(
        connection_string=
        'InstrumentationKey=3d3e7bec-d2e0-41b6-b690-7c9cb2020fc2'))

# Metrics
exporter = metrics_exporter.new_metrics_exporter(
    enable_standard_metrics=True,
    connection_string='InstrumentationKey=3d3e7bec-d2e0-41b6-b690-7c9cb2020fc2'
)

# Tracing
tracer = Tracer(
    exporter=AzureExporter(
        connection_string=
        'InstrumentationKey=3d3e7bec-d2e0-41b6-b690-7c9cb2020fc2'),
    sampler=ProbabilitySampler(1.0),
)

app = Flask(__name__)

# Requests
middleware = FlaskMiddleware(
    app,
    exporter=AzureExporter(
        connection_string=
        "InstrumentationKey=3d3e7bec-d2e0-41b6-b690-7c9cb2020fc2"),
    sampler=ProbabilitySampler(rate=1.0),
)

# Load configurations from environment or config file