def test_get_parent_span(self):
    """Verify common.get_parent_span() behavior with and without opencensus importable,
    and with/without an active tracer span.

    NOTE(review): mutates sys.modules to simulate opencensus being absent; the
    original entry is restored before the test continues.
    """
    with ContextHelper():
        # With opencensus removed from sys.modules, no parent can be resolved.
        opencensus = sys.modules["opencensus"]
        del sys.modules["opencensus"]
        parent = common.get_parent_span(None)
        assert parent is None
        # Restore opencensus: a synthetic root parent span is created.
        sys.modules["opencensus"] = opencensus
        parent = common.get_parent_span(None)
        assert parent.span_instance.name == "azure-sdk-for-python-first_parent_span"
        # Creating a tracer (with no active span yet) still yields the synthetic root.
        tracer = tracer_module.Tracer(sampler=AlwaysOnSampler())
        parent = common.get_parent_span(None)
        assert parent.span_instance.name == "azure-sdk-for-python-first_parent_span"
        parent.finish()
        # Once a span is active on the tracer, it becomes the parent.
        some_span = tracer.start_span(name="some_span")
        new_parent = common.get_parent_span(None)
        assert new_parent.span_instance.name == "some_span"
        some_span.finish()
        # An explicitly passed span wins over any context lookup.
        should_be_old_parent = common.get_parent_span(parent.span_instance)
        assert should_be_old_parent.span_instance == parent.span_instance
# NOTE(review): this first statement is the tail of __telemetry_processor(envelope);
# its `def` line is outside this chunk. It stamps the cloud role onto every
# telemetry envelope so Application Insights groups records under PROJECT_NAME.
envelope.tags['ai.cloud.role'] = PROJECT_NAME

# Ship application logs to Azure Application Insights, flushing every 5 seconds.
handler = AzureLogHandler(
    connection_string=AZURE_APP_INSIGHTS_CONN_STRING,
    export_interval=5.0,
)
handler.add_telemetry_processor(__telemetry_processor)
handler.setLevel(logging.DEBUG)
app.logger.addHandler(handler)

# Export request traces to the same Application Insights resource; the Flask
# middleware instruments every incoming request (AlwaysOnSampler = 100% sampling).
exporter = AzureExporter(connection_string=AZURE_APP_INSIGHTS_CONN_STRING)
exporter.add_telemetry_processor(__telemetry_processor)
FlaskMiddleware(
    app=app,
    sampler=AlwaysOnSampler(),
    exporter=exporter,
)

# Enable CSRF protection for all form posts.
csrf = CSRFProtect(app)

# -- URLs/routes setup -------------------------------------------------------
app.add_url_rule('/login', 'login', login, methods=['GET', 'POST'])
app.add_url_rule('/logout', 'logout', logout, methods=['GET'])
app.add_url_rule('/consent', 'consent', consent, methods=['GET', 'POST'])
app.add_url_rule('/register', 'register', register, methods=['GET', 'POST'])
app.add_url_rule('/verify-email', 'verify-email', verify_email, methods=['GET'])
# Dapr client example: invokes the 'say' method on the 'invoke-receiver' app,
# exporting trace spans to a local Zipkin instance.
# NOTE(review): the invoke_method(...) call is truncated at the end of this
# chunk — its closing arguments/paren lie outside this view.
import json
import time
from dapr.clients import DaprClient
from opencensus.trace.tracer import Tracer
from opencensus.trace import time_event as time_event_module
from opencensus.ext.zipkin.trace_exporter import ZipkinExporter
from opencensus.trace.samplers import AlwaysOnSampler

# Export spans to Zipkin's v2 API on localhost:9411.
ze = ZipkinExporter(
    service_name="python-example",
    host_name='localhost',
    port=9411,
    endpoint='/api/v2/spans')

tracer = Tracer(exporter=ze, sampler=AlwaysOnSampler())

with tracer.span(name="main") as span:
    with DaprClient(tracer=tracer) as d:
        num_messages = 2
        for i in range(num_messages):
            # Create a typed message with content type and body
            resp = d.invoke_method(
                'invoke-receiver',
                'say',
                data=json.dumps({
                    'id': i,
                    'message': 'hello world'
                }),
def register_azure_exporter_with_tracer(azure_connection_string):
    """Build an OpenCensus Tracer that exports all spans to Azure Monitor.

    :param azure_connection_string: Application Insights connection string.
    :return: a Tracer using AlwaysOnSampler (100% sampling).
    """
    azure_exporter = AzureExporter(connection_string=azure_connection_string)
    return Tracer(exporter=azure_exporter, sampler=AlwaysOnSampler())
def test_distributed_tracing_policy_with_user_agent():
    """Test policy working with user agent."""
    with ContextHelper(environ={"AZURE_HTTP_USER_AGENT": "mytools"}):
        exporter = MockExporter()
        trace = tracer_module.Tracer(sampler=AlwaysOnSampler(), exporter=exporter)
        with trace.span("parent"):
            policy = DistributedTracingPolicy()

            request = HttpRequest("GET", "http://127.0.0.1")
            request.headers["x-ms-client-request-id"] = "some client request id"
            pipeline_request = PipelineRequest(request, PipelineContext(None))

            # Run the user agent policy first so the tracing policy can pick
            # up the http.user_agent attribute from the request headers.
            user_agent = UserAgentPolicy()
            user_agent.on_request(pipeline_request)
            policy.on_request(pipeline_request)

            response = HttpResponse(request, None)
            response.headers = request.headers
            response.status_code = 202
            response.headers["x-ms-request-id"] = "some request id"
            pipeline_response = PipelineResponse(request, response, PipelineContext(None))

            # The traceparent header injected by the policy must match the
            # tracer's own propagated span context.
            ctx = trace.span_context
            header = trace.propagator.to_headers(ctx)
            assert request.headers.get("traceparent") == header.get("traceparent")

            policy.on_response(pipeline_request, pipeline_response)

            # Second request on the same policy, this time ending in an
            # exception instead of a response.
            time.sleep(0.001)
            policy.on_request(pipeline_request)
            try:
                raise ValueError("Transport trouble")
            except:
                policy.on_exception(pipeline_request)
            user_agent.on_response(pipeline_request, pipeline_response)

        trace.finish()
        exporter.build_tree()
        parent = exporter.root

        # First child span: the successful request (202).
        network_span = parent.children[0]
        assert network_span.span_data.name == "/"
        assert network_span.span_data.attributes.get("http.method") == "GET"
        assert network_span.span_data.attributes.get("component") == "http"
        assert network_span.span_data.attributes.get("http.url") == "http://127.0.0.1"
        assert network_span.span_data.attributes.get("http.user_agent").endswith("mytools")
        assert network_span.span_data.attributes.get("x-ms-request-id") == "some request id"
        assert network_span.span_data.attributes.get("x-ms-client-request-id") == "some client request id"
        assert network_span.span_data.attributes.get("http.status_code") == 202

        # Second child span: the failed request — no service request id, and a
        # synthetic 504 status code recorded by on_exception.
        network_span = parent.children[1]
        assert network_span.span_data.name == "/"
        assert network_span.span_data.attributes.get("http.method") == "GET"
        assert network_span.span_data.attributes.get("component") == "http"
        assert network_span.span_data.attributes.get("http.url") == "http://127.0.0.1"
        assert network_span.span_data.attributes.get("http.user_agent").endswith("mytools")
        assert network_span.span_data.attributes.get("x-ms-client-request-id") == "some client request id"
        assert network_span.span_data.attributes.get("x-ms-request-id") is None
        assert network_span.span_data.attributes.get("http.status_code") == 504
        # Exception should propagate status for Opencensus
        assert network_span.span_data.status.message == 'Transport trouble'
def test_ensure_exceptions_are_raised_yet_reported():
    """Verify that a failing redis call both raises to the caller AND is
    reported as a span (with error status) plus recorded stats."""
    # Capture spans in-memory instead of exporting them.
    span_retainer = RetainerTraceExporter()
    tracer = Tracer(sampler=AlwaysOnSampler(), exporter=span_retainer)
    execution_context.set_opencensus_tracer(tracer)

    # Capture stats (view data) in-memory as well.
    view_data_retainer = RetainerStatsExporter()
    view_manager = stats.stats.view_manager
    view_manager.register_exporter(view_data_retainer)
    ocredis.register_views()

    # Port 2**18 is out of the valid TCP range, so the connection must fail
    # and the exception must propagate out of client.get().
    with pytest.raises(Exception):
        invalid_port = 1<<18
        client = ocredis.OcRedis(host='localhost', port=invalid_port)
        client.get('newer')

    spans = span_retainer.spans()
    assert len(spans) == 1
    span0 = spans[0]
    assert span0.name == 'redispy.Redis.get'
    # Ensure that the span for .get is the root span.
    assert span0.parent_span_id == None

    # Now check that the top most span has a Status
    root_span_status = span0.status
    # Unknown as per https://opencensus.io/tracing/span/status/#status-code-mapping
    assert root_span_status.code == 2
    # NOTE(review): this message is platform-specific (macOS getaddrinfo wording);
    # it may differ on other OSes.
    assert root_span_status.message == 'Error 8 connecting to localhost:262144. nodename nor servname provided, or not known.'
    assert root_span_status.details == None

    # Next let's check that stats are recorded.
    view_data_list = view_data_retainer.view_data()
    assert len(view_data_list) >= 2

    # Expecting the values for the various views per method.
    # However, since stats recording is time-imprecise we can
    # less or more values recorded, hence bucketize view_data by
    # name first and then perform the various assertions.
    view_data_by_name = bucketize_view_data_by_name(view_data_list)
    calls_view_data_list = view_data_by_name['redispy/calls']
    assert len(calls_view_data_list) > 0
    latency_view_data_list = view_data_by_name['redispy/latency']
    assert len(latency_view_data_list) > 0

    calls_view_data_get = calls_view_data_list[0]
    latency_view_data_execute_command = latency_view_data_list[0]

    # Check the shape of the "calls" view.
    count_aggregation = CountAggregation()
    view_calls_execute_command = calls_view_data_get.view
    # assert view_calls_execute_command.aggregation.count == 1
    assert view_calls_execute_command.name == "redispy/calls"
    assert view_calls_execute_command.description == "The number of calls"
    assert view_calls_execute_command.columns == ['method', 'error', 'status']
    # calls_execute_command_tag_values = view_calls_execute_command.get_tag_values(
    #     calls_view_data_get.columns, view_calls_execute_command.columns)
    calls_tag_values = calls_view_data_get.tag_value_aggregation_data_map.keys()
    # Sort by method name so the assertion below is order-independent.
    sorted_calls_tag_values = sorted(calls_tag_values, key=lambda tag_value_tuple: tag_value_tuple[0])
    print(sorted_calls_tag_values)
    assert len(sorted_calls_tag_values) >= 1
    assert sorted_calls_tag_values[0] == (
        'redispy.Redis.get',
        'Error 8 connecting to localhost:262144. nodename nor servname provided, or not known.',
        'ERROR',
    )

    # Check the shape of the "latency" view.
    latency_distribution_aggregation = DistributionAggregation()
    view_latency_execute_command = latency_view_data_execute_command.view
    # assert view_calls_execute_command.aggregation.count == 1
    assert view_latency_execute_command.name == "redispy/latency"
    assert view_latency_execute_command.description == "The distribution of the latencies per method"
    assert view_latency_execute_command.columns == ['method', 'error', 'status']
    # calls_execute_command_tag_values = view_calls_execute_command.get_tag_values(
    #     calls_view_data_get.columns, view_calls_execute_command.columns)
    # TODO: File a bug with OpenCensus-Python about them using strings
    # for start and endtime, instead of actual date* objects on which we
    # can easily calculate time spent etc.
    assert latency_view_data_execute_command.start_time != ''
    assert latency_view_data_execute_command.end_time != ''
# Flask service instrumented with OpenCensus: spans are shipped to an
# OpenCensus agent/collector (COLLECTOR env var) and propagated via B3 headers.
# NOTE(review): Tracer and AlwaysOnSampler are used below but not imported in
# this chunk — their imports presumably live outside this view.
# NOTE(review): the final `with tracer.span(name='ProcessData'):` block is
# truncated at the end of this chunk.
from opencensus.ext.ocagent import trace_exporter
from opencensus.ext.flask.flask_middleware import FlaskMiddleware
from opencensus.trace.propagation.b3_format import B3FormatPropagator
from opencensus.trace import config_integration
import time
import random
import socket
import os
import flask
import requests

exporter = trace_exporter.TraceExporter(
    service_name=os.getenv('SERVICE_NAME'),
    endpoint=os.getenv('COLLECTOR'))
tracer = Tracer(sampler=AlwaysOnSampler(),
                exporter=exporter,
                propagator=B3FormatPropagator())

# Auto-trace outbound calls made with the `requests` library.
integration = ['requests']
config_integration.trace_integrations(integration)

app = flask.Flask(__name__)
# Trace every incoming request except the health-check path.
middleware = FlaskMiddleware(app,
                             exporter=exporter,
                             sampler=AlwaysOnSampler(),
                             propagator=B3FormatPropagator(),
                             blacklist_paths=['_ah/health'])

@app.route('/')
def init():
    # Three sequential child spans: simulated local work, an outbound call to
    # the downstream service, then processing of its response.
    with tracer.span(name='Initiate'):
        time.sleep(random.random())
    with tracer.span(name='GetDataFromOutside'):
        response = requests.get(os.getenv('REMOTE_ENDPOINT'))
    with tracer.span(name='ProcessData'):
# Dapr gRPC service exposing 'say' and 'sleep' methods, with OpenCensus
# server-side tracing via a gRPC interceptor.
# NOTE(review): the `sleep` handler's body is truncated at the end of this chunk.
import time
from concurrent import futures
from dapr.ext.grpc import App, InvokeMethodRequest, InvokeMethodResponse
from opencensus.trace.samplers import AlwaysOnSampler
from opencensus.trace.tracer import Tracer
from opencensus.ext.grpc import server_interceptor
# NOTE(review): duplicate import — AlwaysOnSampler is already imported above.
from opencensus.trace.samplers import AlwaysOnSampler

# Intercept every incoming gRPC call and start a traced span (100% sampling).
tracer_interceptor = server_interceptor.OpenCensusServerInterceptor(
    AlwaysOnSampler())
app = App(thread_pool=futures.ThreadPoolExecutor(max_workers=10),
          interceptors=(tracer_interceptor, ))

@app.method(name='say')
def say(request: InvokeMethodRequest) -> InvokeMethodResponse:
    # Manually open an application-level span around the handler body and
    # annotate it with the request payload length.
    tracer = Tracer(sampler=AlwaysOnSampler())
    with tracer.span(name='say') as span:
        data = request.text()
        span.add_annotation('Request length', len=len(data))
        print(request.metadata, flush=True)
        print(request.text(), flush=True)
        return InvokeMethodResponse(b'SAY', "text/plain; charset=UTF-8")

@app.method(name='sleep')
def sleep(request: InvokeMethodRequest) -> InvokeMethodResponse:
def __init__(self, logger, flask_app=None):
    """Initialize service telemetry: static properties, and — when an
    Application Insights key is present in the environment — Azure Monitor
    logging, tracing, Flask request middleware, and metrics export.

    :param logger: logger instance to attach the Azure log handler to.
    :param flask_app: optional Flask app; when given, its requests are traced.
    """
    # Static service metadata read from configuration env vars.
    self._properties = {
        "service_name": getenv(CONF_SERVICE_NAME),
        "service_version": getenv(CONF_SERVICE_VERSION),
        "service_cluster": getenv(CONF_SERVICE_CLUSTER),
        "service_model_name": getenv(CONF_SERVICE_MODEL_NAME),
        "service_model_version": getenv(CONF_SERVICE_MODEL_VERSION),
        "service_container_version": getenv(CONF_SERVICE_CONTAINER_VERSION),
        "service_container_name": getenv(CONF_SERVICE_CONTAINER_NAME),
        "task_id": "none",
    }
    self.logger = logger
    self.metrics = {}
    self.tracer = None
    # Azure Monitor is only wired up when the instrumentation key is set.
    self.appinsights_key = getenv("APPINSIGHTS_INSTRUMENTATIONKEY", None)
    if self.appinsights_key:
        try:
            print("Setting up Azure Monitor with Application Insights.")
            # Correlate log records with trace context.
            config_integration.trace_integrations(["logging"])
            # self.logger = logging.getLogger(getenv(CONF_SERVICE_NAME))
            self.logger.setLevel(logging.INFO)
            handler = AzureLogHandler(
                connection_string="InstrumentationKey=" + self.appinsights_key)
            self.logger.addHandler(handler)
            # NOTE(review): if APPINSIGHTS_TIMEOUT is set in the environment,
            # getenv returns a str while the default is a float — confirm the
            # exporter accepts both.
            self.azure_exporter = AzureExporter(
                connection_string="InstrumentationKey=" + self.appinsights_key,
                timeout=getenv("APPINSIGHTS_TIMEOUT", 30.0),
            )
            sampling_rate = getenv("TRACE_SAMPLING_RATE", None)
            if not sampling_rate:
                sampling_rate = 1.0
            self.middleware = None
            if flask_app:
                # Trace incoming Flask requests at the configured sample rate.
                self.middleware = FlaskMiddleware(
                    flask_app,
                    exporter=self.azure_exporter,
                    sampler=ProbabilitySampler(rate=float(sampling_rate)),
                )
            # self.tracer = Tracer(
            #     exporter=self.azure_exporter,
            #     sampler=ProbabilitySampler(rate=float(sampling_rate)),
            # )
            # NOTE(review): the manual tracer always samples (AlwaysOnSampler),
            # ignoring TRACE_SAMPLING_RATE, which only applies to the Flask
            # middleware above — confirm this is intentional.
            self.tracer = Tracer(exporter=self.azure_exporter, sampler=AlwaysOnSampler())
            # Metrics/stats pipeline exporting to the same App Insights resource.
            self.metrics_exporter = metrics_exporter.new_metrics_exporter(
                connection_string="InstrumentationKey=" + self.appinsights_key)
            stats = stats_module.stats
            self.view_manager = stats.view_manager
            self.view_manager.register_exporter(self.metrics_exporter)
            self.stats_recorder = stats.stats_recorder
        except Exception as e:
            # Best-effort setup: telemetry failures must not prevent startup.
            print("Exception in setting up the Azure Monitor:")
            print(e)
def initialize_tracer():
    """Create a Tracer that exports every span (100% sampling) to Stackdriver.

    :return: a configured opencensus Tracer.
    """
    return Tracer(exporter=StackdriverExporter(), sampler=AlwaysOnSampler())
# Minimal Flask app with OpenCensus tracing and stats objects set up at
# module level (no exporter configured — spans/stats stay in-process).
from flask import Flask
from opencensus.trace.tracer import Tracer
from opencensus.trace.samplers import AlwaysOnSampler
from opencensus.stats import stats as stats_module

# Sample every trace.
tracer = Tracer(sampler=AlwaysOnSampler())

# Shared stats machinery (no views or exporters registered here).
stats = stats_module.stats
view_manager = stats.view_manager
stats_recorder = stats.stats_recorder

app = Flask(__name__)

@app.route('/')
def index():
    # Single route returning a static HTML greeting.
    return "<h1>Hello world</h1>"

if __name__ == '__main__':
    app.run(debug=True)
def test_distributed_tracing_policy_solo(should_set_sdk_context):
    """Test policy with no other policy and happy path"""
    with ContextHelper():
        exporter = MockExporter()
        trace = tracer_module.Tracer(sampler=AlwaysOnSampler(), exporter=exporter)
        with trace.span("parent"):
            # Optionally pre-populate the SDK's tracing context with the
            # current span (parametrized by should_set_sdk_context).
            if should_set_sdk_context:
                tracing_context.current_span.set(
                    OpenCensusSpan(trace.current_span()))
            policy = DistributedTracingPolicy()

            request = HttpRequest("GET", "http://127.0.0.1/temp?query=query")
            request.headers[
                "x-ms-client-request-id"] = "some client request id"
            pipeline_request = PipelineRequest(request, PipelineContext(None))
            policy.on_request(pipeline_request)

            response = HttpResponse(request, None)
            response.headers = request.headers
            response.status_code = 202
            response.headers["x-ms-request-id"] = "some request id"

            # The traceparent header injected by the policy must match the
            # tracer's own propagated span context.
            ctx = trace.span_context
            header = trace.propagator.to_headers(ctx)
            assert request.headers.get("traceparent") == header.get(
                "traceparent")

            policy.on_response(
                pipeline_request,
                PipelineResponse(request, response, PipelineContext(None)))

            # Second request on the same policy, this time ending in an
            # exception instead of a response.
            time.sleep(0.001)
            policy.on_request(pipeline_request)
            policy.on_exception(pipeline_request)

        trace.finish()
        exporter.build_tree()
        parent = exporter.root

        # First child span: the successful request (202). The span name is the
        # URL path only — query string stripped.
        network_span = parent.children[0]
        assert network_span.span_data.name == "/temp"
        assert network_span.span_data.attributes.get("http.method") == "GET"
        assert network_span.span_data.attributes.get("component") == "http"
        assert network_span.span_data.attributes.get(
            "http.url") == "http://127.0.0.1/temp?query=query"
        assert network_span.span_data.attributes.get("http.user_agent") is None
        assert network_span.span_data.attributes.get(
            "x-ms-request-id") == "some request id"
        assert network_span.span_data.attributes.get(
            "x-ms-client-request-id") == "some client request id"
        assert network_span.span_data.attributes.get("http.status_code") == 202

        # Second child span: the failed request — no service request id, and a
        # synthetic 504 status code recorded by on_exception.
        network_span = parent.children[1]
        assert network_span.span_data.name == "/temp"
        assert network_span.span_data.attributes.get("http.method") == "GET"
        assert network_span.span_data.attributes.get("component") == "http"
        assert network_span.span_data.attributes.get(
            "http.url") == "http://127.0.0.1/temp?query=query"
        assert network_span.span_data.attributes.get(
            "x-ms-client-request-id") == "some client request id"
        assert network_span.span_data.attributes.get("http.user_agent") is None
        assert network_span.span_data.attributes.get("x-ms-request-id") == None
        assert network_span.span_data.attributes.get("http.status_code") == 504
def exporter():
    """Fixture: build a CapturingExporter wired into a fresh Tracer.

    The Tracer is constructed purely for its side effects (it is not returned);
    the capturing exporter is handed back so tests can inspect exported spans.
    """
    capturing = CapturingExporter()
    tracer_module.Tracer(
        sampler=AlwaysOnSampler(),
        exporter=capturing,
        propagator=GoogleCloudFormatPropagator(),
    )
    return capturing
def gcloud_opencensus_tracer_generator(trace):
    """Build a Tracer continuing the trace described by a propagation header.

    :param trace: the incoming trace header (e.g. X-Cloud-Trace-Context value),
        decoded via the module-level ``propogator``.
    :return: a Tracer bound to the decoded span context, exporting through the
        module-level ``exporter`` with 100% sampling.
    """
    parsed_context = propogator.from_header(trace)
    return Tracer(
        exporter=exporter,
        span_context=parsed_context,
        sampler=AlwaysOnSampler(),
    )
def test_passing_kind_in_ctor(self):
    """A kind passed to the OpenCensusSpan constructor is reflected on the wrapper."""
    with ContextHelper() as ctx:
        tracer = tracer_module.Tracer(sampler=AlwaysOnSampler())
        # Start a span so the wrapper below has an active context to attach to.
        parent = tracer.start_span()
        span = OpenCensusSpan(kind=SpanKind.CLIENT)
        assert span.kind == SpanKind.CLIENT