def test_new_metrics_exporter(self, exporter_mock):
    """new_metrics_exporter with an ikey registers two metric producers,
    the second being the Azure standard-metrics producer."""
    ikey = '12345678-1234-5678-abcd-12345678abcd'
    exporter = metrics_exporter.new_metrics_exporter(
        instrumentation_key=ikey)
    # The exporter keeps the instrumentation key it was built with.
    self.assertEqual(exporter.options.instrumentation_key, ikey)
    # Exactly one exporter task was created, fed with two producers.
    self.assertEqual(len(exporter_mock.call_args_list), 1)
    producers = exporter_mock.call_args[0][0]
    self.assertEqual(len(producers), 2)
    producer_class = standard_metrics.AzureStandardMetricsProducer
    self.assertNotIsInstance(producers[0], producer_class)
    self.assertIsInstance(producers[1], producer_class)
def __init__(self, test_name, test_description=None):
    """Set up an Azure Monitor metrics pipeline (events/memory/cpu/error
    measures and views) for one stress test.

    :param test_name: name of the test; appended to every measure name.
    :param test_description: optional description; when omitted, the
        measure descriptions are None.
    """
    # oc will automatically search for the ENV VAR
    # 'APPLICATIONINSIGHTS_CONNECTION_STRING'
    self.exporter = metrics_exporter.new_metrics_exporter()
    self.stats = stats_module.stats
    self.view_manager = self.stats.view_manager
    self.stats_recorder = self.stats.stats_recorder
    self.azure_logger = get_azure_logger(test_name)
    self.name = test_name
    self.desc = test_description

    def _texts(name_prefix, desc_prefix=None):
        # Build the (name, description) pair for one measure; description
        # stays None when no test description was provided (same
        # conditional the original repeated four times).
        desc_prefix = desc_prefix or name_prefix
        name = name_prefix + self.name
        desc = desc_prefix + self.desc if self.desc else None
        return name, desc

    events_name, events_desc = _texts("The number of events handled by ")
    memory_name, memory_desc = _texts("memory usage percentage for ")
    cpu_name, cpu_desc = _texts("cpu usage percentage for ")
    error_name, error_desc = _texts(
        "error count for ",
        "The number of errors happened while running the test for ")

    self.events_measure = measure_module.MeasureInt(
        events_name, events_desc, "events")
    self.memory_measure = measure_module.MeasureFloat(
        memory_name, memory_desc)
    self.cpu_measure = measure_module.MeasureFloat(cpu_name, cpu_desc)
    self.error_measure = measure_module.MeasureInt(error_name, error_desc)

    # Sum for event counts, last-value for the two gauges, count for errors.
    self.events_measure_view = view_module.View(
        events_name, events_desc, [], self.events_measure,
        aggregation_module.SumAggregation())
    self.memory_measure_view = view_module.View(
        memory_name, memory_desc, [], self.memory_measure,
        aggregation_module.LastValueAggregation())
    self.cpu_measure_view = view_module.View(
        cpu_name, cpu_desc, [], self.cpu_measure,
        aggregation_module.LastValueAggregation())
    self.error_measure_view = view_module.View(
        error_name, error_desc, [], self.error_measure,
        aggregation_module.CountAggregation())

    for view in (self.events_measure_view, self.memory_measure_view,
                 self.cpu_measure_view, self.error_measure_view):
        self.view_manager.register_view(view)
    self.mmap = self.stats_recorder.new_measurement_map()
def test_new_metrics_exporter_no_standard_metrics(self, exporter_mock):
    """With enable_standard_metrics=False, only one producer is registered
    and it is not the standard-metrics producer."""
    patch_target = ('opencensus.ext.azure.metrics_exporter'
                    '.heartbeat_metrics.enable_heartbeat_metrics')
    with mock.patch(patch_target) as hb:
        hb.return_value = None
        ikey = '12345678-1234-5678-abcd-12345678abcd'
        exporter = metrics_exporter.new_metrics_exporter(
            instrumentation_key=ikey, enable_standard_metrics=False)
        self.assertEqual(exporter.options.instrumentation_key, ikey)
        self.assertEqual(len(exporter_mock.call_args_list), 1)
        producers = exporter_mock.call_args[0][0]
        self.assertEqual(len(producers), 1)
        self.assertNotIsInstance(
            producers[0], standard_metrics.AzureStandardMetricsProducer)
def main():
    """Record one carrot measurement and export it to Azure Monitor."""
    # Metrics are pushed every 2 seconds.
    exporter = metrics_exporter.new_metrics_exporter(export_interval=2)
    view_manager.register_exporter(exporter)
    view_manager.register_view(CARROTS_VIEW)

    measurement_map = stats_recorder.new_measurement_map()
    measurement_map.measure_int_put(CARROTS_MEASURE, 1000)
    measurement_map.record(tag_map_module.TagMap())

    # Keep the process alive long enough for the exporter to flush.
    time.sleep(10)
    print("Done recording metrics")
def __init__(self, export_metrics=False):
    """Wire up the date-check request counter; optionally export it to
    Azure Monitor.

    :param export_metrics: when True, register an Azure metrics exporter
        using the configured connection string.
    """
    description = "number of dates check requests for all weeks"
    self.nb_check_requests = measure_module.MeasureInt(
        "nb_check_requests", description, "nb")
    self.nb_check_requests_view = view_module.View(
        "nb_check_requests view", description, [],
        self.nb_check_requests, aggregation_module.CountAggregation())
    view_manager.register_view(self.nb_check_requests_view)

    self.mmap = stats_recorder.new_measurement_map()
    self.tmap = tag_map_module.TagMap()

    if export_metrics:
        view_manager.register_exporter(
            metrics_exporter.new_metrics_exporter(
                connection_string=azure_insights.connection_string))
def main():
    """Print virtual-memory stats while the Azure exporter streams metrics."""
    # TODO: you need to specify the instrumentation key in a connection string
    # and place it in the APPLICATIONINSIGHTS_CONNECTION_STRING
    # environment variable.
    # Standard metrics can be disabled by passing
    # enable_standard_metrics=False to new_metrics_exporter().
    _exporter = metrics_exporter.new_metrics_exporter()
    print(_exporter.max_batch_size)

    for _ in range(100):
        print(psutil.virtual_memory())
        time.sleep(5)
    print("Done recording metrics")
def __init__(self):
    """Initializes Condensed_Binocular using the Run object of azureml.core,
    and adding a metric exporter for ApplicationInsights using the
    opencensus library.
    """
    env = Env()
    env.read_env()

    self.run = Run.get_context(allow_offline=True)
    self.run_id = self.get_run_id(self.run)
    self.offline_run = self.run.id.startswith(constants.OFFLINE_RUN_PREFIX)

    # Standard metrics disabled; export_interval=0 — presumably exports
    # without batching delay, confirm against the exporter docs.
    self.exporter = metrics_exporter.new_metrics_exporter(
        enable_standard_metrics=False,
        export_interval=0,
        connection_string=env("APP_INSIGHTS_CONNECTION_KEY"))
    self.exporter.add_telemetry_processor(self.callback_function)
    stats_module.stats.view_manager.register_exporter(self.exporter)
def main():
    """Record one carrot measurement and give the exporter time to send it."""
    # Enable metrics
    # Set the interval in seconds in which you want to send metrics
    # TODO: you need to specify the instrumentation key in a connection string
    # and place it in the APPLICATIONINSIGHTS_CONNECTION_STRING
    # environment variable.
    exporter = metrics_exporter.new_metrics_exporter()
    view_manager.register_exporter(exporter)
    view_manager.register_view(CARROTS_VIEW)
    mmap = stats_recorder.new_measurement_map()
    tmap = tag_map_module.TagMap()
    mmap.measure_int_put(CARROTS_MEASURE, 1000)
    mmap.record(tmap)
    # Fix: the exporter sends on a background interval (15.0s by default);
    # without waiting, the process exits before anything is exported.
    import time
    time.sleep(60)
    print("Done recording metrics")
def main():
    """Record 100 request counts tagged with a url and export them."""
    exporter = metrics_exporter.new_metrics_exporter()
    view_manager.register_exporter(exporter)
    view_manager.register_view(NUM_REQUESTS_VIEW)

    measurement_map = stats_recorder.new_measurement_map()
    tags = tag_map_module.TagMap()
    tags.insert("url", "http://example.com")

    for count in range(100):
        print(count)
        measurement_map.measure_int_put(REQUEST_MEASURE, count)
        measurement_map.record(tags)
        time.sleep(1)
    print("Done recording metrics")
def main(port):
    """Start the tornado application; enable Azure metrics when configured."""
    tornado.options.parse_command_line()
    loop = tornado.ioloop.IOLoop.current()

    # Only start the metric exporter when a connection string is configured.
    if os.environ.get("APPLICATIONINSIGHTS_CONNECTION_STRING"):
        _exporter = metrics_exporter.new_metrics_exporter()
        app_log.info("Metric exporter started.")

    # initialize AD extension attributes
    loop.run_sync(graph.ensure_custom_attrs_exist)
    graph.keep_jwt_keys_updated()

    failure_limit = int(os.environ.get("CONSECUTIVE_FAILURE_LIMIT", "3"))
    handlers.start_app(
        endpoints(),
        port,
        consecutive_failures=0,
        consecutive_failure_limit=failure_limit,
        xheaders=True,
    )
def __init__(self):
    """Read the App Insights key from the environment and, when present,
    attach an Azure log handler and metrics exporter.
    """
    self.metrics = {}
    self.logger = logging.getLogger(__name__)
    self.appinsights_key = None

    raw_key = getenv(APPINSIGHTS_INSTRUMENTATIONKEY, None)
    if raw_key and len(raw_key.strip()) > 0:
        self.appinsights_key = raw_key.strip()

    if self.appinsights_key:
        # Fix: build the connection string from the validated, stripped key
        # instead of re-reading the environment — the raw value may carry
        # whitespace, and str(getenv(...)) could yield the string "None".
        connection_string = "InstrumentationKey=" + self.appinsights_key
        self.logger.addHandler(
            AzureLogHandler(connection_string=connection_string))
        exporter = metrics_exporter.new_metrics_exporter(
            connection_string=connection_string)
        view_manager.register_exporter(exporter)
def main():
    """Record 100 random latency samples, exported every 5 seconds."""
    exporter = metrics_exporter.new_metrics_exporter(export_interval=5)
    view_manager.register_exporter(exporter)
    view_manager.register_view(latency_view)

    measurement_map = stats_recorder.new_measurement_map()
    tags = tag_map_module.TagMap()

    for i in range(100):
        # Synthetic latency in the range [0, 5000) ms.
        ms = random.random() * 5 * 1000
        print("Latency {0}:{1}".format(i, ms))
        measurement_map.measure_float_put(m_latency_ms, ms)
        measurement_map.record(tags)
        time.sleep(1)
    print("Done recording metrics")
def main():
    """Record one carrot measurement and wait for the exporter to flush."""
    exporter = metrics_exporter.new_metrics_exporter(
        connection_string='InstrumentationKey=<Your Key>')
    view_manager.register_exporter(exporter)
    view_manager.register_view(CARROTS_VIEW)

    measurement_map = stats_recorder.new_measurement_map()
    measurement_map.measure_int_put(CARROTS_MEASURE, 1000)
    measurement_map.record(tag_map_module.TagMap())

    # Default export interval is every 15.0s; the application must run for
    # at least that long so the exporter meets the interval. Sleeping here
    # fulfils that.
    time.sleep(60)
    print("Done recording metrics")
def main():
    """Record 100 request counts tagged with a url and export them."""
    # TODO: you need to specify the instrumentation key in a connection string
    # and place it in the APPLICATIONINSIGHTS_CONNECTION_STRING
    # environment variable.
    exporter = metrics_exporter.new_metrics_exporter()
    view_manager.register_exporter(exporter)
    view_manager.register_view(NUM_REQUESTS_VIEW)

    measurement_map = stats_recorder.new_measurement_map()
    tags = tag_map_module.TagMap()
    tags.insert("url", "http://example.com")

    for count in range(100):
        print(count)
        measurement_map.measure_int_put(REQUEST_MEASURE, count)
        measurement_map.record(tags)
        time.sleep(1)
    print("Done recording metrics")
def setupOpenCensusAndPrometheusExporter():
    """Attach the Azure log handler and metrics exporter, then register
    all views."""
    # __name__ contains the full name of the current module
    logger = logging.getLogger(__name__)
    logger.addHandler(AzureLogHandler(
        connection_string='InstrumentationKey=<Your Key>'
    ))

    view_manager = stats_module.stats.view_manager
    exporter = metrics_exporter.new_metrics_exporter(
        connection_string='InstrumentationKey=<Your Key>')
    view_manager.register_exporter(exporter)

    registerAllViews(view_manager)
    view_manager.register_view(latency_view)

    # Brief pause before returning to the caller.
    time.sleep(5)
from opencensus.tags import tag_map as tag_map_module

stats = stats_module.stats
view_manager = stats.view_manager
stats_recorder = stats.stats_recorder

# A simple counter: one increment per prompt shown.
prompt_measure = measure_module.MeasureInt("prompts",
                                           "number of prompts",
                                           "prompts")
prompt_view = view_module.View("prompt view",
                               "number of prompts",
                               [], prompt_measure,
                               aggregation_module.CountAggregation())
view_manager.register_view(prompt_view)

mmap = stats_recorder.new_measurement_map()
tmap = tag_map_module.TagMap()

exporter = metrics_exporter.new_metrics_exporter(
    connection_string=connection_string)
view_manager.register_exporter(exporter)


def prompt():
    """Wait for the user, record one prompt, and print the data point."""
    input("Press enter.")
    mmap.measure_int_put(prompt_measure, 1)
    mmap.record(tmap)
    metrics = list(mmap.measure_to_view_map.get_metrics(datetime.utcnow()))
    print(metrics[0].time_series[0].points[0])


def main():
    """Prompt forever; each prompt bumps the counter."""
    while True:
        prompt()
# Logging logger = logging.getLogger(__name__) logger.addHandler(AzureEventHandler( connection_string='InstrumentationKey=ee6147a6-6162-463a-a119-bfeaff3147f6') ) logger.addHandler(AzureLogHandler( connection_string='InstrumentationKey=ee6147a6-6162-463a-a119-bfeaff3147f6') ) logger.setLevel(logging.INFO) # Metrics exporter = metrics_exporter.new_metrics_exporter( enable_standard_metrics=True, connection_string='InstrumentationKey=ee6147a6-6162-463a-a119-bfeaff3147f6') # Tracing tracer = Tracer( exporter=AzureExporter( connection_string='InstrumentationKey=ee6147a6-6162-463a-a119-bfeaff3147f6'), sampler=ProbabilitySampler(1.0), ) app = Flask(__name__) # Requests middleware = FlaskMiddleware( app, exporter=AzureExporter(connection_string="InstrumentationKey=ee6147a6-6162-463a-a119-bfeaff3147f6"),
# Logging logger = logging.getLogger(__name__) handler = AzureLogHandler( connection_string='InstrumentationKey=3d3e7bec-d2e0-41b6-b690-7c9cb2020fc2' ) logger.addHandler(handler) logger.setLevel(logging.INFO) logger.addHandler( AzureEventHandler( connection_string= 'InstrumentationKey=3d3e7bec-d2e0-41b6-b690-7c9cb2020fc2')) # Metrics exporter = metrics_exporter.new_metrics_exporter( enable_standard_metrics=True, connection_string='InstrumentationKey=3d3e7bec-d2e0-41b6-b690-7c9cb2020fc2' ) # Tracing tracer = Tracer( exporter=AzureExporter( connection_string= 'InstrumentationKey=3d3e7bec-d2e0-41b6-b690-7c9cb2020fc2'), sampler=ProbabilitySampler(1.0), ) app = Flask(__name__) # Requests middleware = FlaskMiddleware( app,
from opencensus.stats import stats as stats_module from opencensus.stats import view as view_module from opencensus.tags import tag_map as tag_map_module from opencensus.ext.azure.trace_exporter import AzureExporter from opencensus.trace.samplers import ProbabilitySampler from opencensus.trace.tracer import Tracer from opencensus.ext.flask.flask_middleware import FlaskMiddleware # Logging #logger = # TODO: Setup logger logger = logging.getLogger(__name__) logger.addHandler(AzureLogHandler(connection_string='InstrumentationKey=d2989c85-e9da-4470-97b3-2c4186d9785f')) # Metrics #exporter = # TODO: Setup exporter exporter = metrics_exporter.new_metrics_exporter( enable_standard_metrics=True, connection_string='InstrumentationKey=d2989c85-e9da-4470-97b3-2c4186d9785f') # Tracing #tracer = # TODO: Setup tracer tracer = Tracer( exporter=AzureExporter( connection_string='InstrumentationKey=d2989c85-e9da-4470-97b3-2c4186d9785f'), sampler=ProbabilitySampler(1.0), ) app = Flask(__name__) # Requests #middleware = # TODO: Setup flask middleware middleware = FlaskMiddleware(
# Trace integrations for requests library
config_integration.trace_integrations(['requests'])

# FlaskMiddleware will track requests for the Flask application and send
# request/dependency telemetry to Azure Monitor
middleware = FlaskMiddleware(app)


def callback_function(envelope):
    """Telemetry processor: rewrite the cloud role name of the app."""
    envelope.tags['ai.cloud.role'] = "To-Do App"
    return True


# Adds the telemetry processor to the trace exporter
middleware.exporter.add_telemetry_processor(callback_function)

# Exporter for metrics, will send metrics data
exporter = metrics_exporter.new_metrics_exporter(
    enable_standard_metrics=False,
    connection_string='InstrumentationKey=' + Config.INSTRUMENTATION_KEY)

# Exporter for logs, will send logging data
logger.addHandler(AzureLogHandler(
    connection_string='InstrumentationKey=' + Config.INSTRUMENTATION_KEY))

if __name__ == '__main__':
    app.run(host='localhost', port=5000, threaded=True, debug=True)
from opencensus.tags import tag_map as tag_map_module from opencensus.ext.azure.trace_exporter import AzureExporter from opencensus.trace.samplers import ProbabilitySampler from opencensus.trace.tracer import Tracer from opencensus.ext.flask.flask_middleware import FlaskMiddleware from opencensus.ext.azure.log_exporter import AzureEventHandler app_insught_intrumentation_key = '841b1abd-f5e6-499e-a744-d59a6ca8762a' # Logging logger = logging.getLogger(__name__) logger.addHandler(AzureEventHandler(connection_string='InstrumentationKey='+app_insught_intrumentation_key)) logger.setLevel(logging.INFO) # Metrics exporter = metrics_exporter.new_metrics_exporter( enable_standard_metrics=True, connection_string='InstrumentationKey='+app_insught_intrumentation_key) # Tracing tracer = Tracer( exporter=AzureExporter( connection_string='InstrumentationKey='+app_insught_intrumentation_key), sampler=ProbabilitySampler(1.0), ) app = Flask(__name__) # Requests middleware = FlaskMiddleware( app, exporter=AzureExporter(connection_string="InstrumentationKey="+app_insught_intrumentation_key),
from opencensus.tags import tag_map as tag_map_module
from opencensus.ext.azure.trace_exporter import AzureExporter
from opencensus.trace.samplers import ProbabilitySampler
from opencensus.trace.tracer import Tracer
from opencensus.ext.flask.flask_middleware import FlaskMiddleware

# One place for the App Insights connection string used below.
_CONNECTION_STRING = (
    'InstrumentationKey=d1c291be-3bd7-4d5c-9eaa-618310979e86')

# Logging
logger = logging.getLogger(__name__)
logger.addHandler(AzureLogHandler(connection_string=_CONNECTION_STRING))

# Metrics
exporter = metrics_exporter.new_metrics_exporter(
    enable_standard_metrics=True,
    connection_string=_CONNECTION_STRING)

# Tracing
tracer = Tracer(
    exporter=AzureExporter(connection_string=_CONNECTION_STRING),
    sampler=ProbabilitySampler(1.0),
)

app = Flask(__name__)

# Requests
# TODO: Setup flask middleware
from opencensus.tags import tag_map as tag_map_module
from opencensus.ext.azure.trace_exporter import AzureExporter
from opencensus.trace.samplers import ProbabilitySampler
from opencensus.trace.tracer import Tracer
from opencensus.ext.flask.flask_middleware import FlaskMiddleware
from applicationinsights import TelemetryClient

# One place for the App Insights connection string used below.
_CONNECTION_STRING = (
    'InstrumentationKey=5e2837e6-5c38-4d77-9719-64e7f2d519a4')

# Logging
logger = logging.getLogger(__name__)
handler = AzureLogHandler(connection_string=_CONNECTION_STRING)
logger.addHandler(handler)
logger.setLevel(logging.INFO)

# Metrics
exporter = metrics_exporter.new_metrics_exporter(
    enable_standard_metrics=True,
    connection_string=_CONNECTION_STRING)

# Tracing
tracer = Tracer(
    exporter=AzureExporter(connection_string=_CONNECTION_STRING),
    sampler=ProbabilitySampler(1.0),
)

# telemetry_client = TelemetryClient('5e2837e6-5c38-4d77-9719-64e7f2d519a4')

app = Flask(__name__)

# Requests
'InstrumentationKey=75bbbb31-b712-4e5d-abc7-8eb89689ce08;IngestionEndpoint=https://westus2-1.in.applicationinsights.azure.com/' )) #Events eventlogger = logging.getLogger('eventlogger') eventlogger.addHandler( AzureEventHandler( connection_string= 'InstrumentationKey=75bbbb31-b712-4e5d-abc7-8eb89689ce08;IngestionEndpoint=https://westus2-1.in.applicationinsights.azure.com/' )) eventlogger.setLevel(logging.INFO) # Metrics exporter = metrics_exporter.new_metrics_exporter( enable_standard_metrics=True, connection_string= 'InstrumentationKey=75bbbb31-b712-4e5d-abc7-8eb89689ce08;IngestionEndpoint=https://westus2-1.in.applicationinsights.azure.com/' ) # Tracing tracer = Tracer( exporter=AzureExporter( connection_string= 'InstrumentationKey=75bbbb31-b712-4e5d-abc7-8eb89689ce08;IngestionEndpoint=https://westus2-1.in.applicationinsights.azure.com/' ), sampler=ProbabilitySampler(1.0), ) app = Flask(__name__) # Requests # middleware = # TODO: Setup flask middleware
exporter=AzureExporter( connection_string= 'InstrumentationKey=a830027f-3b39-4371-8877-8ee0c3050e58'), sampler=ProbabilitySampler(rate=1.0), ) # TODO: Setup flask middleware # Logging logger = logging.getLogger(__name__) logger.addHandler( AzureLogHandler(connection_string= 'InstrumentationKey=a830027f-3b39-4371-8877-8ee0c3050e58')) # Metrics TODO: Setup exporter exporter = metrics_exporter.new_metrics_exporter( enable_standard_metrics=True, connection_string='InstrumentationKey=a830027f-3b39-4371-8877-8ee0c3050e58' ) # Tracing tracer = Tracer( exporter=AzureExporter( connection_string= 'InstrumentationKey=a830027f-3b39-4371-8877-8ee0c3050e58'), sampler=ProbabilitySampler(1.0), ) # Load configurations from environment or config file app.config.from_pyfile('config_file.cfg') if ("VOTE1VALUE" in os.environ and os.environ['VOTE1VALUE']): button1 = os.environ['VOTE1VALUE'] else:
logger = logging.getLogger(__name__) logger.setLevel(logging.DEBUG) # load configuration to get around hard coded tokens config = configparser.ConfigParser() with open('config.ini') as config_file: config.read_file(config_file) if config.has_option('Configuration', 'azure_log'): logger.addHandler( AzureLogHandler( connection_string=config["Configuration"]["azure_log"])) # the default metrics exporter will include # stats like memory, CPU, etc exporter = metrics_exporter.new_metrics_exporter( connection_string=config["Configuration"]["azure_log"]) # startup stuff print('discordpy') print(discord.__version__) client = commands.Bot( command_prefix='>>', description='https://github.com/Chris-Johnston/CssBot-Py', case_insensitive=True) # this is where extensions are added by default default_extensions = [ 'cogs.basic', 'cogs.courseInfo', 'cogs.number_utils', 'cogs.hardware_utils', 'cogs.analytics', 'cogs.gpa', 'cogs.manpage', 'cogs.markov', 'cogs.hyphen', 'cogs.crob', 'cogs.starboard', 'cogs.advent',
from opencensus.trace.tracer import Tracer
from opencensus.ext.flask.flask_middleware import FlaskMiddleware
from opencensus.ext.azure.log_exporter import AzureEventHandler

# One place for the App Insights connection string used below.
_CONNECTION_STRING = (
    'InstrumentationKey=15d02af3-bb8b-48ab-937e-7f84588dfce0'
    ';IngestionEndpoint=https://westus-0.in.applicationinsights.azure.com/')

# Logging
logger = logging.getLogger(__name__)
logger.addHandler(AzureEventHandler(connection_string=_CONNECTION_STRING))

# Metrics
exporter = metrics_exporter.new_metrics_exporter(
    enable_standard_metrics=True,
    connection_string=_CONNECTION_STRING)

# Tracing
tracer = Tracer(
    exporter=AzureExporter(connection_string=_CONNECTION_STRING),
    sampler=ProbabilitySampler(1.0),
)

app = Flask(__name__)

# Requests
# One sum-aggregated counter for rcs log lines.
prompt_measure = measure_module.MeasureInt("rcs_log_num",
                                           "number of rcs_logs", "logs")
prompt_view = view_module.View("rcs_log_view_3", "number of logs", [],
                               prompt_measure,
                               aggregation_module.SumAggregation())
view_manager.register_view(prompt_view)

mmap = stats_recorder.new_measurement_map()
tmap = tag_map_module.TagMap()

# Register the metrics exporter
##customMetrics
## | where name =='prompt view'
from opencensus.ext.azure import metrics_exporter

exporter = metrics_exporter.new_metrics_exporter(
    connection_string='InstrumentationKey=<Your Key>')
view_manager.register_exporter(exporter)


# metrics usage example
def test_metrics():
    """Record 100 random values, printing the resulting data point each time."""
    for _ in range(100):
        value = random.randint(1, 10)
        mmap.measure_int_put(prompt_measure, value)
        mmap.record(tmap)
        metrics = list(
            mmap.measure_to_view_map.get_metrics(datetime.utcnow()))
        print(value, ' : ', metrics[0].time_series[0].points[0])
        time.sleep(1)


# Keep the process alive so the exporter can flush.
time.sleep(100)
from opencensus.ext.flask.flask_middleware import FlaskMiddleware
from opencensus.trace.samplers import ProbabilitySampler

# One place for the App Insights connection string used below.
_CONNECTION_STRING = (
    'InstrumentationKey=bdba70b7-8e6e-4adb-adb5-8fd02df32aea'
    ';IngestionEndpoint=https://westus2-1.in.applicationinsights.azure.com/')

# Logging
logger = logging.getLogger(__name__)
logger.addHandler(AzureLogHandler(connection_string=_CONNECTION_STRING))
logger.setLevel(logging.INFO)

# Metrics
exporter = metrics_exporter.new_metrics_exporter(
    enable_standard_metrics=True,
    connection_string=_CONNECTION_STRING)

# Tracing
tracer = Tracer(
    exporter=AzureExporter(connection_string=_CONNECTION_STRING),
    sampler=ProbabilitySampler(1.0),
)

app = Flask(__name__)

# Requests
from opencensus.stats import view as view_module
from opencensus.tags import tag_map as tag_map_module
from opencensus.ext.azure.trace_exporter import AzureExporter
from opencensus.trace.samplers import ProbabilitySampler
from opencensus.trace.tracer import Tracer
from opencensus.ext.flask.flask_middleware import FlaskMiddleware

CONN_STR = 'InstrumentationKey=84fadb80-b028-4f05-90f7-300fa367c8fd'

# Logging
logger = logging.getLogger(__name__)
logger.addHandler(AzureLogHandler(connection_string=CONN_STR))
logger.setLevel(logging.INFO)

# Metrics — standard (performance) metrics enabled.
exporter = metrics_exporter.new_metrics_exporter(
    enable_standard_metrics=True,
    connection_string=CONN_STR)

# Tracing — sample every trace.
tracer = Tracer(
    exporter=AzureExporter(connection_string=CONN_STR),
    sampler=ProbabilitySampler(1.0),
)

app = Flask(__name__)

# Requests
middleware = FlaskMiddleware(
    app,
    exporter=AzureExporter(connection_string=CONN_STR),
    sampler=ProbabilitySampler(rate=1.0),
)