def __init__(self, run):
        print('Initializing the AppInsightsLogger')
        self.env = Env()
        self.run_id = self.get_run_id_and_set_context(run)

        # Prepare integrations and initialize tracer
        config_integration.trace_integrations(['httplib', 'logging'])
        texporter = AzureExporter(
            connection_string=self.env.app_insights_connection_string)
        texporter.add_telemetry_processor(self.callback_function)
        self.tracer = Tracer(exporter=texporter,
                             sampler=ProbabilitySampler(
                                 self.env.trace_sampling_rate))

        # Create AppInsights Handler and set log format
        self.logger = logging.getLogger(__name__)
        self.logger.setLevel(
            getattr(logging, self.env.log_level.upper(), "WARNING"))
        handler = AzureLogHandler(
            connection_string=self.env.app_insights_connection_string,
            logging_sampling_rate=self.env.log_sampling_rate,
        )
        handler.add_telemetry_processor(self.callback_function)
        self.logger.addHandler(handler)

        # Initialize the metrics exporter
        mexporter = metrics_exporter.new_metrics_exporter(
            enable_standard_metrics=self.env.enable_standard_metrics,
            export_interval=self.env.metrics_export_interval,
            connection_string=self.env.app_insights_connection_string,
        )
        mexporter.add_telemetry_processor(self.callback_function)
        stats_module.stats.view_manager.register_exporter(mexporter)
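# A minimal sketch of the callback_function telemetry processor the
# constructor above assumes (hypothetical; the original class defines its
# own). Azure exporters invoke it once per telemetry envelope, and
# returning False drops the item instead of sending it.
def callback_function(self, envelope):
    # Stamp the run ID onto each item so telemetry can be filtered per run
    # in App Insights; 'ai.cloud.role' is a standard App Insights tag.
    envelope.tags['ai.cloud.role'] = self.run_id
    return True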
Example #2
def get_flask_middleware(app):
    config_integration.trace_integrations(['requests'])
    exporter = _get_exporter()
    return FlaskMiddleware(
        app,
        sampler=opencensus.trace.tracer.samplers.AlwaysOnSampler(),
        exporter=exporter)
Example #3
def initialize_logging(logging_level: int, correlation_id: str = None) -> logging.LoggerAdapter:
    """
    Adds the Application Insights handler for the root logger and sets the given logging level.
    Creates and returns a logger adapter that integrates the correlation ID, if given, to the log messages.

    :param logging_level: The logging level to set, e.g. logging.WARNING.
    :param correlation_id: Optional. The correlation ID that is passed on to the operation_Id in App Insights.
    :returns: A newly created logger adapter.
    """
    logger = logging.getLogger()
    logger.addHandler(logging.StreamHandler())  # For logging into console
    app_insights_connection_string = os.getenv("APPLICATIONINSIGHTS_CONNECTION_STRING")

    try:
        logger.addHandler(AzureLogHandler(connection_string=app_insights_connection_string))
    except ValueError as e:
        logger.error(f"Failed to set Application Insights logger handler: {e}")

    config_integration.trace_integrations(['logging'])
    logging.basicConfig(level=logging_level, format='%(asctime)s traceId=%(traceId)s spanId=%(spanId)s %(message)s')
    Tracer(sampler=AlwaysOnSampler())
    logger.setLevel(logging_level)

    extra = None

    if correlation_id:
        extra = {'traceId': correlation_id}

    adapter = logging.LoggerAdapter(logger, extra)
    adapter.debug(f"Logger adapter initialized with extra: {extra}")

    return adapter
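# Usage sketch for initialize_logging above; the correlation ID shown is an
# arbitrary example value, not something from the original code.
adapter = initialize_logging(logging.INFO, correlation_id='8d3f9a2e')
adapter.info('request accepted')  # the adapter injects traceId=8d3f9a2e via `extra`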
Example #4
    def setUp(self):
        self.exporter = CapturingExporter()
        self.tracer = tracer_module.Tracer(
            sampler=AlwaysOnSampler(),
            exporter=self.exporter,
            propagator=GoogleCloudFormatPropagator())
        config_integration.trace_integrations(['redis'], tracer=self.tracer)
        self.client = redis.StrictRedis()
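# Several fixtures in this collection use a CapturingExporter without
# showing its definition. A minimal sketch, assuming all it needs to do is
# record exported span data in memory for later assertions:
class CapturingExporter(object):
    """Hypothetical test exporter: stores span data instead of sending it."""

    def __init__(self):
        self.spans = []

    def export(self, span_datas):
        # The tracer hands finished spans to export(); keep them so tests
        # can inspect what would have been sent.
        self.spans.append(span_datas)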
Example #5
def __get_logger() -> Logger:
    # Create a logger with Azure Application Insights
    config_integration.trace_integrations(['logging'])
    logger = logging.getLogger(__name__)
    handler = AzureLogHandler(
        connection_string=os.getenv('ApplicationInsights'))
    handler.setFormatter(logging.Formatter('%(traceId)s %(message)s'))
    logger.addHandler(handler)
    return logger
Example #6
    def setUp(self):
        super(TestClient, self).setUp()
        self.exporter = CapturingExporter()
        self.tracer = tracer_module.Tracer(
            sampler=AlwaysOnSampler(),
            exporter=self.exporter,
            propagator=GoogleCloudFormatPropagator())

        config_integration.trace_integrations(['tornado_httpclient'],
                                              tracer=self.tracer)
Example #7
def initialize_opencensus(config, flask_app):
    if config.opencensus_enable:
        agent = config.opencensus_agent
        print(f"OpenCensus enabled and reporting to {agent} using gRPC")
        exporter = ocagent_trace_exporter.TraceExporter(service_name=SERVICE_NAME, endpoint=agent)
        sampler = samplers.ProbabilitySampler(rate=config.opencensus_sample_rate)
        config_integration.trace_integrations(['sqlalchemy'])
        FlaskMiddleware(flask_app, exporter=exporter, sampler=sampler)
        flask_app.wsgi_app = SentryIoContextMiddleware(flask_app.wsgi_app)

        flask.before_render_template.connect(render_template_start, flask_app)
        flask.template_rendered.connect(render_template_end, flask_app)
Example #8
    def __init__(self, config=None):
        """Create an instance of the Logger class.

        Args:
            config (dict, optional):
                Contains the settings for the logger, e.g.
                {"log_level": "DEBUG", "logging_enabled": "true",
                 "app_insights_key": "<app insights key>"}
        """
        config_integration.trace_integrations(["logging"])
        config_integration.trace_integrations(['requests'])
        self.config = {"log_level": logging.INFO, "logging_enabled": "true"}
        self.APPINSIGHTS_INSTRUMENTATION_KEY = "APPINSIGHTS_INSTRUMENTATION_KEY"
        self.update_config(config)
Example #9
def initialize_logging(logging_level: int, correlation_id: str, add_console_handler: bool = False) -> logging.LoggerAdapter:
    """
    Adds the Application Insights handler for the root logger and sets the given logging level.
    Creates and returns a logger adapter that integrates the correlation ID, if given, to the log messages.
    Note: This should be called only once, otherwise duplicate log entries could be produced.

    :param logging_level: The logging level to set, e.g. logging.WARNING.
    :param correlation_id: The correlation ID that is passed on to the operation_Id in App Insights.
    :param add_console_handler: Optional. If True, also attach a console (stream) handler.
    :returns: A newly created logger adapter.
    """
    logger = logging.getLogger()

    # When using sessions and NEXT_AVAILABLE_SESSION we see regular exceptions which are actually expected
    # See https://github.com/Azure/azure-sdk-for-python/issues/9402
    # Other log entries such as 'link detach' also confuse the logs, and are expected.
    # We don't want these making the logs any noisier so we raise the logging level for that logger here
    # To inspect all the loggers, use -> loggers = [logging.getLogger(name) for name in logging.root.manager.loggerDict]
    for logger_name in LOGGERS_FOR_ERRORS_ONLY:
        logging.getLogger(logger_name).setLevel(logging.ERROR)

    if add_console_handler:
        console_formatter = logging.Formatter(fmt='%(module)-7s %(name)-7s %(process)-7s %(asctime)s %(levelname)-7s %(message)s')
        console_handler = logging.StreamHandler()
        console_handler.setFormatter(console_formatter)
        logger.addHandler(console_handler)

    try:
        azurelog_formatter = AzureLogFormatter()
        # picks up APPLICATIONINSIGHTS_CONNECTION_STRING automatically
        azurelog_handler = AzureLogHandler()
        azurelog_handler.setFormatter(azurelog_formatter)
        logger.addHandler(azurelog_handler)
    except ValueError as e:
        logger.error(f"Failed to set Application Insights logger handler: {e}")

    config_integration.trace_integrations(['logging'])
    logging.basicConfig(level=logging_level, format='%(asctime)s traceId=%(traceId)s spanId=%(spanId)s %(message)s')
    Tracer(sampler=AlwaysOnSampler())
    logger.setLevel(logging_level)

    extra = None

    if correlation_id:
        extra = {'traceId': correlation_id}

    adapter = logging.LoggerAdapter(logger, extra)
    adapter.debug(f"Logger adapter initialized with extra: {extra}")

    return adapter
Example #10
    def setUp(self):
        self.exporter = CapturingExporter()
        self.tracer = tracer_module.Tracer(
            sampler=AlwaysOnSampler(),
            exporter=self.exporter,
            propagator=GoogleCloudFormatPropagator()
        )

        config_integration.trace_integrations(['redis'], tracer=self.tracer)

        self.client = redis.StrictRedis()

        # Stash away the original methods for
        # after-test restoration.
        self._execute_command = redis.StrictRedis.execute_command
        self._pipeline = redis.StrictRedis.pipeline
Example #11
    def configure(cls,
                  libraries: List[str],
                  instrumentation_key: Optional[str] = None,
                  *args,
                  **kwargs):
        """Configure libraries for integrating into OpenCensus extension.
        Initialize an Azure Exporter that will write traces to AppInsights.

        :type libraries: List[str]
        :param libraries: the libraries that need to be integrated into
            OpenCensus tracer. (e.g. ['requests'])
        :type instrumentation_key: Optional[str]
        :param instrumentation_key: the instrumentation key for azure exporter
            to write into. If this is set to None, the extension will write to
            the AppInsight resource defined in APPINSIGHTS_INSTRUMENTATIONKEY
        """
        cls._has_configure_called = True

        cls._trace_integrations = config_integration.trace_integrations(
            libraries)

        if not instrumentation_key and not cls._default_key:
            raise FunctionExtensionException(
                'Please ensure either instrumentation_key is passed into '
                'OpenCensusExtension.configure() method, or the app setting '
                'APPINSIGHTS_INSTRUMENTATIONKEY is provided.')

        cls._exporter = AzureExporter(
            instrumentation_key=instrumentation_key or cls._default_key)
Example #12
def entrypoint(request):
    """Execute tasks within a parent trace span when invoked via HTTP."""
    # Get the trace_id from the request's HTTP header
    span_context = \
        propagator.from_header(request.headers['X-Cloud-Trace-Context'])
    tracer.span_context = span_context

    # Enable tracing HTTP/gRPC calls issued by Google Cloud client libraries
    config_integration.trace_integrations(['google_cloud_clientlibs'], tracer)

    # Wrap function logic in a parent trace
    function_name = os.environ['FUNCTION_NAME']
    with tracer.span(name=function_name):
        add_journal_entry()

    url = 'https://console.cloud.google.com/traces/traces'
    return f'Visit `{url}` to see tracing data for this request.'
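# The handler above relies on module-level `propagator` and `tracer`
# objects it does not show. A plausible setup, assuming a Stackdriver
# exporter (an assumption, not part of the original snippet):
from opencensus.ext.stackdriver import trace_exporter as stackdriver_exporter
from opencensus.trace import tracer as tracer_module
from opencensus.trace.propagation import google_cloud_format

propagator = google_cloud_format.GoogleCloudFormatPropagator()
tracer = tracer_module.Tracer(exporter=stackdriver_exporter.StackdriverExporter())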
Example #13
    def test_trace_integrations_not_exist(self):
        integrations = {
            'test_not_exists': True,
        }

        integrated = config_integration.trace_integrations(integrations)

        self.assertEqual(integrated, [])
Example #14
    def init_app(self, app):
        INTEGRATIONS = ['httplib', 'sqlalchemy', 'requests']

        export_LocalForwarder = trace_exporter.TraceExporter(
            # FIXME - Move to config
            service_name=os.getenv('SERVICE_NAME', 'python-service'),
            endpoint=os.getenv('OCAGENT_TRACE_EXPORTER_ENDPOINT'),
            transport=BackgroundThreadTransport)

        tracer = tracer_module.Tracer(exporter=export_LocalForwarder,
                                      propagator=TraceContextPropagator())
        config_integration.trace_integrations(INTEGRATIONS, tracer=tracer)

        # Hookup OpenCensus to Flask
        FlaskMiddleware(app=app, exporter=export_LocalForwarder)

        # Also hookup AppInsights for logging
        AppInsights(app)
Example #15
def create_trace_dependency(name=name,
                            message=message,
                            message_before=message_before,
                            message_after=message_after):
    config_integration.trace_integrations(['logging'])

    logger = logging.getLogger(__name__)

    handler = AzureLogHandler(connection_string='InstrumentationKey=%s' %
                              (instrument_key))
    handler.setFormatter(
        logging.Formatter('%(traceId)s %(spanId)s %(message)s'))
    logger.addHandler(handler)

    tracer = Tracer(exporter=AzureExporter(
        connection_string='InstrumentationKey=%s' % (instrument_key)),
                    sampler=ProbabilitySampler(1.0))

    logger.warning(message_before)
    with tracer.span(name=name):
        logger.warning(message)
    logger.warning(message_after)
Example #16
    def configure(cls, libraries, connection_string=None, *args, **kwargs):
        """Configure libraries for integrating into OpenCensus extension.
        Initialize an Azure Exporter that will write traces to AppInsights.
        :type libraries: List[str]
        :param libraries: the libraries opencensus-ext-* that need to be
            integrated into OpenCensus tracer. (e.g. ['requests'])
        :type connection_string: Optional[str]
        :param connection_string: the connection string of azure exporter
            to write into. If this is set to None, the extension will use
            an instrumentation connection string from your app settings.
        """
        cls._trace_integrations = config_integration.trace_integrations(
            libraries)

        cls._exporter = AzureExporter(connection_string=connection_string)
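# Usage sketch for the configure() hook above, assuming it lives on an
# extension class named OpenCensusExtension (the class name and the
# placeholder key are assumptions, not from the original snippet):
OpenCensusExtension.configure(
    libraries=['requests'],
    connection_string='InstrumentationKey=00000000-0000-0000-0000-000000000000')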
Example #17
    def test_trace_integrations(self):
        mock_module = mock.Mock()
        mock_importlib = mock.Mock()
        mock_importlib.import_module.return_value = mock_module
        patch = mock.patch('opencensus.trace.config_integration.importlib',
                           mock_importlib)

        integration_list = ['mysql', 'postgresql']

        with patch:
            integrated = config_integration.trace_integrations(
                integration_list)

        self.assertTrue(mock_module.trace_integration.called)
        self.assertEqual(integrated, integration_list)
Example #18
def create_app(config={}):
    app = Flask('aleph')
    app.config.from_object(settings)
    app.config.update(config)

    if 'postgres' not in settings.DATABASE_URI:
        raise RuntimeError("aleph database must be PostgreSQL!")

    app.config.update({
        'SQLALCHEMY_DATABASE_URI': settings.DATABASE_URI,
        'BABEL_DOMAIN': 'aleph'
    })

    queue = Queue(settings.QUEUE_NAME,
                  routing_key=settings.QUEUE_ROUTING_KEY,
                  queue_arguments={'x-max-priority': 9})
    celery.conf.update(
        imports=('aleph.queues',),
        broker_url=settings.BROKER_URI,
        task_always_eager=settings.EAGER,
        task_eager_propagates=True,
        task_ignore_result=True,
        task_acks_late=False,
        task_queues=(queue,),
        task_default_queue=settings.QUEUE_NAME,
        task_default_routing_key=settings.QUEUE_ROUTING_KEY,
        worker_max_tasks_per_child=1000,
        result_persistent=False,
        beat_schedule={
            'hourly': {
                'task': 'aleph.logic.scheduled.hourly',
                'schedule': crontab(hour='*', minute=0)
            },
            'daily': {
                'task': 'aleph.logic.scheduled.daily',
                'schedule': crontab(hour=5, minute=0)
            }
        },
    )

    migrate.init_app(app, db, directory=settings.ALEMBIC_DIR)
    configure_oauth(app)
    mail.init_app(app)
    db.init_app(app)
    babel.init_app(app)
    CORS(app, origins=settings.CORS_ORIGINS)

    # Enable raven to submit issues to sentry if a DSN is defined. This will
    # report errors from Flask and Celery operation modes to Sentry.
    if settings.SENTRY_DSN:
        sentry.init_app(app,
                        dsn=settings.SENTRY_DSN,
                        logging=True,
                        level=logging.ERROR)
        register_logger_signal(sentry.client)
        register_signal(sentry.client, ignore_expected=True)

    # This executes all registered init-time plugins so that other
    # applications can register their behaviour.
    for plugin in get_extensions('aleph.init'):
        plugin(app=app)
    # Set up opencensus tracing and its integrations. Export collected traces
    # to Stackdriver Trace on a background thread.
    if settings.STACKDRIVER_TRACE_PROJECT_ID:
        exporter = stackdriver_exporter.StackdriverExporter(
            project_id=settings.STACKDRIVER_TRACE_PROJECT_ID,
            transport=BackgroundThreadTransport
        )
        sampler = probability.ProbabilitySampler(
            rate=settings.TRACE_SAMPLING_RATE
        )
        blacklist_paths = ['/healthz', ]
        FlaskMiddleware(
            app, exporter=exporter, sampler=sampler,
            blacklist_paths=blacklist_paths
        )
        integrations = ['postgresql', 'sqlalchemy', 'httplib']
        config_integration.trace_integrations(integrations)
        # Set up logging
        setup_stackdriver_logging()
    return app
Example #19
else:
    trace_exporter = file_exporter.FileExporter(file_name="traces")
    trace_sampler = samplers.AlwaysOnSampler()

middleware = FlaskMiddleware(
    app,
    excludelist_paths=["metadata", "computeMetadata"],  # Don't trace metadata requests
    sampler=trace_sampler,
    exporter=trace_exporter,
    propagator=google_cloud_format.GoogleCloudFormatPropagator(),
)
config_integration.trace_integrations(
    [
        # TODO(#4283): The 'google_cloud_clientlibs' integration is currently not compatible with the
        # 'proto-plus' objects used by the 2.0.0 versions of the client libraries. Investigate best way to hydrate
        # spans in traces for these calls in the future.
        "google_cloud_clientlibs",
        "requests",
        "sqlalchemy",
    ]
)


@zope.event.classhandler.handler(events.MemoryUsageThresholdExceeded)
def memory_condition_handler(event: events.MemoryUsageThresholdExceeded) -> None:
    logging.warning(
        "Memory usage %d is more than limit of %d, forcing gc",
        event.mem_usage,
        event.max_allowed,
    )
    gc.collect()
Example #20
          'Temperatures for San Francisco, CA, US '),
    'Seattle, WA, US':
    Gauge('city_temp_seattle_wa_us', 'Temperatures for Seattle, WA, US'),
    'New York, NY, US':
    Gauge('city_temp_new_york_ny_us', 'Temperatures for New York, NY, US')
}

if os.environ.get('ENABLE_TRACING', None) is not None:
    from opencensus.ext.stackdriver import trace_exporter as stackdriver_exporter
    from opencensus.ext.flask.flask_middleware import FlaskMiddleware
    from opencensus.trace import config_integration

    project = os.environ.get('PROJECT_ID')
    exporter = stackdriver_exporter.StackdriverExporter(project_id=project)
    middleware = FlaskMiddleware(app, exporter=exporter)
    config_integration.trace_integrations(["requests"])


@app.route('/api/weather', methods=['GET'])
def current_weather():
    ret = []

    mgr = owm.weather_manager()
    for city, metric in city_metric.items():
        obs = mgr.weather_at_place(city)
        w = obs.weather
        temp = w.temperature('fahrenheit')
        conditions = {
            'location':
            city,
            'temp_cur':
Example #21
        logging.warning(
            "Using AZURE_APPI_CONNECTION_STRING as AZURE_APPI_AUDIT_CONNECTION_STRING."
        )

    MIDDLEWARE.append("opencensus.ext.django.middleware.OpencensusMiddleware")
    OPENCENSUS = {
        "TRACE": {
            "SAMPLER": "opencensus.trace.samplers.ProbabilitySampler(rate=1)",
            "EXPORTER": f"""opencensus.ext.azure.trace_exporter.AzureExporter(
                connection_string='{AZURE_APPI_CONNECTION_STRING}',
                service_name='dso-api'
            )""",
            "EXCLUDELIST_PATHS": [],
        }
    }
    config_integration.trace_integrations(["logging"])
    azure_json = base_log_fmt.copy()
    azure_json.update({"message": "%(message)s"})
    audit_azure_json = {"audit": True}
    audit_azure_json.update(azure_json)
    LOGGING["formatters"]["azure"] = {"format": json.dumps(azure_json)}
    LOGGING["formatters"]["audit_azure"] = {
        "format": json.dumps(audit_azure_json)
    }
    LOGGING["handlers"]["azure"] = {
        "level": "DEBUG",
        "class": "opencensus.ext.azure.log_exporter.AzureLogHandler",
        "connection_string": AZURE_APPI_CONNECTION_STRING,
        "formatter": "azure",
    }
    LOGGING["handlers"]["audit_azure"] = {
Example #22
from .rasterio_env import rio_env

import logging

# pylint: disable=invalid-name, broad-except

app = Flask(__name__.split('.')[0])
RequestID(app)

tracer = None
if opencensus_tracing_enabled():
    from opencensus.trace import config_integration
    from opencensus.trace.ext.flask.flask_middleware import FlaskMiddleware
    tracer = get_opencensus_tracer()
    integration = ['sqlalchemy']
    config_integration.trace_integrations(integration, tracer=tracer)
    jaegerExporter = get_jaeger_exporter()
    middleware = FlaskMiddleware(app, exporter=jaegerExporter)

handler = logging.StreamHandler()
handler.setFormatter(
    logging.Formatter(
        "[%(asctime)s] %(name)s [%(request_id)s] [%(levelname)s] %(message)s"))
handler.addFilter(RequestIDLogFilter())
_LOG = logging.getLogger()
_LOG.addHandler(handler)

# If invoked using Gunicorn, link our root logger to the gunicorn logger
# this will mean the root logs will be captured and managed by the gunicorn logger
# allowing you to set the gunicorn log directories and levels for logs
# produced by this application
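# The snippet is cut off here; one common pattern for the gunicorn hand-off
# the comment above describes (an assumption, not the original code):
if __name__ != '__main__':
    gunicorn_logger = logging.getLogger('gunicorn.error')
    _LOG.setLevel(gunicorn_logger.level)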
Example #23
from .forms import HelloForm

from opencensus.trace import config_integration
from opencensus.trace.exporters.ocagent import trace_exporter
from opencensus.trace import tracer as tracer_module
from opencensus.trace.propagation.trace_context_http_header_format import TraceContextPropagator
from opencensus.trace.exporters.transports.background_thread \
    import BackgroundThreadTransport

import time
import os
import requests

INTEGRATIONS = ['httplib']

service_name = os.getenv('SERVICE_NAME', 'python-service')
config_integration.trace_integrations(
    INTEGRATIONS,
    tracer=tracer_module.Tracer(exporter=trace_exporter.TraceExporter(
        service_name=service_name,
        endpoint=os.getenv('OCAGENT_TRACE_EXPORTER_ENDPOINT'),
        transport=BackgroundThreadTransport),
                                propagator=TraceContextPropagator()))


def call_go_app(request):
    requests.get("http://go-app:50030/call_aspnetcore_app")

    return HttpResponse("hello world from " + service_name)
Example #24
import logging

from opencensus.ext.azure.log_exporter import AzureLogHandler
from opencensus.ext.azure.trace_exporter import AzureExporter
from opencensus.trace import config_integration
from opencensus.trace.samplers import ProbabilitySampler
from opencensus.trace.tracer import Tracer


config_integration.trace_integrations(['logging'])

logger = logging.getLogger(__name__)

handler = AzureLogHandler(connection_string='InstrumentationKey=<Your Key>')

handler.setFormatter(logging.Formatter('%(traceId)s %(spanId)s %(message)s'))

logger.addHandler(handler)

tracer = Tracer(
    exporter=AzureExporter(connection_string='InstrumentationKey=<Your Key>'),
    sampler=ProbabilitySampler(1.0)
)

logger.warning('Before the span')
with tracer.span(name='azka_test_1'):
    logger.warning('In the span azka_test_1')
with tracer.span(name='azka_test_2'):
    logger.warning('In the span azka_test_2')
logger.warning('After the span')
Example #25
    'opencensus.ext.django.middleware.OpencensusMiddleware',
    'django_otp.middleware.OTPMiddleware',
]

OPENCENSUS_TRACE = {
    'SAMPLER': 'opencensus.trace.samplers.always_on.AlwaysOnSampler',
    'EXPORTER': 'opencensus.trace.exporters.print_exporter.PrintExporter',
    #    'PROPAGATOR':
    #    'opencensus.trace.propagation.google_cloud_format.'
    #    'GoogleCloudFormatPropagator',
}

OPENCENSUS_TRACE_PARAMS = {
    'SAMPLING_RATE': 0.5,
    'SERVICE_NAME': 'mithras',
}

integration = ['postgresql']

config_integration.trace_integrations(integration)

SERVER_EMAIL = '*****@*****.**'
USE_TZ = False

LOGIN_URL = 'two_factor:login'

# this one is optional
LOGIN_REDIRECT_URL = 'two_factor:profile'

SECURE_BROWSER_XSS_FILTER = True
Example #26
                       url_prefix='/scrape_aggregate_reports')
app.register_blueprint(store_single_count_blueprint,
                       url_prefix='/single_count')
app.register_blueprint(export_manager_blueprint, url_prefix='/export_manager')
app.register_blueprint(backup_manager_blueprint, url_prefix='/backup_manager')
app.register_blueprint(dataflow_monitor_blueprint,
                       url_prefix='/dataflow_monitor')
app.register_blueprint(validation_manager_blueprint,
                       url_prefix='/validation_manager')
app.register_blueprint(calculation_data_storage_manager_blueprint,
                       url_prefix='/calculation_data_storage_manager')

if environment.in_gae():
    SQLAlchemyEngineManager.init_engines_for_server_postgres_instances()

# Export traces and metrics to stackdriver if running on GAE
if environment.in_gae():
    monitoring.register_stackdriver_exporter()
    trace_exporter = stackdriver_trace.StackdriverExporter(
        project_id=metadata.project_id(), transport=AsyncTransport)
else:
    trace_exporter = file_exporter.FileExporter(file_name='traces')

# Setup tracing
app.config['OPENCENSUS_TRACE_PARAMS'] = {
    'BLACKLIST_HOSTNAMES': ['metadata']  # Don't trace metadata requests
}
middleware = FlaskMiddleware(app, exporter=trace_exporter)
config_integration.trace_integrations(
    ['google_cloud_clientlibs', 'requests', 'sqlalchemy'])
Example #27
    terms,
    error_handler,
    reset_password,
    enter_verification_code,
    change_password,
    edit_profile,
    disable_user,
    revoke_consent,
    show_oauth2_clients,
    delete_client,
)

# Import models here for SQLAlchemy to detect them
from .models import *

config_integration.trace_integrations(['requests'])
config_integration.trace_integrations(['sqlalchemy'])

# -- Flask setup -------------------------------------------------------------

app_kwargs = dict(
    import_name=PROJECT_NAME,
    template_folder=TEMPLATES_DIR,
    static_url_path='/static',
    static_folder=STATIC_DIR,
)

app = Flask(**app_kwargs)
app.logger.setLevel(logging.DEBUG)
app.config['SECRET_KEY'] = SECRET
Example #28
PROJECT = os.environ.get('GCLOUD_PROJECT_PYTHON')

# MySQL settings
MYSQL_PASSWORD = os.environ.get('SYSTEST_MYSQL_PASSWORD')

# PostgreSQL settings
POSTGRES_PASSWORD = os.environ.get('SYSTEST_POSTGRES_PASSWORD')

app = flask.Flask(__name__)

# Enable tracing, configure the trace params, send traces to Stackdriver Trace
exporter = stackdriver_exporter.StackdriverExporter(
    project_id='yanhuili-sandbox')
sampler = probability.ProbabilitySampler(rate=1)
middleware = FlaskMiddleware(app, exporter=exporter, sampler=sampler)
config_integration.trace_integrations(INTEGRATIONS)


@app.route('/')
def hello():
    return 'Hello world!'


@app.route('/requests')
def trace_requests():
    response = requests.get('http://www.google.com')
    return str(response.status_code)


@app.route('/mysql')
def mysql_query():
Example #29
# Copyright 2019, OpenCensus Authors
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

import requests

from opencensus.ext.azure.trace_exporter import AzureExporter
from opencensus.trace import config_integration
from opencensus.trace.tracer import Tracer

if __name__ == '__main__':
    config_integration.trace_integrations(['requests'])
    # TODO: you need to specify the instrumentation key in the
    # APPINSIGHTS_INSTRUMENTATIONKEY environment variable.
    tracer = Tracer(exporter=AzureExporter())
    with tracer.span(name='parent'):
        with tracer.span(name='child'):
            response = requests.get(url='http://localhost:8080/')
            print(response.status_code)
            print(response.text)
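# The demo above expects a server at http://localhost:8080/. A minimal
# stand-in (hypothetical; any HTTP responder would do):
from http.server import BaseHTTPRequestHandler, HTTPServer

class EchoHandler(BaseHTTPRequestHandler):
    def do_GET(self):
        self.send_response(200)
        self.end_headers()
        self.wfile.write(b'hello from the traced endpoint')

HTTPServer(('localhost', 8080), EchoHandler).serve_forever()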
Example #30
import time
import random
import socket
import os
import flask
import requests

exporter = trace_exporter.TraceExporter(
    service_name=os.getenv('SERVICE_NAME'),
    endpoint=os.getenv('COLLECTOR'))

tracer = Tracer(sampler=AlwaysOnSampler(), exporter=exporter, propagator=B3FormatPropagator())

integration = ['requests']

config_integration.trace_integrations(integration)

app = flask.Flask(__name__)
middleware = FlaskMiddleware(app, exporter=exporter, sampler=AlwaysOnSampler(), propagator=B3FormatPropagator(), blacklist_paths=['_ah/health'])

@app.route('/')
def init():
    with tracer.span(name='Initiate'):
        time.sleep(random.random())
        with tracer.span(name='GetDataFromOutside'):
            response = requests.get(os.getenv('REMOTE_ENDPOINT'))
        with tracer.span(name='ProcessData'):
            time.sleep(random.random())
    return 'OK'

@app.route('/data')
Example #31
    SAMPLER = (SETTINGS.get('SAMPLER', None) or ProbabilitySampler(rate=1.0))
    if isinstance(SAMPLER, six.string_types):
        SAMPLER = configuration.load(SAMPLER)

    EXPORTER = SETTINGS.get('EXPORTER', None) or PrintExporter()
    if isinstance(EXPORTER, six.string_types):
        EXPORTER = configuration.load(EXPORTER)

    TRACER = Tracer(
        exporter=EXPORTER,
        sampler=SAMPLER,
    )

    # Add tracing for PostgreSQL
    config_integration.trace_integrations(['postgresql'])

    # Configure logging from settings.py
    logging.config.dictConfig(getattr(django.conf.settings, 'LOGGING', {}))

    # Add logging integration
    config_integration.trace_integrations(['logging'])
    logger = logging.getLogger(__name__)

    if getattr(django.conf.settings, 'DEBUG'):
        try:
            from logging_tree import printout
            printout()
        except ImportError:
            pass  # optional logging_tree not in venv.