def get_opencensus_sampler():
    """Build an opencensus trace sampler from the environment.

    ``OPENCENSUS_SAMPLE_RATE`` is read as a percentage (default ``"100"``).
    A rate of 100 or more samples every trace; smaller values sample
    probabilistically (e.g. 25 -> 25% of traces).

    Returns:
        An instantiated opencensus sampler object.
    """
    from opencensus.trace.samplers import probability, always_on

    sample_rate = int(os.getenv("OPENCENSUS_SAMPLE_RATE", "100"))
    # BUG FIX: the original condition (`not rate < 100 and not rate > 0`)
    # could never be true, so the always-on branch was unreachable -- and
    # it returned the AlwaysOnSampler *class*, not an instance.
    if sample_rate >= 100:
        return always_on.AlwaysOnSampler()
    return probability.ProbabilitySampler(rate=sample_rate / 100.0)
def run_app():
    """Build the traced WSGI app and serve it on localhost:8080 forever."""
    trace_settings = {
        'OPENCENSUS': {
            'TRACE': {
                'EXPORTER': print_exporter.PrintExporter(),
                'SAMPLER': probability.ProbabilitySampler(rate=1),
            }
        }
    }
    application = main({}, **trace_settings)
    httpd = make_server('localhost', 8080, application)
    httpd.serve_forever()
def prerun_task_span(task_id=None, task=None, *args, **kwargs):
    """Celery task-prerun handler: open a Stackdriver-exported span.

    Does nothing unless ``settings.STACKDRIVER_TRACE_PROJECT_ID`` is set.
    The span and tracer are stashed in opencensus' execution context so the
    matching postrun handler (elsewhere) can close them.
    """
    # Guard clause instead of wrapping the whole body in the conditional.
    if not settings.STACKDRIVER_TRACE_PROJECT_ID:
        return
    trace_exporter = stackdriver_exporter.StackdriverExporter(
        project_id=settings.STACKDRIVER_TRACE_PROJECT_ID,
        transport=BackgroundThreadTransport)
    trace_sampler = probability.ProbabilitySampler(
        rate=settings.CELERY_TRACE_SAMPLING_RATE)
    tracer = tracer_module.Tracer(exporter=trace_exporter,
                                  sampler=trace_sampler)
    span = tracer.start_span()
    span.name = '[celery]{0}'.format(task.name)
    execution_context.set_opencensus_tracer(tracer)
    # Record the task's positional/keyword arguments (celery passes them
    # inside this handler's **kwargs) as span attributes.
    span.add_attribute('args', str(kwargs['args']))
    span.add_attribute('kwargs', str(kwargs['kwargs']))
    execution_context.set_current_span(span)
def run_app():
    """Serve the example app on localhost:8080 with print-exporter tracing."""
    # Build the settings mapping in one literal rather than piecemeal.
    settings = {
        'OPENCENSUS_TRACE': {
            'EXPORTER': print_exporter.PrintExporter(),
            'SAMPLER': probability.ProbabilitySampler(rate=1),
        },
    }
    app = main({}, **settings)
    make_server('localhost', 8080, app).serve_forever()
DB_HOST = '127.0.0.1'
# GCP project for the system test, taken from the environment.
PROJECT = os.environ.get('GCLOUD_PROJECT_PYTHON')

# MySQL settings
MYSQL_PASSWORD = os.environ.get('SYSTEST_MYSQL_PASSWORD')

# PostgreSQL settings
POSTGRES_PASSWORD = os.environ.get('SYSTEST_POSTGRES_PASSWORD')

app = flask.Flask(__name__)

# Enable tracing, configure the trace params, send traces to Stackdriver Trace
# BUG FIX: PROJECT was read from the environment but never used -- the
# exporter hard-coded a personal sandbox project.  Prefer the env var and
# keep the old literal only as a fallback for backward compatibility.
exporter = stackdriver_exporter.StackdriverExporter(
    project_id=PROJECT or 'yanhuili-sandbox')
sampler = probability.ProbabilitySampler(rate=1)
middleware = FlaskMiddleware(app, exporter=exporter, sampler=sampler)
config_integration.trace_integrations(INTEGRATIONS)


@app.route('/')
def hello():
    return 'Hello world!'


@app.route('/requests')
def trace_requests():
    # Outbound HTTP call; the requests integration traces it.
    response = requests.get('http://www.google.com')
    return str(response.status_code)
def create_app(config=None):
    """Create and configure the aleph Flask application.

    Wires up the Celery queue/schedule, database, mail, i18n, CORS, Sentry,
    init-time plugins, opencensus tracing and Stackdriver logging.

    Args:
        config: optional mapping of Flask config overrides, applied on top
            of the values loaded from the ``settings`` module.

    Returns:
        The configured :class:`flask.Flask` application.

    Raises:
        RuntimeError: if the configured database is not PostgreSQL.
    """
    # BUG FIX: the signature used a mutable default (`config={}`); a None
    # sentinel is backward compatible and avoids the shared-dict pitfall.
    config = config or {}
    app = Flask('aleph')
    app.config.from_object(settings)
    app.config.update(config)
    if 'postgres' not in settings.DATABASE_URI:
        raise RuntimeError("aleph database must be PostgreSQL!")
    app.config.update({
        'SQLALCHEMY_DATABASE_URI': settings.DATABASE_URI,
        'BABEL_DOMAIN': 'aleph'
    })
    queue = Queue(settings.QUEUE_NAME,
                  routing_key=settings.QUEUE_ROUTING_KEY,
                  queue_arguments={'x-max-priority': 9})
    celery.conf.update(
        # BUG FIX: `('aleph.queues')` was a bare string (missing trailing
        # comma), not a one-element tuple as intended.
        imports=('aleph.queues',),
        broker_url=settings.BROKER_URI,
        task_always_eager=settings.EAGER,
        task_eager_propagates=True,
        task_ignore_result=True,
        task_acks_late=False,
        task_queues=(queue,),
        task_default_queue=settings.QUEUE_NAME,
        task_default_routing_key=settings.QUEUE_ROUTING_KEY,
        worker_max_tasks_per_child=1000,
        result_persistent=False,
        beat_schedule={
            'hourly': {
                'task': 'aleph.logic.scheduled.hourly',
                'schedule': crontab(hour='*', minute=0)
            },
            'daily': {
                'task': 'aleph.logic.scheduled.daily',
                'schedule': crontab(hour=5, minute=0)
            }
        },
    )
    migrate.init_app(app, db, directory=settings.ALEMBIC_DIR)
    configure_oauth(app)
    mail.init_app(app)
    db.init_app(app)
    babel.init_app(app)
    CORS(app, origins=settings.CORS_ORIGINS)

    # Enable raven to submit issues to sentry if a DSN is defined. This will
    # report errors from Flask and Celery operation modes to Sentry.
    if settings.SENTRY_DSN:
        sentry.init_app(app,
                        dsn=settings.SENTRY_DSN,
                        logging=True,
                        level=logging.ERROR)
        register_logger_signal(sentry.client)
        register_signal(sentry.client, ignore_expected=True)

    # This executes all registered init-time plugins so that other
    # applications can register their behaviour.
    for plugin in get_extensions('aleph.init'):
        plugin(app=app)

    # Set up opencensus tracing and its integrations. Export collected traces
    # to Stackdriver Trace on a background thread.
    if settings.STACKDRIVER_TRACE_PROJECT_ID:
        exporter = stackdriver_exporter.StackdriverExporter(
            project_id=settings.STACKDRIVER_TRACE_PROJECT_ID,
            transport=BackgroundThreadTransport
        )
        sampler = probability.ProbabilitySampler(
            rate=settings.TRACE_SAMPLING_RATE
        )
        blacklist_paths = ['/healthz', ]
        FlaskMiddleware(
            app,
            exporter=exporter,
            sampler=sampler,
            blacklist_paths=blacklist_paths
        )
        integrations = ['postgresql', 'sqlalchemy', 'httplib']
        config_integration.trace_integrations(integrations)

    # Set up logging
    # NOTE(review): assumed to run unconditionally (not only when tracing is
    # enabled) -- confirm against the original file's indentation.
    setup_stackdriver_logging()

    return app
def test_should_sample_trace_id_not_sampled(self):
    """An all-f (maximum) trace id falls outside a 50% sampling window."""
    sampler = probability.ProbabilitySampler(rate=0.5)
    decision = sampler.should_sample(
        trace_id='ffffffffffffffffffffffffffffffff')
    self.assertFalse(decision)
def test_should_sample_trace_id_sampled(self):
    """An all-zero (minimum) trace id falls inside a 50% sampling window."""
    sampler = probability.ProbabilitySampler(rate=0.5)
    decision = sampler.should_sample(
        trace_id='00000000000000000000000000000000')
    self.assertTrue(decision)
def test_should_sample_greater(self):
    """With a rate of 0, no trace id is ever sampled."""
    zero_rate_sampler = probability.ProbabilitySampler(rate=0)
    decision = zero_rate_sampler.should_sample(
        trace_id='f8739df974a4481f98748cd92b27177d')
    self.assertFalse(decision)
def test_constructor_default(self):
    """A sampler built with no arguments defaults to a 0.5 rate."""
    rate = 0.5
    # Removed the unused `trace_id` local the original defined but
    # never referenced.
    sampler = probability.ProbabilitySampler()
    self.assertEqual(sampler.rate, rate)
def test_constructor_invalid(self):
    """A sampling rate above 1 is rejected with ValueError."""
    invalid_rate = 2
    with self.assertRaises(ValueError):
        probability.ProbabilitySampler(rate=invalid_rate)
def test_constructor_default(self):
    """The default sampling rate is 0.5."""
    default_sampler = probability.ProbabilitySampler()
    self.assertEqual(default_sampler.rate, 0.5)
def test_constructor_valid(self):
    """An explicitly supplied rate is stored unchanged."""
    chosen_rate = 0.8
    configured = probability.ProbabilitySampler(rate=chosen_rate)
    self.assertEqual(configured.rate, chosen_rate)