def test_pin_config(self):
    """A Pin exposes a config dict that can be mutated in place."""
    target = self.Obj()
    Pin.override(target, service='metrics')
    retrieved = Pin.get_from(target)
    ok_(retrieved._config is not None)
    retrieved._config['distributed_tracing'] = True
    ok_(retrieved._config['distributed_tracing'] is True)
def test_override_missing(self):
    """Overriding one instance must not attach a Pin to the class or siblings."""
    class A(object):
        pass

    instance = A()
    ok_(Pin.get_from(instance) is None)

    Pin.override(instance, service='metrics')
    eq_(Pin.get_from(instance).service, 'metrics')

    # a fresh instance of the same class stays un-pinned
    sibling = A()
    ok_(Pin.get_from(sibling) is None)
def test_override(self):
    """An instance-level override shadows the class Pin for that instance only."""
    class A(object):
        pass

    Pin(service='metrics', app='flask').onto(A)

    overridden = A()
    Pin.override(overridden, app='django')
    eq_(Pin.get_from(overridden).app, 'django')
    eq_(Pin.get_from(overridden).service, 'metrics')

    # untouched instances still see the class-level Pin
    untouched = A()
    eq_(Pin.get_from(untouched).app, 'flask')
    eq_(Pin.get_from(untouched).service, 'metrics')
def test_pin_config_is_a_copy(self):
    """Cloning a Pin via override must copy its config, not share it."""
    target = self.Obj()

    Pin.override(target, service='metrics')
    first = Pin.get_from(target)
    ok_(first._config is not None)
    first._config['distributed_tracing'] = True

    Pin.override(target, service='intake')
    second = Pin.get_from(target)
    ok_(second._config is not None)
    second._config['distributed_tracing'] = False

    # mutating the second config must not have touched the first
    ok_(first._config['distributed_tracing'] is True)
    ok_(second._config['distributed_tracing'] is False)
def test_pin_does_not_override_global(self):
    """A Pin read from an instance must not share config with the class Pin."""
    class A(object):
        pass

    Pin.override(A, service='metrics')
    class_pin = Pin.get_from(A)
    class_pin._config['distributed_tracing'] = True

    instance = A()
    instance_pin = Pin.get_from(instance)
    ok_(instance_pin is not None)
    # the instance Pin starts from the class-level config value...
    ok_(instance_pin._config['distributed_tracing'] is True)

    # ...but mutating it must leave the class-level config intact
    instance_pin._config['distributed_tracing'] = False
    ok_(class_pin._config['distributed_tracing'] is True)
    ok_(instance_pin._config['distributed_tracing'] is False)
def aiobotocore_client(service, tracer):
    """Yield a fresh aiobotocore client pinned to `tracer`.

    The client is created against the localstack endpoint for `service`
    and closed when the surrounding context manager exits.
    """
    session = aiobotocore.session.get_session()
    client = session.create_client(
        service,
        region_name='us-west-2',
        endpoint_url=LOCALSTACK_ENDPOINT_URL[service],
        aws_access_key_id='aws',
        aws_secret_access_key='aws',
        aws_session_token='aws',
    )
    Pin.override(client, tracer=tracer)
    try:
        yield client
    finally:
        client.close()
def setUp(self):
    """Patch the integration and route Template spans to a dummy tracer."""
    self.tracer = get_dummy_tracer()
    patch()
    Pin.override(Template, tracer=self.tracer)
def setUp(self):
    """Patch molten and send its spans to a dummy tracer."""
    self.tracer = get_dummy_tracer()
    patch()
    Pin.override(molten, tracer=self.tracer)
def setUp(self):
    """Patch grpc, pin both sides to the test tracer, and boot the server."""
    super(GrpcTestCase, self).setUp()
    patch()
    # client and server are pinned independently; order is irrelevant
    Pin.override(constants.GRPC_PIN_MODULE_CLIENT, tracer=self.tracer)
    Pin.override(constants.GRPC_PIN_MODULE_SERVER, tracer=self.tracer)
    self._start_server()
def pytest_configure(config):
    # Re-point the config's existing Pin at a tracer; does nothing when the
    # pytest integration has not attached a Pin to `config`.
    #
    # NOTE(review): `self` is undefined inside this module-level pytest hook —
    # the branch raises NameError whenever it is taken. The tracer presumably
    # should come from module scope or a plugin object; confirm the intended
    # source before relying on this hook.
    if Pin.get_from(config) is not None:
        Pin.override(config, tracer=self.tracer)
def make_client(self, mock_socket_values, **kwargs):
    """Build a pymemcache client whose socket replays `mock_socket_values`.

    The pymemcache module is pinned to a fresh DummyTracer; the client is
    stored on `self.client` and also returned.
    """
    dummy = DummyTracer()
    Pin.override(pymemcache, tracer=dummy)
    client = pymemcache.client.base.Client((TEST_HOST, TEST_PORT), **kwargs)
    client.sock = MockSocket(list(mock_socket_values))
    self.client = client
    return self.client
})

## Trace patch for MySQL
#patch(mysql=True)

## Connecting MySQL
# Connection parameters come from the project-local `db_config` module.
mydb = mysql.connector.connect(
    host=db_config.db_host,
    user=db_config.db_username,
    passwd=db_config.db_password,
    database=db_config.db_name
)
mycursor = mydb.cursor()

## Use a pin to specify metadata related to this connection
Pin.override(mydb, service='kikeyama_mysql')

## Flask
app = Flask(__name__)
#traced_app = TraceMiddleware(app, tracer, service="kikeyama_service", distributed_tracing=False)
traced_app = TraceMiddleware(app, tracer, service='kikeyama_service')

# Enable distributed tracing
ddtrace.config.flask['distributed_tracing_enabled'] = True


@app.route('/')
def api_entry():
    # NOTE(review): the visible body ends with a commented-out return, so this
    # route returns None — the function appears truncated in this copy;
    # confirm against the original source.
    start_time = time.time()
    app.logger.info('getting root endpoint')
    # return 'Entrypoint to the Application'
from concurrent.futures import ThreadPoolExecutor # env vars for deploying purpose DATADOG_TRACER = os.getenv('DATADOG_TRACER', 'localhost') PORT = int(os.getenv('APP_PORT', '8000')) BASE_DIR = os.path.dirname(os.path.realpath(__file__)) STATIC_DIR = os.path.join(BASE_DIR, 'statics') # configure the tracer tracer.configure(hostname=DATADOG_TRACER) # patch redis with Pin patch(redis=True) url = os.getenv('REDIS_URL', 'redis://*****:*****@tracer.wrap('tornado.unit_counter') def count_unit(): # this call is automatically traced client.incr('tornado:unit_counter') class MainHandler(RequestHandler): @coroutine def get(self): yield gen_sleep(0.05) yield self.delayed_work()
def setup(self):
    """Run the base setup, then pin the engine to the monitoring service."""
    super().setup()
    Pin.override(self.engine, service="monitoring-service")
def get_redis_and_tracer(self):
    """Return a redis client together with the dummy tracer pinned to it."""
    dummy = get_dummy_tracer()
    client = redis.Redis(port=REDIS_CONFIG['port'])
    Pin.override(client, service=self.TEST_SERVICE, tracer=dummy)
    return client, dummy
def setUp(self):
    """Patch jinja2 and route Environment spans to the test tracer."""
    super(Jinja2Test, self).setUp()
    patch()
    # Template('code...') reuses a cached "spontaneous" environment; clear the
    # cache so each test starts from a freshly pinned Environment.
    jinja2.environment._spontaneous_environments.clear()
    Pin.override(jinja2.environment.Environment, tracer=self.tracer)
async def test_pin(redis_client):
    """A per-client Pin override must not break basic commands."""
    Pin.override(redis_client, service="my-aioredis")
    assert await redis_client.get("cheese") is None
def get_count(client):
    """Return the stored counter, falling back to INITIAL_VALUE when unset."""
    raw = client.get(KEY)
    current = int(raw) if raw else None
    return current or INITIAL_VALUE


def store_count(client, count):
    """Persist the counter value under KEY."""
    client.set(KEY, count)


def increment_count(count):
    """Return the counter advanced by INCR_AMOUNT."""
    return count + INCR_AMOUNT


def run_app(client):
    """Read, bump, store, and report the counter once."""
    count = increment_count(get_count(client))
    store_count(client, count)
    print("current count {}".format(count))


if __name__ == "__main__":
    client = pymemcache.client.base.Client((MEM_HOST, MEM_PORT))
    # configure the client with some custom metadata
    Pin.override(client, service="memcached")
    run_app(client)
def test_sync_worker_pin_service(queue):
    """A Pin on the worker must not prevent the job from completing."""
    job = queue.enqueue(job_add1, 10)
    worker = rq.SimpleWorker([queue], connection=queue.connection)
    Pin.override(worker, service="my-pin-svc")
    worker.work(burst=True)
    assert job.result == 11
import os
import asyncio

import redis

import ddtrace
from ddtrace import tracer, Pin, patch
from ddtrace.contrib.asyncio import context_provider, helpers

DATADOG_TRACER = os.getenv('DATADOG_TRACER', 'localhost')
tracer.configure(hostname=DATADOG_TRACER, context_provider=context_provider)

# patch redis
patch(redis=True)

url = os.getenv('REDIS_URL', 'redis://localhost:6379')
client = redis.StrictRedis.from_url(url, db=0)
Pin.override(client, service='asyncio-redis-queue')


async def get_redis_value():
    """Read the shared key, then reset it, inside one traced span."""
    with tracer.trace('async.cache'):
        # we may not block here
        await asyncio.sleep(0.01)
        value = client.get('sync:key')
        return client.set('sync:key', 42)


async def delayed_job(parent_span):
    """Simulate slow background work as a child span of `parent_span`."""
    with tracer.start_child_span('async.worker', parent_span, service='asyncio-workers') as span:
        await asyncio.sleep(3)
def setUp(self):
    """Patch molten and pin it to a dummy tracer under the test service."""
    self.tracer = get_dummy_tracer()
    patch()
    Pin.override(molten, tracer=self.tracer, service=self.TEST_SERVICE)
def setUp(self):
    """Patch mako and pin Template to the test tracer."""
    super(MakoTest, self).setUp()
    patch()
    Pin.override(Template, tracer=self.tracer)
def setUp(self):
    """Patch molten and pin it to the test tracer and service."""
    super(TestMoltenDI, self).setUp()
    patch()
    Pin.override(molten, tracer=self.tracer, service=self.TEST_SERVICE)
logger = logging.getLogger()

# CLI: number of concurrent requests, total request count, and target path
parser = argparse.ArgumentParser(description='Concurrent Traffic Generator')
parser.add_argument('concurrent', type=int, help='Number of Concurrent Requests')
parser.add_argument('total', type=int, help='Total number of Requests to Make')
parser.add_argument('url', type=str, help='URL to fetch')
args = parser.parse_args()

NODE_URL = f"http://{os.environ['NODE_API_SERVICE_HOST']}:{os.environ['NODE_API_SERVICE_PORT']}"

asyncio.set_event_loop(asyncio.new_event_loop())
session = AsyncSession(n=args.concurrent)
Pin.override(session, service='concurrent-requests-generator')


async def generate_requests():
    """Fire `args.total` requests concurrently and await them in order."""
    with tracer.trace('flask.request', service='concurrent-requests-generator') as span:
        pending = [session.get(NODE_URL + args.url) for _ in range(args.total)]
        responses = [await request for request in pending]
        print(responses)


session.run(generate_requests)
session.close()
def setUp(self):
    """Give each test a dedicated Pin and route app spans to the dummy tracer."""
    super(CeleryBaseTestCase, self).setUp()
    self.pin = Pin(service="celery-unittest", tracer=self.tracer)
    # override pins to use our Dummy Tracer
    Pin.override(self.app, tracer=self.tracer)
from ddtrace import Pin, patch
import grpc

from hello_pb2 import HelloRequest
from hello_pb2_grpc import HelloStub

# Use a pin to specify metadata related to this connection
Pin.override(grpc, service='demo.grpc')


def run():
    """Send a single traced SayHello RPC to the local server."""
    # If not patched yet, you can patch grpc specifically
    patch(grpc=True)
    with grpc.insecure_channel('localhost:50051') as channel:
        stub = HelloStub(channel)
        reply = stub.SayHello(HelloRequest(name="test"))
        print(reply)


if __name__ == '__main__':
    run()
def setUp(self):
    """Patch molten and send its spans to the test tracer."""
    super(TestMolten, self).setUp()
    patch()
    Pin.override(molten, tracer=self.tracer)
'version': 1,
'formatters': {
    'verbose': {
        'format': '%(levelname)s %(asctime)s %(module)s %(process)d %(thread)d %(message)s'
    },
},
'handlers': {
    'console': {
        'level': 'DEBUG',
        'class': 'logging.StreamHandler',
        'formatter': 'verbose',
    },
},
'loggers': {
    # route ddtrace's own debug logging to the console
    'ddtrace': {
        'handlers': ['console'],
        'level': 'DEBUG',
        'propagate': True,
    },
}
})

if __name__ == '__main__':
    patch(pymongo=True)
    client = pymongo.MongoClient()
    # NOTE(review): binding the result of Pin.override to `pin` looks
    # misleading — confirm it returns a Pin in the ddtrace version in use.
    pin = Pin.override(client, service="mongo-master----------------s")
    db = client.test_database
    collection = db.test_collection
    collection.insert_one({"name": "Luca"})
'handlers': {
    'console': {
        'level': 'DEBUG',
        'class': 'logging.StreamHandler',
        'formatter': 'verbose',
    },
},
'loggers': {
    # route ddtrace's own debug logging to the console
    'ddtrace': {
        'handlers': ['console'],
        'level': 'DEBUG',
        'propagate': True,
    },
}
})

# If not patched yet, you can patch mysql specifically
patch(mysql=True)

# This will report a span with the default settings
# NOTE(review): the credentials below were redacted ("******") in this copy.
conn = MySQLdb.connect(user="******", password="******", host="localhost", port=3306, database="test")
cursor = conn.cursor()
cursor.execute("SHOW TABLES")

# Use a pin to specify metadata related to this connection
Pin.override(conn, service='mysql-users')
def get_redis_and_tracer(self):
    """Return a pinned test client together with its dummy tracer."""
    dummy = get_dummy_tracer()
    client = self._get_test_client()
    Pin.override(client, service=self.TEST_SERVICE, tracer=dummy)
    return client, dummy
def get_connection(tracer):
    """Open a MariaDB connection whose spans are routed to `tracer`."""
    conn = mariadb.connect(**MARIADB_CONFIG)
    Pin.override(conn, tracer=tracer)
    return conn
def setUp(self):
    """Create a Flask app and test client pinned to the dummy tracer."""
    super(FlaskAutopatchTestCase, self).setUp()
    application = flask.Flask(__name__)
    Pin.override(application, service="test-flask", tracer=self.tracer)
    self.app = application
    self.client = application.test_client()
'login': '******',
'signup': 'doctor.forms.SignupForm',
'reset_password': '******',
}
SOCIALACCOUNT_FORMS = {
    'signup': 'doctor.forms.SocialSignupForm',
}
SOCIALACCOUNT_QUERY_EMAIL = True
ACCOUNT_USER_MODEL_USERNAME_FIELD = None
ACCOUNT_USERNAME_REQUIRED = False
ACCOUNT_DEFAULT_HTTP_PROTOCOL = 'https'
ACCOUNT_ADAPTER = 'doctor.adapters.AccountAdapter'
SOCIALACCOUNT_ADAPTER = 'doctor.adapters.SocialAccountAdapter'

# Crispy
CRISPY_TEMPLATE_PACK = 'bootstrap4'
CRISPY_FAIL_SILENTLY = not DEBUG

# NOTE(review): the address was redacted in this copy.
EMAIL_FROM = '*****@*****.**'
CONTACT_PHONE = '+37065253669'

# VINTRA
IVPK_CLIENT_ID = env.str('IVPK_SAUGYKLA_CLIENT_ID', None)
IVPK_CLIENT_SECRET = env.str('IVPK_SAUGYKLA_CLIENT_SECRET', None)

if not DEBUG:
    patch_all()
    # NOTE(review): Pin.override is called with only the existing Pin and no
    # new settings — it is unclear what this is meant to change; confirm the
    # intended service/tracer arguments.
    Pin.override(Pin.get_from(django))
def setUp(self):
    """Build a Flask app and test client routed to a dummy tracer."""
    self.tracer = get_dummy_tracer()
    application = flask.Flask(__name__)
    Pin.override(application, service='test-flask', tracer=self.tracer)
    self.app = application
    self.client = application.test_client()
from ddtrace import Pin
from flask import abort, Blueprint, render_template_string

from .limiter import limiter

# Blueprint mounted under /bp/
bp = Blueprint('bp', __name__, url_prefix='/bp/')

# Demonstrate overriding the traced service for just this blueprint
Pin.override(bp, service='flask-bp', app='flask', app_type='web')


@bp.before_request
def bp_before_request():
    """Runs before every request routed to this blueprint."""
    print('Hook: bp_before_request')


@bp.before_app_request
def bp_before_app_request():
    """Runs before every request of the whole application."""
    print('Hook: bp_before_app_request')


@bp.before_app_first_request
def bp_before_app_first_request():
    """Runs once, before the application's first request."""
    print('Hook: bp_before_app_first_request')