def setUp(self):
    """Patch redis and attach a pinned, flushed client to the test case."""
    super(TestRedisPatch, self).setUp()
    patch()
    client = redis.Redis(port=self.TEST_PORT)
    client.flushall()  # start every test from an empty keyspace
    Pin.override(client, service=self.TEST_SERVICE, tracer=self.tracer)
    self.r = client
def test_cursor_override(self):
    """Test overriding the tracer with our own."""
    conn, cur = self.test_conn
    Pin.override(cur, tracer=self.test_tracer)

    with conn:
        cur.execute("INSERT INTO {} (a, b) VALUES (1, 'aa');".format(TEST_TABLE))
        cur.execute('SELECT * FROM {};'.format(TEST_TABLE))

    spans = self.test_tracer.writer.pop()
    assert len(spans) == 2

    # verify the metadata captured on the INSERT span
    insert_span, select_span = spans
    assert insert_span.service == 'vertica'
    assert insert_span.span_type == 'sql'
    assert insert_span.name == 'vertica.query'
    assert insert_span.get_metric('db.rowcount') == -1
    assert insert_span.resource == "INSERT INTO test_table (a, b) VALUES (1, 'aa');"
    assert insert_span.get_tag('out.host') == '127.0.0.1'
    assert insert_span.get_tag('out.port') == '5433'
    assert select_span.resource == 'SELECT * FROM test_table;'
def test_blueprint_request_pin_override(self):
    """
    When making a request to a Blueprint's endpoint
        When we attach a ``Pin`` to the Blueprint
            We create the expected spans
    """
    bp = flask.Blueprint('bp', __name__)
    Pin.override(bp, service='flask-bp', tracer=self.tracer)

    @bp.route('/')
    def test():
        return 'test'

    self.app.register_blueprint(bp)

    # Request the endpoint
    self.client.get('/')

    # DEV: a request produces many lifecycle spans that are tested
    # elsewhere; pull out only the handler span we care about here
    handler_span = self.find_span_by_name(self.get_spans(), 'bp.test')
    expected = {'service': 'flask-bp', 'name': 'bp.test', 'resource': '/'}
    for attr, value in expected.items():
        self.assertEqual(getattr(handler_span, attr), value)
    self.assertEqual(handler_span.meta, {})
def make_client(self, mock_socket_values, **kwargs):
    """Build a pymemcache Client backed by a MockSocket and a dummy tracer."""
    dummy_tracer = get_dummy_tracer()
    # pin the tracer at the module level so the new client inherits it
    Pin.override(pymemcache, tracer=dummy_tracer)

    client = pymemcache.client.base.Client((TEST_HOST, TEST_PORT), **kwargs)
    client.sock = MockSocket(list(mock_socket_values))
    self.client = client
    return client
def setUp(self):
    """Patch redis and point a flushed test client at the dummy tracer."""
    super(TestRedisPatch, self).setUp()
    patch()
    client = self._get_test_client()
    client.flushall()  # make sure each test starts with an empty keyspace
    Pin.override(client, service=self.TEST_SERVICE, tracer=self.tracer)
    self.r = client
def test_route_success(self):
    """Tests request was a success with the expected span tags."""
    response = molten_client()
    spans = self.tracer.writer.pop()
    self.assertEqual(response.status_code, 200)
    # TestResponse from TestClient is wrapper around Response so we must
    # access data property
    self.assertEqual(response.data, '"Hello 24 year old named Jim!"')

    # the first span is the top-level molten request span
    span = spans[0]
    self.assertEqual(span.service, 'molten')
    self.assertEqual(span.name, 'molten.request')
    self.assertEqual(span.resource, 'GET /hello/{name}/{age}')
    self.assertEqual(span.get_tag('http.method'), 'GET')
    self.assertEqual(span.get_tag(http.URL), 'http://127.0.0.1:8000/hello/Jim/24')
    self.assertEqual(span.get_tag('http.status_code'), '200')
    assert http.QUERY_STRING not in span.meta

    # See test_resources below for specifics of this difference
    if MOLTEN_VERSION >= (0, 7, 2):
        self.assertEqual(len(spans), 18)
    else:
        self.assertEqual(len(spans), 16)

    # test override of service name
    Pin.override(molten, service=self.TEST_SERVICE)
    response = molten_client()
    spans = self.tracer.writer.pop()
    # assert against the same constant used in the override (instead of a
    # duplicated string literal) so the check cannot drift out of sync with
    # self.TEST_SERVICE
    self.assertEqual(spans[0].service, self.TEST_SERVICE)
def setUp(self): patch() # prevent cache effects when using Template('code...') jinja2.environment._spontaneous_environments.clear() # provide a dummy tracer self.tracer = get_dummy_tracer() Pin.override(jinja2.environment.Environment, tracer=self.tracer)
def test_configuration_routine(self):
    """Ensure that the integration routines can be configured."""
    # per-routine override: rename the span produced by Connection.cursor
    # and force tracing on for it
    routine_config = dict(patch={
        'vertica_python.vertica.connection.Connection': dict(routines=dict(cursor=dict(
            operation_name='get_cursor',
            trace_enabled=True,
        ), ), ),
    }, )

    # Make a copy of the vertica config first before we merge our settings over
    # DEV: First argument gets merged into the second
    copy = deepmerge(config.vertica, dict())
    overrides = deepmerge(routine_config, copy)

    with self.override_config('vertica', overrides):
        patch()
        import vertica_python

        test_tracer = get_dummy_tracer()

        conn = vertica_python.connect(**VERTICA_CONFIG)
        Pin.override(conn, service='mycustomservice', tracer=test_tracer)
        conn.cursor()  # should be traced now
        conn.close()

    # exactly one span, carrying the configured name and service
    spans = test_tracer.writer.pop()
    assert len(spans) == 1
    assert spans[0].name == 'get_cursor'
    assert spans[0].service == 'mycustomservice'
def test_execute_metadata(self):
    """Metadata related to an `execute` call should be captured."""
    conn, cur = self.test_conn
    Pin.override(cur, tracer=self.test_tracer)

    with conn:
        cur.execute("INSERT INTO {} (a, b) VALUES (1, 'aa');".format(TEST_TABLE))
        cur.execute('SELECT * FROM {};'.format(TEST_TABLE))

    spans = self.test_tracer.writer.pop()
    assert len(spans) == 2

    # verify the metadata captured on the INSERT span
    insert_span, select_span = spans
    assert insert_span.service == 'vertica'
    assert insert_span.span_type == 'sql'
    assert insert_span.name == 'vertica.query'
    assert insert_span.get_metric('db.rowcount') == -1
    assert insert_span.resource == "INSERT INTO test_table (a, b) VALUES (1, 'aa');"
    assert insert_span.get_tag('out.host') == '127.0.0.1'
    assert insert_span.get_tag('out.port') == '5433'
    assert insert_span.get_tag('db.name') == 'docker'
    assert insert_span.get_tag('db.user') == 'dbadmin'
    assert select_span.resource == 'SELECT * FROM test_table;'
def test_pin_config(self):
    """A `Pin` must expose a mutable per-pin `_config` mapping."""
    target = self.Obj()
    Pin.override(target, service='metrics')

    pin = Pin.get_from(target)
    assert pin._config is not None

    # the config dict is writable and reads back the stored value
    pin._config['distributed_tracing'] = True
    assert pin._config['distributed_tracing'] is True
def test_service_name_override(self):
    """A service set on the client's pin is used for all emitted spans."""
    canned_responses = [b'STORED\r\n', b'VALUE key 0 5\r\nvalue\r\nEND\r\n']
    client = self.make_client(canned_responses)
    Pin.override(client, service='testsvcname')

    client.set(b'key', b'value', noreply=False)
    result = client.get(b'key')
    assert _str(result) == 'value'

    # both the set and the get span carry the overridden service
    spans = self.get_spans()
    self.assertEqual(spans[0].service, 'testsvcname')
    self.assertEqual(spans[1].service, 'testsvcname')
def setUp(self):
    """Instrument Celery and build an app routed through the dummy tracer."""
    super(CeleryBaseTestCase, self).setUp()

    # instrument Celery and create an app with Broker and Result backends
    patch()
    self.app = Celery('celery.test_app', broker=BROKER_URL, backend=BACKEND_URL)
    self.pin = Pin(service='celery-unittest', tracer=self.tracer)
    # override pins to use our Dummy Tracer
    Pin.override(self.app, tracer=self.tracer)
def test_override_parent_pin(self):
    """Test that the service set on `pymemcache` is used for Clients."""
    Pin.override(pymemcache, service='mysvc')
    client = self.make_client([b'STORED\r\n', b'VALUE key 0 5\r\nvalue\r\nEND\r\n'])
    client.set(b'key', b'value', noreply=False)

    # the client inherits the module-level pin, so its spans land on the
    # tracer attached to `pymemcache`
    parent_pin = Pin.get_from(pymemcache)
    spans = parent_pin.tracer.writer.pop()
    self.assertEqual(spans[0].service, 'mysvc')
def setUp(self):
    """Create a traced SQLAlchemy engine against Postgres and connect."""
    super(SQLAlchemyPatchTestCase, self).setUp()

    # create a traced engine with the given arguments
    # and configure the current PIN instance
    patch()
    url = 'postgresql://%(user)s:%(password)s@%(host)s:%(port)s/%(dbname)s' % POSTGRES_CONFIG
    self.engine = sqlalchemy.create_engine(url)
    Pin.override(self.engine, tracer=self.tracer)

    # prepare a connection
    self.conn = self.engine.connect()
def setUp(self):
    """Patch consul and pin the client class and its KV API to our tracer."""
    super(TestConsulPatch, self).setUp()
    patch()
    client = consul.Consul(host=CONSUL_CONFIG['host'], port=CONSUL_CONFIG['port'])
    # pin at the class level so every instance (including `client`) is traced
    for target in (consul.Consul, consul.Consul.KV):
        Pin.override(target, service=self.TEST_SERVICE, tracer=self.tracer)
    self.c = client
def setUp(self):
    """Open a kombu connection/producer and pin the producer to our tracer."""
    super(TestKombuPatch, self).setUp()

    connection = kombu.Connection('amqp://*****:*****@127.0.0.1:{p}//'.format(p=self.TEST_PORT))
    connection.connect()
    publisher = connection.Producer()
    Pin.override(publisher, service=self.TEST_SERVICE, tracer=self.tracer)
    self.conn = connection
    self.producer = publisher

    # NOTE(review): patch() runs after the connection/producer are created —
    # presumably deliberate so only subsequent calls are instrumented; confirm
    patch()
def test_override_missing(self):
    """Overriding a bare instance must not create a pin on its class."""
    # ensure overriding an instance doesn't override the Class
    class A(object):
        pass

    instance = A()
    assert Pin.get_from(instance) is None

    Pin.override(instance, service='metrics')
    assert Pin.get_from(instance).service == 'metrics'

    # a fresh instance of the same class stays unpinned
    other = A()
    assert Pin.get_from(other) is None
def test_analytics_default(self):
    """No analytics sample-rate metric is recorded by default."""
    conn, cur = self.test_conn
    Pin.override(cur, tracer=self.test_tracer)

    with conn:
        cur.execute("INSERT INTO {} (a, b) VALUES (1, 'aa');".format(TEST_TABLE))
        cur.execute('SELECT * FROM {};'.format(TEST_TABLE))

    spans = self.test_tracer.writer.pop()
    self.assertEqual(len(spans), 2)
    self.assertIsNone(spans[0].get_metric(ANALYTICS_SAMPLE_RATE_KEY))
def test_override(self):
    """Overriding an instance pin must not leak into the class-level pin."""
    # ensure Override works for an instance object
    class A(object):
        pass

    Pin(service='metrics', app='flask').onto(A)

    overridden = A()
    Pin.override(overridden, app='django')
    # the instance picks up the new app but keeps the inherited service
    assert Pin.get_from(overridden).app == 'django'
    assert Pin.get_from(overridden).service == 'metrics'

    # a new instance still sees the untouched class-level pin
    untouched = A()
    assert Pin.get_from(untouched).app == 'flask'
    assert Pin.get_from(untouched).service == 'metrics'
def test_analytics_without_rate(self):
    """Enabling analytics without a rate records the default rate of 1.0."""
    with self.override_config('vertica', dict(analytics_enabled=True)):
        conn, cur = self.test_conn
        Pin.override(cur, tracer=self.test_tracer)

        with conn:
            cur.execute("INSERT INTO {} (a, b) VALUES (1, 'aa');".format(TEST_TABLE))
            cur.execute('SELECT * FROM {};'.format(TEST_TABLE))

        spans = self.test_tracer.writer.pop()
        self.assertEqual(len(spans), 2)
        self.assertEqual(spans[0].get_metric(ANALYTICS_SAMPLE_RATE_KEY), 1.0)
def test_pin_config_is_a_copy(self):
    """Each override clones the pin, so configs must not be shared."""
    target = self.Obj()

    Pin.override(target, service='metrics')
    first = Pin.get_from(target)
    assert first._config is not None
    first._config['distributed_tracing'] = True

    Pin.override(target, service='intake')
    second = Pin.get_from(target)
    assert second._config is not None
    second._config['distributed_tracing'] = False

    # mutating one pin's config must not affect the other
    assert first._config['distributed_tracing'] is True
    assert second._config['distributed_tracing'] is False
def test_unpatch_patch(self):
    """Tests unpatch-patch cycle."""
    # after unpatch, no pin and no spans are produced
    unpatch()
    self.assertIsNone(Pin.get_from(molten))
    molten_client()
    self.assertEqual(len(self.tracer.writer.pop()), 0)

    # re-patching restores tracing
    patch()
    # Need to override Pin here as we do in setUp
    Pin.override(molten, tracer=self.tracer)
    self.assertIsNotNone(Pin.get_from(molten))
    molten_client()
    self.assertTrue(len(self.tracer.writer.pop()) > 0)
def test_engine_pin_service(self):
    """The engine's spans must use the service set on its pin."""
    # ensures that the engine service is updated with the PIN object
    Pin.override(self.engine, service='replica-db')

    rows = self.conn.execute('SELECT 1').fetchall()
    assert len(rows) == 1

    traces = self.tracer.writer.pop_traces()
    # trace composition: exactly one trace holding a single span
    assert len(traces) == 1
    assert len(traces[0]) == 1

    # check subset of span fields
    span = traces[0][0]
    assert span.name == 'postgres.query'
    assert span.service == 'replica-db'
    assert span.error == 0
    assert span.duration > 0
def make_client_pool(self, hostname, mock_socket_values, serializer=None, **kwargs):
    """Build a mocked pymemcache Client and a PooledClient whose pool always
    hands back that same mocked client; returns the underlying mocked client.
    """
    mock_client = pymemcache.client.base.Client(hostname, serializer=serializer, **kwargs)
    tracer = get_dummy_tracer()
    Pin.override(mock_client, tracer=tracer)
    # replace the real socket with canned responses
    mock_client.sock = MockSocket(mock_socket_values)
    client = pymemcache.client.base.PooledClient(hostname, serializer=serializer)
    client.client_pool = pymemcache.pool.ObjectPool(lambda: mock_client)
    # NOTE(review): the PooledClient `client` is discarded and the raw mocked
    # client is returned — presumably callers only need the mocked client;
    # confirm against call sites before changing the return value.
    return mock_client
def test_configuration_service_name(self):
    """Ensure that the integration can be configured."""
    with self.override_config('vertica', dict(service_name='test_svc_name')):
        patch()
        import vertica_python

        dummy_tracer = get_dummy_tracer()
        connection = vertica_python.connect(**VERTICA_CONFIG)
        cursor = connection.cursor()
        Pin.override(cursor, tracer=dummy_tracer)
        with connection:
            cursor.execute('DROP TABLE IF EXISTS {}'.format(TEST_TABLE))

    # the single query span must carry the configured service name
    spans = dummy_tracer.writer.pop()
    assert len(spans) == 1
    assert spans[0].service == 'test_svc_name'
def test_pin_does_not_override_global(self):
    """An instance-level pin must not mutate the class-level pin."""
    # ensure that when a `Pin` is created from a class, the specific
    # instance doesn't override the global one
    class A(object):
        pass

    Pin.override(A, service='metrics')
    global_pin = Pin.get_from(A)
    global_pin._config['distributed_tracing'] = True

    instance = A()
    instance_pin = Pin.get_from(instance)
    assert instance_pin is not None
    # the instance pin starts out with the class-level settings...
    assert instance_pin._config['distributed_tracing'] is True

    # ...but flipping it on the instance leaves the class pin untouched
    instance_pin._config['distributed_tracing'] = False
    assert global_pin._config['distributed_tracing'] is True
    assert instance_pin._config['distributed_tracing'] is False
def aiobotocore_client(service, tracer):
    """Helper function that creates a new aiobotocore client so that
    it is closed at the end of the context manager.
    """
    session = aiobotocore.session.get_session()
    # localstack endpoint for the requested AWS service
    endpoint = LOCALSTACK_ENDPOINT_URL[service]
    client = session.create_client(
        service,
        region_name='us-west-2',
        endpoint_url=endpoint,
        aws_access_key_id='aws',
        aws_secret_access_key='aws',
        aws_session_token='aws',
    )
    # route the client's spans through the provided (dummy) tracer
    Pin.override(client, tracer=tracer)
    try:
        yield client
    finally:
        # NOTE(review): on some aiobotocore versions `close()` returns a
        # coroutine — confirm this helper is used with a version where the
        # bare call releases the connection.
        client.close()
def _publish_consume(self):
    """Publish one message and drain it back, tracing the consumer."""
    received = []

    def on_message(body, message):
        # record the payload and ack so drain_events can return
        received.append(body)
        message.ack()

    task_queue = kombu.Queue('tasks', kombu.Exchange('tasks'), routing_key='tasks')
    payload = {'hello': 'world'}
    self.producer.publish(
        payload,
        exchange=task_queue.exchange,
        routing_key=task_queue.routing_key,
        declare=[task_queue],
    )

    with kombu.Consumer(self.conn, [task_queue], accept=['json'], callbacks=[on_message]) as consumer:
        Pin.override(consumer, service='kombu-patch', tracer=self.tracer)
        self.conn.drain_events(timeout=2)

    self.assertEqual(received[0], payload)
def test_pin_can_be_defined_per_channel(self):
    """Each channel captures the module-level pin active at creation time."""
    Pin.override(constants.GRPC_PIN_MODULE_CLIENT, service='grpc1')
    channel1 = grpc.insecure_channel('localhost:%d' % (_GRPC_PORT))

    # re-pinning the module only affects channels created afterwards
    Pin.override(constants.GRPC_PIN_MODULE_CLIENT, service='grpc2')
    channel2 = grpc.insecure_channel('localhost:%d' % (_GRPC_PORT))

    stub1 = HelloStub(channel1)
    stub2 = HelloStub(channel2)
    stub1.SayHello(HelloRequest(name='test'))
    stub2.SayHello(HelloRequest(name='test'))

    spans = self.get_spans()
    assert len(spans) == 4

    # DEV: Server service default, client services override
    expectations = [
        (self._check_server_span, 'grpc-server'),
        (self._check_client_span, 'grpc1'),
        (self._check_server_span, 'grpc-server'),
        (self._check_client_span, 'grpc2'),
    ]
    for span, (check, service) in zip(spans, expectations):
        check(span, service, 'SayHello', 'unary')

    for channel in (channel1, channel2):
        channel.close()
def test_pin_tags_are_put_in_span(self):
    """Tags set on the server/client pins must appear on the emitted spans."""
    # DEV: stop and restart server to catch overriden pin
    self._stop_server()
    Pin.override(constants.GRPC_PIN_MODULE_SERVER, service='server1')
    Pin.override(constants.GRPC_PIN_MODULE_SERVER, tags={'tag1': 'server'})
    Pin.override(constants.GRPC_PIN_MODULE_CLIENT, tags={'tag2': 'client'})
    self._start_server()

    with grpc.insecure_channel('localhost:%d' % (_GRPC_PORT)) as channel:
        HelloStub(channel).SayHello(HelloRequest(name='test'))

    spans = self.get_spans()
    assert len(spans) == 2

    server_span, client_span = spans
    assert server_span.service == 'server1'
    assert server_span.get_tag('tag1') == 'server'
    assert client_span.get_tag('tag2') == 'client'