def test_writer_reuse_connections_envvar(monkeypatch):
    """DD_TRACE_WRITER_REUSE_CONNECTIONS env var controls the writer's _reuse_connections flag."""
    for raw_value, expected in (("false", False), ("true", True)):
        monkeypatch.setenv("DD_TRACE_WRITER_REUSE_CONNECTIONS", raw_value)
        writer = AgentWriter(agent_url="http://localhost:9126")
        assert bool(writer._reuse_connections) is expected
def test_send_stats(self):
    """_send_stats requires both a dogstatsd client and health_metrics_enabled."""
    dogstatsd_client = mock.Mock()
    worker = AgentWriter(dogstatsd=dogstatsd_client)
    # Off by default even with a client attached.
    assert worker._send_stats is False
    with self.override_global_config(dict(health_metrics_enabled=True)):
        assert worker._send_stats is True

    # Without a client, the config flag alone is not enough.
    worker = AgentWriter(dogstatsd=None)
    assert worker._send_stats is False
    with self.override_global_config(dict(health_metrics_enabled=True)):
        assert worker._send_stats is False
def agent_writer(self):
    """Lazily build (and cache) an AgentWriter matching ``self.agent_url``.

    Supports http/https (host + port) and unix-socket agent URLs; any other
    scheme is rejected.
    """
    if self._agent_writer is None:
        parsed = urlparse(self.agent_url)
        scheme = parsed.scheme
        if scheme in ("http", "https"):
            self._agent_writer = AgentWriter(
                hostname=parsed.hostname,
                port=parsed.port,
                https=scheme == "https",
            )
        elif scheme == "unix":
            self._agent_writer = AgentWriter(uds_path=parsed.path)
        else:
            raise ValueError("Unknown scheme `%s` for agent URL" % scheme)
    return self._agent_writer
def test_drop_reason_buffer_full(self):
    """A write that no longer fits in a full buffer is dropped with reason:full."""
    statsd = mock.Mock()
    metrics_reset_mock = mock.Mock()
    writer = AgentWriter(agent_url="http://asdf:1234", buffer_size=5300, dogstatsd=statsd, report_metrics=False)
    writer._metrics_reset = metrics_reset_mock
    # Fill the buffer with ten 5-span traces.
    for i in range(10):
        writer.write(
            [Span(tracer=None, name="name", trace_id=i, span_id=j, parent_id=j - 1 or None) for j in range(5)]
        )
    # One more trace cannot fit anymore and must be dropped.
    writer.write(
        [Span(tracer=None, name="a", trace_id=i, span_id=j, parent_id=j - 1 or None) for j in range(5)]
    )
    writer.stop()
    writer.join()
    metrics_reset_mock.assert_called_once()
    assert 1 == writer._metrics["buffer.dropped.traces"]["count"]
    assert ["reason:full"] == writer._metrics["buffer.dropped.traces"]["tags"]
def test_write_sync(self):
    """In sync mode a write flushes immediately and reports metrics via dogstatsd."""
    statsd = mock.Mock()
    writer = AgentWriter(agent_url="http://asdf:1234", dogstatsd=statsd, report_metrics=True, sync_mode=True)
    trace = [Span(tracer=None, name="name", trace_id=1, span_id=j, parent_id=j - 1 or None) for j in range(5)]
    writer.write(trace)
    # The endpoint is unreachable, so accept + retry + error metrics are expected.
    expected_distributions = [
        mock.call("datadog.tracer.buffer.accepted.traces", 1, tags=[]),
        mock.call("datadog.tracer.buffer.accepted.spans", 5, tags=[]),
        mock.call("datadog.tracer.http.requests", writer.RETRY_ATTEMPTS, tags=[]),
        mock.call("datadog.tracer.http.errors", 1, tags=["type:err"]),
        mock.call("datadog.tracer.http.dropped.bytes", AnyInt(), tags=[]),
    ]
    statsd.distribution.assert_has_calls(expected_distributions, any_order=True)
def test_additional_headers():
    """Headers from _DD_TRACE_WRITER_ADDITIONAL_HEADERS are parsed into writer._headers."""
    env = dict(_DD_TRACE_WRITER_ADDITIONAL_HEADERS="additional-header:additional-value,header2:value2")
    with override_env(env):
        writer = AgentWriter(agent_url="http://localhost:9126")
        assert writer._headers["additional-header"] == "additional-value"
        assert writer._headers["header2"] == "value2"
def test_metrics_trace_too_big(self):
    """A trace larger than the buffer is dropped and tagged reason:t_too_big."""
    statsd = mock.Mock()
    writer = AgentWriter(agent_url="http://asdf:1234", dogstatsd=statsd, report_metrics=True)
    # Ten ordinary 5-span traces are accepted.
    for i in range(10):
        writer.write(
            [Span(tracer=None, name="name", trace_id=i, span_id=j, parent_id=j - 1 or None) for j in range(5)]
        )
    # One oversized trace (5000-char names, 2**10 spans) cannot fit and is dropped.
    writer.write(
        [Span(tracer=None, name="a" * 5000, trace_id=i, span_id=j, parent_id=j - 1 or None) for j in range(2 ** 10)]
    )
    writer.stop()
    writer.join()
    expected_distributions = [
        mock.call("datadog.tracer.buffer.accepted.traces", 10, tags=[]),
        mock.call("datadog.tracer.buffer.accepted.spans", 50, tags=[]),
        mock.call("datadog.tracer.buffer.dropped.traces", 1, tags=["reason:t_too_big"]),
        mock.call("datadog.tracer.buffer.dropped.bytes", AnyInt(), tags=["reason:t_too_big"]),
        mock.call("datadog.tracer.http.requests", writer.RETRY_ATTEMPTS, tags=[]),
        mock.call("datadog.tracer.http.errors", 1, tags=["type:err"]),
        mock.call("datadog.tracer.http.dropped.bytes", AnyInt(), tags=[]),
    ]
    statsd.distribution.assert_has_calls(expected_distributions, any_order=True)
def test_payload_too_large(encoding, monkeypatch):
    """Traces overflowing the payload buffer are dropped with a warning, never an error."""
    size_bytes = 1 << 12  # 4KB
    monkeypatch.setenv("DD_TRACE_API_VERSION", encoding)
    monkeypatch.setenv("DD_TRACE_WRITER_BUFFER_SIZE_BYTES", str(size_bytes))
    monkeypatch.setenv("DD_TRACE_WRITER_MAX_PAYLOAD_SIZE_BYTES", str(size_bytes))
    t = Tracer()
    assert t.writer._max_payload_size == size_bytes
    assert t.writer._buffer_size == size_bytes
    # Make sure a flush doesn't happen partway through.
    t.configure(writer=AgentWriter(agent.get_trace_url(), processing_interval=1000))
    n_traces = 100000 if encoding == "v0.5" else 1000
    with mock.patch("ddtrace.internal.writer.log") as log:
        for i in range(n_traces):
            with t.trace("operation") as s:
                s.set_tag(str(i), "b" * 190)
                s.set_tag(str(i), "a" * 190)
        t.shutdown()
        log.warning.assert_has_calls(
            [
                mock.call(
                    "trace buffer (%s traces %db/%db) cannot fit trace of size %db, dropping",
                    AnyInt(),
                    AnyInt(),
                    AnyInt(),
                    AnyInt(),
                )
            ]
        )
        log.error.assert_not_called()
def test_filters(writer, tracer):
    """Trace filters configured via settings run before traces reach the writer."""
    if writer == "sync":
        writer = AgentWriter(
            tracer.writer.agent_url,
            priority_sampler=tracer.priority_sampler,
            sync_mode=True,
        )
        # Need to copy the headers which contain the test token to associate
        # traces with this test case.
        writer._headers = tracer.writer._headers
    else:
        writer = tracer.writer

    class FilterMutate(object):
        """Filter that stamps a fixed tag on every span of a trace."""

        def __init__(self, key, value):
            self.key = key
            self.value = value

        def process_trace(self, trace):
            for span in trace:
                span.set_tag(self.key, self.value)
            return trace

    tracer.configure(
        settings={"FILTERS": [FilterMutate("boop", "beep")]},
        writer=writer,
    )

    with tracer.trace("root"):
        with tracer.trace("child"):
            pass
    tracer.shutdown()
def test_metrics_bad_endpoint(self):
    """Health metrics are still emitted when the agent endpoint is unreachable."""
    statsd = mock.Mock()
    writer = AgentWriter(dogstatsd=statsd, report_metrics=True, hostname="asdf", port=1234)
    for i in range(10):
        writer.write(
            [Span(tracer=None, name="name", trace_id=i, span_id=j, parent_id=j - 1 or None) for j in range(5)]
        )
    writer.stop()
    writer.join()
    statsd.increment.assert_has_calls([mock.call("datadog.tracer.http.requests")])
    expected_distributions = [
        mock.call("datadog.tracer.buffer.accepted.traces", 10, tags=[]),
        mock.call("datadog.tracer.buffer.accepted.spans", 50, tags=[]),
        mock.call("datadog.tracer.http.requests", 1, tags=[]),
        mock.call("datadog.tracer.http.errors", 1, tags=["type:err"]),
        mock.call("datadog.tracer.http.dropped.bytes", AnyInt(), tags=[]),
    ]
    statsd.distribution.assert_has_calls(expected_distributions, any_order=True)
def test_drop_reason_encoding_error(self):
    """Traces whose encoding raises are counted under encoder.dropped.traces."""
    n_traces = 10
    statsd = mock.Mock()
    writer_encoder = mock.Mock()
    # Bind a real __len__ method onto the mock so len(writer_encoder)
    # reports n_traces (plain attribute assignment is not enough for a
    # dunder on a Mock, hence the __get__ binding).
    writer_encoder.__len__ = (lambda *args: n_traces).__get__(writer_encoder)
    writer_metrics_reset = mock.Mock()
    # Every encode attempt fails, so every buffered trace should be dropped.
    writer_encoder.encode.side_effect = Exception
    writer = AgentWriter(agent_url="http://asdf:1234", dogstatsd=statsd, report_metrics=False)
    writer._encoder = writer_encoder
    writer._metrics_reset = writer_metrics_reset
    for i in range(n_traces):
        writer.write(
            [Span(tracer=None, name="name", trace_id=i, span_id=j, parent_id=j - 1 or None) for j in range(5)]
        )
    writer.stop()
    writer.join()
    writer_metrics_reset.assert_called_once()
    # All ten traces were lost to the failing encoder.
    assert 10 == writer._metrics["encoder.dropped.traces"]["count"]
def test_agent_url_path(endpoint_assert_path):
    """The writer hits the expected endpoint path for plain and base-path agent URLs."""

    def _flush_with_url(url):
        writer = AgentWriter(agent_url=url)
        writer._buffer.put(b"foobar")
        writer.flush_queue(raise_exc=True)

    # test without base path
    endpoint_assert_path("/v0.")
    _flush_with_url("http://%s:%s/" % (_HOST, _PORT))
    # test without base path nor trailing slash
    _flush_with_url("http://%s:%s" % (_HOST, _PORT))
    # test with a base path
    endpoint_assert_path("/test/v0.")
    _flush_with_url("http://%s:%s/test/" % (_HOST, _PORT))
def test_flush_connection_reset(endpoint_test_reset_server):
    """_send_payload surfaces a connection reset from the agent as an exception."""
    writer = AgentWriter(agent_url="http://%s:%s" % (_HOST, _RESET_PORT))
    # Python 3 may also raise ConnectionResetError instead of BadStatusLine.
    expected_errors = (httplib.BadStatusLine, ConnectionResetError) if PY3 else (httplib.BadStatusLine,)
    with pytest.raises(expected_errors):
        writer._send_payload("foobar", 12)
def test_flush_connection_timeout_connect():
    """Connecting to a closed port makes _send_payload raise the platform socket error."""
    writer = AgentWriter(agent_url="http://%s:%s" % (_HOST, 2019))
    expected_error = OSError if PY3 else socket.error
    with pytest.raises(expected_error):
        writer._send_payload("foobar", 12)
def test_flush_connection_timeout_connect():
    """Connecting to a closed port makes flush_queue(raise_exc=True) raise the socket error."""
    writer = AgentWriter(agent_url="http://%s:%s" % (_HOST, 2019))
    expected_error = OSError if PY3 else socket.error
    with pytest.raises(expected_error):
        writer._buffer.put(b"foobar")
        writer.flush_queue(raise_exc=True)
def test_writer_reuse_connections():
    # Ensure connection is not reused
    # NOTE(review): reuse_connections=True yet _conn is discarded after each
    # flush -- the flag's polarity looks inverted relative to its name.
    # TODO: confirm against AgentWriter's connection-handling code.
    writer = AgentWriter(agent_url="http://localhost:9126", reuse_connections=True)
    # Do an initial flush to get a connection
    writer.flush_queue()
    assert writer._conn is None
    writer.flush_queue()
    assert writer._conn is None
def test_double_stop():
    """Stopping a writer twice must be safe and leave it STOPPED."""
    writer = AgentWriter(agent_url="http://dne:1234")
    writer.write([])
    assert writer.status == service.ServiceStatus.RUNNING
    # The second stop must be a no-op, not an exception.
    for _ in range(2):
        writer.stop()
        assert writer.status == service.ServiceStatus.STOPPED
def test_flush_connection_reset(endpoint_test_reset_server):
    """A connection reset during flush_queue(raise_exc=True) propagates to the caller."""
    writer = AgentWriter(agent_url="http://%s:%s" % (_HOST, _RESET_PORT))
    # Python 3 may also raise ConnectionResetError instead of BadStatusLine.
    expected_errors = (httplib.BadStatusLine, ConnectionResetError) if PY3 else (httplib.BadStatusLine,)
    with pytest.raises(expected_errors):
        writer._buffer.put(b"foobar")
        writer.flush_queue(raise_exc=True)
def test_writer_reuse_connections_false():
    # Ensure connection is reused
    # NOTE(review): reuse_connections=False yet _conn persists across flushes
    # (i.e. the underlying connection IS kept) -- the flag's polarity looks
    # inverted relative to its name. TODO: confirm against AgentWriter internals.
    writer = AgentWriter(agent_url="http://localhost:9126", reuse_connections=False)
    # Do an initial flush to get a connection
    writer.flush_queue()
    conn = writer._conn
    # And another to potentially have it reset
    writer.flush_queue()
    assert writer._conn is conn
def test_writer_recreate_api_version(init_api_version, api_version, endpoint, encoder_cls):
    """recreate() must preserve the resolved api version, endpoint and encoder type."""

    def _assert_api_config(w):
        assert w._api_version == api_version
        assert w._endpoint == endpoint
        assert isinstance(w._encoder, encoder_cls)

    writer = AgentWriter(agent_url="http://dne:1234", api_version=init_api_version)
    _assert_api_config(writer)
    _assert_api_config(writer.recreate())
def test_synchronous_writer():
    """A sync-mode writer can be installed on a tracer and traces complete cleanly."""
    tracer = Tracer()
    sync_writer = AgentWriter(tracer.writer.agent_url, sync_mode=True, priority_sampler=tracer.priority_sampler)
    tracer.configure(writer=sync_writer)

    with tracer.trace("operation1", service="my-svc"):
        with tracer.trace("child1"):
            pass

    with tracer.trace("operation2", service="my-svc"):
        with tracer.trace("child2"):
            pass
def test_flush_queue_raise():
    """flush_queue only propagates network errors when raise_exc=True."""
    writer = AgentWriter(agent_url="http://dne:1234")

    # Swallowed by default.
    writer.write([])
    writer.flush_queue(raise_exc=False)

    expected_error = OSError if PY3 else IOError
    with pytest.raises(expected_error):
        writer.write([])
        writer.flush_queue(raise_exc=True)
def test_configure_dogstatsd_url_host_port(self):
    """A host:port dogstatsd_url applies to both default and user-supplied writers."""

    def _assert_dogstatsd(t):
        assert t.writer.dogstatsd.host == "foo"
        assert t.writer.dogstatsd.port == 1234

    tracer = Tracer()
    tracer.configure(dogstatsd_url="foo:1234")
    _assert_dogstatsd(tracer)

    tracer = Tracer()
    tracer.configure(writer=AgentWriter("http://localhost:8126"), dogstatsd_url="foo:1234")
    _assert_dogstatsd(tracer)
def test_double_stop():
    """A second stop() is a no-op: the writer stays started but not alive."""
    writer = AgentWriter(agent_url="http://dne:1234")
    writer.write([])
    assert writer.started
    # Stopping twice must not raise and must leave the worker dead both times.
    for _ in range(2):
        writer.stop()
        assert writer.started
        assert not writer.is_alive()
def test_flush_log(caplog):
    """A successful flush emits a 'sent ... in ...s' DEBUG log entry."""
    caplog.set_level(logging.INFO)
    writer = AgentWriter(agent.get_trace_url())
    with mock.patch("ddtrace.internal.writer.log") as log:
        writer.write([])
        writer.flush_queue(raise_exc=True)
        expected = [mock.call(logging.DEBUG, "sent %s in %.5fs", AnyStr(), AnyFloat())]
        log.log.assert_has_calls(expected)
def test_metrics_disabled(self):
    """With report_metrics=False no dogstatsd calls may ever be made."""
    dogstatsd_mock = mock.Mock()
    writer = AgentWriter(agent_url="http://asdf:1234", dogstatsd=dogstatsd_mock, report_metrics=False)
    for trace_id in range(10):
        spans = [
            Span(tracer=None, name="name", trace_id=trace_id, span_id=j, parent_id=j - 1 or None)
            for j in range(5)
        ]
        writer.write(spans)
    writer.stop()
    writer.join()
    dogstatsd_mock.increment.assert_not_called()
    dogstatsd_mock.distribution.assert_not_called()
def create_worker(self, filters=None, api_class=DummyAPI):
    """Build an AgentWriter, feed it N_TRACES 7-span traces, run it to completion, and return it."""
    self.dogstatsd = mock.Mock()
    worker = AgentWriter(dogstatsd=self.dogstatsd, filters=filters)
    self.api = api_class()
    worker.api = self.api
    for trace_id in range(self.N_TRACES):
        trace = [
            Span(tracer=None, name='name', trace_id=trace_id, span_id=j, parent_id=j - 1 or None)
            for j in range(7)
        ]
        worker.write(trace)
    worker.stop()
    worker.join()
    return worker
def test_configure_dogstatsd_url_socket(self):
    """A unix-socket dogstatsd_url applies to both default and user-supplied writers."""

    def _assert_socket(t):
        assert t.writer.dogstatsd.host is None
        assert t.writer.dogstatsd.port is None
        assert t.writer.dogstatsd.socket_path == "/foo.sock"

    tracer = Tracer()
    tracer.configure(dogstatsd_url="unix:///foo.sock")
    _assert_socket(tracer)

    tracer = Tracer()
    tracer.configure(writer=AgentWriter("http://localhost:8126"), dogstatsd_url="unix:///foo.sock")
    _assert_socket(tracer)
def test_racing_start():
    """Concurrent writes from many threads must all land in the buffer."""
    writer = AgentWriter(agent_url="http://dne:1234")

    def _write_one(idx):
        writer.write([Span(None, str(idx))])

    threads = [threading.Thread(target=_write_one, args=(i,)) for i in range(100)]
    for thread in threads:
        thread.start()
    for thread in threads:
        thread.join()
    # No write may be lost to a race during writer startup.
    assert len(writer._buffer) == 100
def create_worker(self, filters=None, api_class=DummyAPI, enable_stats=False):
    """Build an AgentWriter under the given health-metrics setting, feed it N_TRACES traces, and return it."""
    with self.override_global_config(dict(health_metrics_enabled=enable_stats)):
        self.dogstatsd = mock.Mock()
        worker = AgentWriter(dogstatsd=self.dogstatsd, filters=filters)
        # Report stats every interval so the test can observe them quickly.
        worker._STATS_EVERY_INTERVAL = 1
        self.api = api_class()
        worker.api = self.api
        for trace_id in range(self.N_TRACES):
            trace = [
                Span(tracer=None, name="name", trace_id=trace_id, span_id=j, parent_id=j - 1 or None)
                for j in range(7)
            ]
            worker.write(trace)
        worker.stop()
        worker.join()
        return worker