def test_metrics_trace_too_big(self):
    """An oversized trace is dropped and the drop is reported via dogstatsd.

    Writes 10 small traces (5 spans each) that should all be accepted, then one
    huge trace (2**10 spans with 5000-char names) that cannot fit and must be
    dropped with a ``reason:t_too_big`` tag on the drop metrics.
    """
    statsd = mock.Mock()
    writer = AgentWriter(agent_url="http://asdf:1234", dogstatsd=statsd, report_metrics=True)

    # Ten small traces: each one is 5 spans chained parent->child.
    for trace_id in range(10):
        small_trace = [
            Span(tracer=None, name="name", trace_id=trace_id, span_id=span_id, parent_id=span_id - 1 or None)
            for span_id in range(5)
        ]
        writer.write(small_trace)

    # One deliberately enormous trace (long span names, 1024 spans) that
    # exceeds the buffer capacity and must be rejected.
    huge_trace = [
        Span(tracer=None, name="a" * 5000, trace_id=trace_id, span_id=span_id, parent_id=span_id - 1 or None)
        for span_id in range(2 ** 10)
    ]
    writer.write(huge_trace)

    writer.stop()
    writer.join()

    statsd.distribution.assert_has_calls(
        [
            mock.call("datadog.tracer.buffer.accepted.traces", 10, tags=[]),
            mock.call("datadog.tracer.buffer.accepted.spans", 50, tags=[]),
            mock.call("datadog.tracer.buffer.dropped.traces", 1, tags=["reason:t_too_big"]),
            mock.call("datadog.tracer.buffer.dropped.bytes", AnyInt(), tags=["reason:t_too_big"]),
            mock.call("datadog.tracer.http.requests", writer.RETRY_ATTEMPTS, tags=[]),
            mock.call("datadog.tracer.http.errors", 1, tags=["type:err"]),
            mock.call("datadog.tracer.http.dropped.bytes", AnyInt(), tags=[]),
        ],
        any_order=True,
    )
def test_payload_too_large(encoding, monkeypatch):
    """When traces overflow the (tiny) configured buffer, a warning is logged.

    The buffer and max payload sizes are pinned to 4KB via environment
    variables, then far more trace data than that is generated; the writer
    must log the "cannot fit trace" warning and never log an error.
    """
    SIZE = 1 << 12  # 4KB
    monkeypatch.setenv("DD_TRACE_API_VERSION", encoding)
    monkeypatch.setenv("DD_TRACE_WRITER_BUFFER_SIZE_BYTES", str(SIZE))
    monkeypatch.setenv("DD_TRACE_WRITER_MAX_PAYLOAD_SIZE_BYTES", str(SIZE))

    t = Tracer()
    assert t.writer._max_payload_size == SIZE
    assert t.writer._buffer_size == SIZE
    # Make sure a flush doesn't happen partway through.
    t.configure(writer=AgentWriter(agent.get_trace_url(), processing_interval=1000))

    with mock.patch("ddtrace.internal.writer.log") as log:
        # v0.5 encodes far more compactly, so it needs many more traces to overflow.
        trace_count = 100000 if encoding == "v0.5" else 1000
        for idx in range(trace_count):
            with t.trace("operation") as span:
                span.set_tag(str(idx), "b" * 190)
                span.set_tag(str(idx), "a" * 190)
        t.shutdown()

        expected = mock.call(
            "trace buffer (%s traces %db/%db) cannot fit trace of size %db, dropping",
            AnyInt(),
            AnyInt(),
            AnyInt(),
            AnyInt(),
        )
        log.warning.assert_has_calls([expected])
        log.error.assert_not_called()
def test_single_trace_too_large():
    """A single trace larger than the payload limit is dropped with a warning.

    Builds one giant trace ("huge" root with 100000 children) and checks the
    writer logs the "larger than payload limit" warning rather than an error.
    """
    t = Tracer()
    with mock.patch("ddtrace.internal.writer.log") as log:
        with t.trace("huge"):
            for i in range(100000):
                # Bug fix: the original used `tracer.trace(...)`, an undefined
                # name in this function — child spans must be created on the
                # same tracer instance `t` so they join the "huge" trace.
                with t.trace("operation") as s:
                    s.set_tag("a" * 10, "b" * 10)
        t.shutdown()

        calls = [
            mock.call("trace (%db) larger than payload limit (%db), dropping", AnyInt(), AnyInt())
        ]
        log.warning.assert_has_calls(calls)
        log.error.assert_not_called()
def test_metrics():
    """With health metrics enabled, accepted traces/spans are reported.

    Finishes 5 traces of 3000 spans each and verifies the dogstatsd mock
    received the corresponding distribution calls with no warnings or errors.
    """
    with override_global_config(dict(health_metrics_enabled=True)):
        t = Tracer()
        statsd_mock = mock.Mock()
        t.writer.dogstatsd = statsd_mock
        assert t.writer._report_metrics

        with mock.patch("ddtrace.internal.writer.log") as log:
            for _ in range(5):
                # Open 3000 spans, then finish them all to complete one trace.
                open_spans = [t.trace("op") for _ in range(3000)]
                for span in open_spans:
                    span.finish()
            t.shutdown()

            log.warning.assert_not_called()
            log.error.assert_not_called()

            statsd_mock.distribution.assert_has_calls(
                [
                    mock.call("datadog.tracer.buffer.accepted.traces", 5, tags=[]),
                    mock.call("datadog.tracer.buffer.accepted.spans", 15000, tags=[]),
                    mock.call("datadog.tracer.http.requests", 1, tags=[]),
                    mock.call("datadog.tracer.http.sent.bytes", AnyInt()),
                ],
                any_order=True,
            )
def test_write_sync(self):
    """In sync mode a write reports metrics immediately (no stop/join needed).

    The agent URL is unreachable, so the writer retries and then reports the
    HTTP error and dropped bytes alongside the accepted trace/span counts.
    """
    statsd = mock.Mock()
    writer = AgentWriter(agent_url="http://asdf:1234", dogstatsd=statsd, report_metrics=True, sync_mode=True)

    # One trace of 5 spans, chained parent -> child.
    trace = [
        Span(tracer=None, name="name", trace_id=1, span_id=span_id, parent_id=span_id - 1 or None)
        for span_id in range(5)
    ]
    writer.write(trace)

    statsd.distribution.assert_has_calls(
        [
            mock.call("datadog.tracer.buffer.accepted.traces", 1, tags=[]),
            mock.call("datadog.tracer.buffer.accepted.spans", 5, tags=[]),
            mock.call("datadog.tracer.http.requests", writer.RETRY_ATTEMPTS, tags=[]),
            mock.call("datadog.tracer.http.errors", 1, tags=["type:err"]),
            mock.call("datadog.tracer.http.dropped.bytes", AnyInt(), tags=[]),
        ],
        any_order=True,
    )
def test_single_trace_too_large(encoding, monkeypatch):
    """A single trace exceeding the payload buffer limit is dropped, per encoding.

    Parametrized over the API encoding version; one giant trace (200000 child
    spans) must trigger the "larger than payload buffer limit" warning and no
    error.
    """
    monkeypatch.setenv("DD_TRACE_API_VERSION", encoding)
    t = Tracer()

    with mock.patch("ddtrace.internal.writer.log") as log:
        with t.trace("huge"):
            for _ in range(200000):
                with t.trace("operation") as span:
                    span.set_tag("a" * 10, "b" * 10)
        t.shutdown()

        expected = mock.call(
            "trace (%db) larger than payload buffer limit (%db), dropping",
            AnyInt(),
            AnyInt(),
        )
        log.warning.assert_has_calls([expected])
        log.error.assert_not_called()
def test_payload_too_large():
    """Overflowing the trace buffer logs the "cannot fit trace" warning.

    Generates 100000 small traces with a long processing interval (so no
    intermediate flush occurs) and checks the writer warns — never errors —
    when the buffer cannot fit a trace.
    """
    t = Tracer()
    # Make sure a flush doesn't happen partway through.
    t.configure(writer=AgentWriter(agent.get_trace_url(), processing_interval=1000))

    with mock.patch("ddtrace.internal.writer.log") as log:
        for idx in range(100000):
            with t.trace("operation") as span:
                span.set_tag(str(idx), "b" * 190)
                span.set_tag(str(idx), "a" * 190)
        t.shutdown()

        expected = mock.call(
            "trace buffer (%s traces %db/%db) cannot fit trace of size %db, dropping",
            AnyInt(),
            AnyInt(),
            AnyInt(),
            AnyInt(),
        )
        log.warning.assert_has_calls([expected])
        log.error.assert_not_called()