def test_partial_flush_log(run_python_code_in_subprocess):
    """Partial flushing emits the expected debug log lines when the span threshold is met."""
    min_spans = 2
    tracer = Tracer()
    tracer.configure(
        partial_flush_enabled=True,
        partial_flush_min_spans=min_spans,
    )
    root = tracer.trace("1")
    middle = tracer.trace("2")
    leaf = tracer.trace("3")
    trace_id = leaf.trace_id
    with mock.patch("ddtrace.internal.processor.trace.log") as log:
        # Finishing two of the three spans crosses the partial-flush threshold.
        leaf.finish()
        middle.finish()
        expected = [
            mock.call("trace %d has %d spans, %d finished", trace_id, 3, 1),
            mock.call("Partially flushing %d spans for trace %d", min_spans, trace_id),
        ]
        log.debug.assert_has_calls(expected)
        root.finish()
        tracer.shutdown()
def test_payload_too_large(encoding, monkeypatch):
    """A trace too large for the writer buffer is dropped with a warning, never an error."""
    SIZE = 1 << 12  # 4KB
    monkeypatch.setenv("DD_TRACE_API_VERSION", encoding)
    monkeypatch.setenv("DD_TRACE_WRITER_BUFFER_SIZE_BYTES", str(SIZE))
    monkeypatch.setenv("DD_TRACE_WRITER_MAX_PAYLOAD_SIZE_BYTES", str(SIZE))

    tracer = Tracer()
    assert tracer.writer._max_payload_size == SIZE
    assert tracer.writer._buffer_size == SIZE
    # Make sure a flush doesn't happen partway through.
    tracer.configure(writer=AgentWriter(agent.get_trace_url(), processing_interval=1000))
    with mock.patch("ddtrace.internal.writer.log") as log:
        # v0.5 deduplicates strings, so far more spans are needed to overflow.
        span_count = 100000 if encoding == "v0.5" else 1000
        for index in range(span_count):
            with tracer.trace("operation") as span:
                span.set_tag(str(index), "b" * 190)
                span.set_tag(str(index), "a" * 190)
        tracer.shutdown()
        expected = [
            mock.call(
                "trace buffer (%s traces %db/%db) cannot fit trace of size %db, dropping",
                AnyInt(),
                AnyInt(),
                AnyInt(),
                AnyInt(),
            )
        ]
        log.warning.assert_has_calls(expected)
        log.error.assert_not_called()
def test_single_trace_uds():
    """Sending one trace over a working UDS socket logs no warnings or errors."""
    tracer = Tracer()
    socket_path = "/tmp/ddagent/trace.sock"
    tracer.configure(uds_path=socket_path)
    with mock.patch("ddtrace.internal.writer.log") as log:
        tracer.trace("client.testing").finish()
        tracer.shutdown()
        log.warning.assert_not_called()
        log.error.assert_not_called()
def test_uds_wrong_socket_path():
    """A nonexistent UDS path causes a send-failure error log."""
    tracer = Tracer()
    tracer.configure(uds_path="/tmp/ddagent/nosockethere")
    with mock.patch("ddtrace.internal.writer.log") as log:
        tracer.trace("client.testing").finish()
        tracer.shutdown()
        expected = [
            mock.call(
                "failed to send traces to Datadog Agent at %s",
                "unix:///tmp/ddagent/nosockethere",
                exc_info=True,
            )
        ]
        log.error.assert_has_calls(expected)
def test_synchronous_writer():
    """Traces can be created and flushed through a sync-mode writer without error."""
    tracer = Tracer()
    sync_writer = AgentWriter(
        tracer.writer.agent_url,
        sync_mode=True,
        priority_sampler=tracer.priority_sampler,
    )
    tracer.configure(writer=sync_writer)
    # Two independent parent/child traces, flushed synchronously as they finish.
    for parent_name, child_name in (("operation1", "child1"), ("operation2", "child2")):
        with tracer.trace(parent_name, service="my-svc"):
            with tracer.trace(child_name):
                pass
def test_trace_bad_url():
    """An unreachable agent host causes a send-failure error log."""
    tracer = Tracer()
    tracer.configure(hostname="bad", port=1111)
    with mock.patch("ddtrace.internal.writer.log") as log:
        with tracer.trace("op"):
            pass
        tracer.shutdown()
        expected = [
            mock.call(
                "failed to send traces to Datadog Agent at %s",
                "http://bad:1111",
                exc_info=True,
            )
        ]
        log.error.assert_has_calls(expected)
def test_trace_top_level_span_processor_different_service_name():
    """Parent and child with different service names are both marked top level."""
    tracer = Tracer()
    tracer.configure(writer=DummyWriter())

    with tracer.trace("parent", service="top_level_test_service") as parent:
        with tracer.trace("child", service="top_level_test_service2") as child:
            pass

    # Each span is a service entry point because the services differ.
    assert parent.get_metric("_dd.top_level") == 1
    assert child.get_metric("_dd.top_level") == 1
def test_trace_top_level_span_processor_same_service_name():
    """Only the parent is marked top level when the child shares its service."""
    tracer = Tracer()
    tracer.configure(writer=DummyWriter())

    with tracer.trace("parent", service="top_level_test") as parent:
        with tracer.trace("child") as child:
            pass

    assert parent.get_metric("_dd.top_level") == 1
    # The child inherits the parent's service, so it gets no top-level metric.
    assert "_dd.top_level" not in child.metrics
def test_single_trace_uds(encoding, monkeypatch):
    """Sending one trace over a working UDS socket logs nothing, for any API encoding."""
    monkeypatch.setenv("DD_TRACE_API_VERSION", encoding)
    tracer = Tracer()
    socket_path = "/tmp/ddagent/trace.sock"
    tracer.configure(uds_path=socket_path)
    with mock.patch("ddtrace.internal.writer.log") as log:
        tracer.trace("client.testing").finish()
        tracer.shutdown()
        log.warning.assert_not_called()
        log.error.assert_not_called()
def test_configure_keeps_api_hostname_and_port():
    """
    Ensures that when calling configure without specifying hostname and port,
    previous overrides have been kept.
    """
    tracer = Tracer()
    default_url = "http://localhost:9126" if AGENT_VERSION == "testagent" else "http://localhost:8126"
    assert tracer.writer.agent_url == default_url

    tracer.configure(hostname="127.0.0.1", port=8127)
    assert tracer.writer.agent_url == "http://127.0.0.1:8127"

    # Reconfiguring an unrelated setting must not reset the agent URL.
    tracer.configure(priority_sampling=True)
    assert tracer.writer.agent_url == "http://127.0.0.1:8127"
def test_trace_top_level_span_processor_orphan_span():
    """A span flushed without its parent in the trace chunk gets top_level == 0."""
    tracer = Tracer()
    tracer.configure(writer=DummyWriter())

    with tracer.trace("parent") as parent:
        pass

    with tracer.start_span("orphan span", child_of=parent) as orphan_span:
        pass

    # top_level in orphan_span should be explicitly set to zero/false
    assert orphan_span.get_metric("_dd.top_level") == 0
def test_uds_wrong_socket_path(encoding, monkeypatch):
    """A nonexistent UDS path causes a send-failure error naming the full endpoint URL."""
    monkeypatch.setenv("DD_TRACE_API_VERSION", encoding)
    tracer = Tracer()
    tracer.configure(uds_path="/tmp/ddagent/nosockethere")
    with mock.patch("ddtrace.internal.writer.log") as log:
        tracer.trace("client.testing").finish()
        tracer.shutdown()
        # The endpoint path embeds the API version; v0.4 is the default.
        api_version = encoding or "v0.4"
        expected = [
            mock.call(
                "failed to send traces to Datadog Agent at %s",
                "unix:///tmp/ddagent/nosockethere/{}/traces".format(api_version),
                exc_info=True,
            )
        ]
        log.error.assert_has_calls(expected)
def test_trace_bad_url(encoding, monkeypatch):
    """An unreachable agent host causes a send-failure error log, for any API encoding."""
    monkeypatch.setenv("DD_TRACE_API_VERSION", encoding)
    tracer = Tracer()
    tracer.configure(hostname="bad", port=1111)
    with mock.patch("ddtrace.internal.writer.log") as log:
        with tracer.trace("op"):
            pass
        tracer.shutdown()
        expected = [
            mock.call(
                "failed to send traces to Datadog Agent at %s",
                "http://bad:1111",
                exc_info=True,
            )
        ]
        log.error.assert_has_calls(expected)
def test_payload_too_large():
    """An oversized trace is dropped from the buffer with a warning, never an error."""
    tracer = Tracer()
    # Make sure a flush doesn't happen partway through.
    tracer.configure(writer=AgentWriter(processing_interval=1000))
    with mock.patch("ddtrace.internal.writer.log") as log:
        for index in range(100000):
            with tracer.trace("operation") as span:
                span.set_tag(str(index), "b" * 190)
                span.set_tag(str(index), "a" * 190)
        tracer.shutdown()
        expected = [
            mock.call(
                "trace buffer (%s traces %db/%db) cannot fit trace of size %db, dropping",
                AnyInt(),
                AnyInt(),
                AnyInt(),
                AnyInt(),
            )
        ]
        log.warning.assert_has_calls(expected)
        log.error.assert_not_called()
def test_trace_top_level_span_processor_partial_flushing():
    """Top-level marking depends on whether the parent is in the flushed chunk."""
    tracer = Tracer()
    tracer.configure(
        partial_flush_enabled=True,
        partial_flush_min_spans=2,
        writer=DummyWriter(),
    )
    with tracer.trace("parent") as parent:
        with tracer.trace("1") as child1:
            pass
        with tracer.trace("2") as child2:
            pass
        with tracer.trace("3") as child3:
            pass

    # child spans 1 and 2 were partial flushed WITHOUT the parent span in the trace chunk
    assert child1.get_metric("_dd.top_level") == 0
    assert child2.get_metric("_dd.top_level") == 0

    # child span 3 was partial flushed WITH the parent span in the trace chunk
    assert "_dd.top_level" not in child3.metrics
    assert parent.get_metric("_dd.top_level") == 1
def test_synchronous_writer_shutdown():
    """Shutting down a tracer configured with a sync-mode writer must not raise."""
    tracer = Tracer()
    sync_writer = AgentWriter(tracer.writer.agent_url, sync_mode=True)
    tracer.configure(writer=sync_writer)
    # Ensure this doesn't raise.
    tracer.shutdown()