def test_set_flattened_tags_keys():
    """Verify flattening a nested dict produces separator-joined key names."""
    nested = dict(A=1, B=2, C=dict(A=3, B=4, C=dict(A=5, B=6)))
    expected = dict(A=1, B=2, C_A=3, C_B=4, C_C_A=5, C_C_B=6)

    span = Span("test")
    trace_utils.set_flattened_tags(span, nested.items(), sep="_")

    assert span.get_metrics() == expected
def test_aggregator_single_span():
    """A finished single-span trace runs through every processor and reaches the writer."""

    class Proc(TraceProcessor):
        def process_trace(self, trace):
            return trace

    processors = [mock.Mock(wraps=Proc()) for _ in range(2)]
    writer = DummyWriter()
    aggr = SpanAggregator(
        partial_flush_enabled=False,
        partial_flush_min_spans=0,
        trace_processors=processors,
        writer=writer,
    )

    span = Span(None, "span", on_finish=[aggr.on_span_finish])
    aggr.on_span_start(span)
    span.finish()

    for proc in processors:
        proc.process_trace.assert_called_with([span])
    assert writer.pop() == [span]
def test_aggregator_bad_processor():
    """A processor that raises must not stop later processors or the writer."""

    class Proc(TraceProcessor):
        def process_trace(self, trace):
            return trace

    class BadProc(TraceProcessor):
        def process_trace(self, trace):
            raise ValueError

    good_before = mock.Mock(wraps=Proc())
    bad = mock.Mock(wraps=BadProc())
    good_after = mock.Mock(wraps=Proc())

    writer = DummyWriter()
    aggr = SpanAggregator(
        partial_flush_enabled=False,
        partial_flush_min_spans=0,
        trace_processors=[good_before, bad, good_after],
        writer=writer,
    )

    span = Span(None, "span", on_finish=[aggr.on_span_finish])
    aggr.on_span_start(span)
    span.finish()

    # Every processor was invoked, including the one after the failure.
    for proc in (good_before, bad, good_after):
        proc.process_trace.assert_called_with([span])
    assert writer.pop() == [span]
def test_span_api_fork():
    """Span/trace ids generated in a forked child must not collide with the parent's.

    The child sends its generated ids back over a multiprocessing queue; the
    parent verifies both id lists are internally collision-free and mutually
    disjoint. Fix: the parent now reaps the child with ``os.waitpid`` so the
    test does not leave a zombie process behind.
    """
    q = MPQueue()
    pid = os.fork()

    if pid > 0:
        # parent
        parent_ids_list = list(
            chain.from_iterable((s.span_id, s.trace_id) for s in [Span(None, None) for _ in range(100)])
        )
        parent_ids = set(parent_ids_list)
        assert len(parent_ids) == len(parent_ids_list), "Collisions found in parent process ids"

        # q.get() blocks until the child has published its ids.
        child_ids_list = q.get()
        # Reap the child process to avoid leaving a zombie.
        os.waitpid(pid, 0)

        child_ids = set(child_ids_list)
        assert len(child_ids) == len(child_ids_list), "Collisions found in child process ids"
        assert parent_ids & child_ids == set()
    else:
        # child
        try:
            child_ids = list(
                chain.from_iterable((s.span_id, s.trace_id) for s in [Span(None, None) for _ in range(100)])
            )
            q.put(child_ids)
        finally:
            # _exit() skips interpreter cleanup so the child never runs the
            # parent's test-framework teardown.
            os._exit(0)
def test_multi_filter_none():
    """If a filter drops the trace (returns None), processing yields None."""

    class Filter(TraceFilter):
        def process_trace(self, trace):
            return None

    processor = TraceProcessor([Filter(), Filter()])
    trace = [Span(None, "span1"), Span(None, "span2")]
    result = processor.process(trace)
    assert result is None
def test_trace_top_level_span_processor_trace_return_val():
    """TraceTopLevelSpanProcessor.process_trace returns the spans it was given."""
    processor = TraceTopLevelSpanProcessor()

    # An empty trace comes back unchanged.
    empty = []
    assert processor.process_trace(empty) == empty

    # Processing a copy of the trace still returns every span.
    trace = [Span(None, "span1"), Span(None, "span2"), Span(None, "span3")]
    assert processor.process_trace(list(trace)) == trace
def test_set_flattened_tags_exclude_policy():
    """Keys matched by the exclusion policy are dropped from the flattened dict."""
    nested = dict(A=1, B=2, C=dict(A=3, B=4, C=dict(A=5, B=6)))
    expected = dict(A=1, B=2, C_B=4)
    excluded = {"C_A", "C_C"}

    span = Span("test")
    trace_utils.set_flattened_tags(
        span,
        nested.items(),
        sep="_",
        exclude_policy=lambda tag: tag in excluded,
    )

    assert span.get_metrics() == expected
def test_filter_error():
    """A filter that raises is logged and skipped; the original trace is returned."""

    class Filter(TraceFilter):
        def process_trace(self, trace):
            raise Exception()

    f = Filter()
    with mock.patch("ddtrace.internal.processor.log") as log:
        processor = TraceProcessor([f])
        trace = [Span(None, "span1"), Span(None, "span2")]
        result = processor.process(trace)

    log.error.assert_has_calls(
        [mock.call("error applying filter %r to traces", f, exc_info=True)]
    )
    assert result == trace
def run(self):
    """Yield a benchmark loop exercising SamplingRule.matches over generated spans."""
    # Random service and operation name pools of the configured sizes.
    service_pool = [rands() for _ in range(self.num_services)]
    name_pool = [rands() for _ in range(self.num_operations)]

    # One span for every (service, operation) combination.
    spans = [
        Span(tracer=None, service=svc, name=op)
        for svc, op in itertools.product(service_pool, name_pool)
    ]

    # A single rule, targeting one randomly chosen service/operation pair,
    # is reused for every match call.
    rule = SamplingRule(
        service=random.choice(service_pool),
        name=random.choice(name_pool),
        sample_rate=1.0,
    )

    def bench(loops):
        for _ in range(loops):
            for span in iter_n(spans, n=self.num_iterations):
                rule.matches(span)

    yield bench
def test_multi_filter_mutate():
    """Mutations made by each filter in the chain are visible on the output spans."""

    class Filter(TraceFilter):
        def process_trace(self, trace):
            trace[0].set_tag("test", "value")
            return trace

    class Filter2(TraceFilter):
        def process_trace(self, trace):
            trace[1].set_tag("test", "value2")
            return trace

    processor = TraceProcessor([Filter(), Filter2()])
    result = processor.process([Span(None, "span1"), Span(None, "span2")])
    assert [s.get_tag("test") for s in result] == ["value", "value2"]
def gen_spans(nspans=1, trace_id=None):
    """Return a list of ``nspans`` identical test spans sharing ``trace_id``.

    :param nspans: number of spans to create (default 1).
    :param trace_id: trace id assigned to every span (default None).
    """
    # Comprehension replaces the manual range(0, n)/append loop.
    return [
        Span(None, "test.op", resource="resource", service="service", trace_id=trace_id)
        for _ in range(nspans)
    ]
def time_start_span(loops, variant):
    """pyperf time function: measure creating ``variant["nspans"]`` spans per loop.

    :param loops: outer iteration count supplied by the pyperf runner.
    :param variant: dict with ``nspans`` and optional ``trace_id`` keys.
    :return: elapsed wall-clock time for all iterations.
    """
    # Hoist per-variant lookups out of the timed region so the measurement
    # reflects Span creation, not dict.get() overhead on every iteration.
    nspans = variant.get("nspans")
    trace_id = variant.get("trace_id")
    range_it = range(loops)
    t0 = pyperf.perf_counter()
    for _ in range_it:
        for _ in range(nspans):
            Span(None, "test.op", resource="resource", service="service", trace_id=trace_id)
    dt = pyperf.perf_counter() - t0
    return dt
def test_aggregator_partial_flush_0_spans():
    """With partial flushing at 0 spans, every finished span is written immediately."""
    writer = DummyWriter()
    aggr = SpanAggregator(
        partial_flush_enabled=True,
        partial_flush_min_spans=0,
        trace_processors=[],
        writer=writer,
    )

    def new_trace():
        parent = Span("parent", on_finish=[aggr.on_span_finish])
        aggr.on_span_start(parent)
        child = Span("child", on_finish=[aggr.on_span_finish])
        child.trace_id = parent.trace_id
        child.parent_id = parent.span_id
        aggr.on_span_start(child)
        return parent, child

    # Normal usage: each span is flushed on its own as it finishes.
    parent, child = new_trace()
    assert writer.pop() == []
    child.finish()
    assert writer.pop() == [child]
    parent.finish()
    assert writer.pop() == [parent]

    # Parent closes before child: both flushes are marked as partial.
    parent, child = new_trace()
    assert writer.pop() == []
    parent.finish()
    assert writer.pop() == [parent]
    assert parent.get_metric("_dd.py.partial_flush") == 1
    child.finish()
    assert writer.pop() == [child]
    assert child.get_metric("_dd.py.partial_flush") == 1
def test_aggregator_multi_span():
    """Without partial flushing, a trace is written only once all its spans finish."""
    writer = DummyWriter()
    aggr = SpanAggregator(
        partial_flush_enabled=False,
        partial_flush_min_spans=0,
        trace_processors=[],
        writer=writer,
    )

    def new_trace():
        parent = Span(None, "parent", on_finish=[aggr.on_span_finish])
        aggr.on_span_start(parent)
        child = Span(None, "child", on_finish=[aggr.on_span_finish])
        child.trace_id = parent.trace_id
        child.parent_id = parent.span_id
        aggr.on_span_start(child)
        return parent, child

    # Normal usage: nothing is flushed until the parent closes.
    parent, child = new_trace()
    assert writer.pop() == []
    child.finish()
    assert writer.pop() == []
    parent.finish()
    assert writer.pop() == [parent, child]

    # Parent closes before the child: the trace waits for the child.
    parent, child = new_trace()
    assert writer.pop() == []
    parent.finish()
    assert writer.pop() == []
    child.finish()
    assert writer.pop() == [parent, child]
def test_set_flattened_tags_is_flat(items):
    """Ensure flattening a nested dict yields a normalized, single-level dict."""
    span = Span("test")
    trace_utils.set_flattened_tags(span, items)
    tags = span.get_tags()
    assert isinstance(tags, dict)
    assert all(not isinstance(value, dict) for value in tags.values())
def span(self):
    """Yield a bare test span."""
    test_span = Span("some_span")
    yield test_span
def span(self):
    """Yield a test span created with the ambient ``tracer``."""
    test_span = Span(tracer, 'some_span')
    yield test_span
def test_no_filters():
    """With no filters configured, the trace passes through unchanged."""
    processor = TraceProcessor([])
    trace = [Span(None, "span1"), Span(None, "span2")]
    assert processor.process(trace) == trace
def test_aggregator_partial_flush_2_spans():
    """With partial flushing at 2 spans, only batches of >= 2 finished spans flush early."""
    writer = DummyWriter()
    aggr = SpanAggregator(
        partial_flush_enabled=True,
        partial_flush_min_spans=2,
        trace_processors=[],
        writer=writer,
    )

    def new_parent():
        parent = Span(None, "parent", on_finish=[aggr.on_span_finish])
        aggr.on_span_start(parent)
        return parent

    def new_child(parent, name):
        child = Span(None, name, on_finish=[aggr.on_span_finish])
        child.trace_id = parent.trace_id
        child.parent_id = parent.span_id
        aggr.on_span_start(child)
        return child

    # Normal usage: one finished child stays below the threshold, so the
    # whole trace is written only when the parent closes.
    parent = new_parent()
    child = new_child(parent, "child")
    assert writer.pop() == []
    child.finish()
    assert writer.pop() == []
    parent.finish()
    assert writer.pop() == [parent, child]

    # Parent closes before the child: still below threshold, flushed together.
    parent = new_parent()
    child = new_child(parent, "child")
    assert writer.pop() == []
    parent.finish()
    assert writer.pop() == []
    child.finish()
    assert writer.pop() == [parent, child]

    # Two finished children hit the threshold and are partially flushed
    # before the parent finishes.
    parent = new_parent()
    child1 = new_child(parent, "child1")
    child2 = new_child(parent, "child2")
    assert writer.pop() == []
    child1.finish()
    assert writer.pop() == []
    child2.finish()
    assert writer.pop() == [child1, child2]
    parent.finish()
    assert writer.pop() == [parent]
def span(self):
    """Yield a test span created with the ambient ``tracer``."""
    test_span = Span(tracer, "some_span")
    yield test_span
dt = pyperf.perf_counter() - t0 return dt if __name__ == "__main__": runner = pyperf.Runner() runner.metadata["scenario"] = "span" for variant in [dict(nspans=1000), dict(nspans=1000, trace_id=1)]: name = "|".join(f"{k}:{v}" for (k, v) in variant.items()) runner.bench_time_func("perf_group:span|perf_case:init_span|" + name, time_start_span, variant) for variant in [dict(ntags=500, ltags=100), dict(ntags=1000, ltags=300)]: span = Span(None, "test.op", resource="resource", service="service", trace_id=1) tags = gen_tags(**variant) name = "|".join(f"{k}:{v}" for (k, v) in variant.items()) runner.bench_time_func("perf_group:span|perf_case:add_tags|" + name, time_add_tags, span, tags) for variant in [ dict(nmetrics=500, lmetrics=100), dict(nmetrics=1000, lmetrics=300) ]: span = Span(None, "test.op", resource="resource", service="service",