Example #1
from datetime import datetime, timedelta, timezone

# KafkaMessageMetadata, SpansMessageProcessor and InsertBatch come from the
# Snuba codebase; the import paths below are assumed and may differ between
# versions. SpanEvent and SpanData are fixtures defined alongside this test.
from snuba.consumers.types import KafkaMessageMetadata
from snuba.datasets.spans_processor import SpansMessageProcessor
from snuba.processor import InsertBatch


def test_span_process() -> None:
    timestamp = datetime.now(tz=timezone.utc) - timedelta(seconds=5)
    start_timestamp = timestamp - timedelta(seconds=4)
    message = SpanEvent(
        event_id="e5e062bf2e1d4afd96fd2f90b6770431",
        trace_id="7400045b25c443b885914600aa83ad04",
        span_id="8841662216cc598b",
        parent_span_id="b76a8ca0b0908a15",
        transaction_name="/organizations/:orgId/issues/",
        op="navigation",
        timestamp=timestamp.timestamp(),
        start_timestamp=start_timestamp.timestamp(),
        spans=[
            SpanData(
                trace_id="7400045b25c443b885914600aa83ad04",
                span_id="b95eff64930fef25",
                parent_span_id="8841662216cc598b",
                op="db",
                start_timestamp=(start_timestamp +
                                 timedelta(seconds=1)).timestamp(),
                timestamp=(start_timestamp + timedelta(seconds=2)).timestamp(),
            ),
            SpanData(
                trace_id="7400045b25c443b885914600aa83ad04",
                span_id="9f8e7bbe7bf22e09",
                parent_span_id="b95eff64930fef25",
                op="web",
                start_timestamp=(start_timestamp +
                                 timedelta(seconds=2)).timestamp(),
                timestamp=(start_timestamp + timedelta(seconds=3)).timestamp(),
            ),
        ],
    )
    meta = KafkaMessageMetadata(offset=1,
                                partition=2,
                                timestamp=datetime(1970, 1, 1))
    processed = SpansMessageProcessor().process_message(
        message.serialize(), meta)
    assert isinstance(processed, InsertBatch)
    expected_rows = message.build_result(meta)

    # zip() stops at the shorter sequence, so compare the row counts
    # explicitly before checking the rows pairwise.
    assert len(processed.rows) == len(expected_rows)
    for span, expected in zip(processed.rows, expected_rows):
        assert span == expected
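
The fixture above encodes a three-level parent chain: the navigation transaction span 8841662216cc598b (itself a child of b76a8ca0b0908a15) parents the db span b95eff64930fef25, which parents the web span 9f8e7bbe7bf22e09. A minimal, self-contained sketch of walking that chain from the fixture's parent_span_id links (plain dicts stand in for the SpanData fields; nothing here is a Snuba API):

spans = {
    "8841662216cc598b": "b76a8ca0b0908a15",  # navigation transaction span
    "b95eff64930fef25": "8841662216cc598b",  # db span
    "9f8e7bbe7bf22e09": "b95eff64930fef25",  # web span
}

def chain(span_id: str) -> list[str]:
    # Walk the parent_span_id links upward until we leave the fixture.
    path = [span_id]
    while path[-1] in spans:
        path.append(spans[path[-1]])
    return path

print(" -> ".join(chain("9f8e7bbe7bf22e09")))
# 9f8e7bbe7bf22e09 -> b95eff64930fef25 -> 8841662216cc598b -> b76a8ca0b0908a15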
Example #2
    ),
    ("start_ts", DateTime()),
    ("start_ns", UInt(32)),
    ("finish_ts", DateTime()),
    ("finish_ns", UInt(32)),
    ("duration_ms", UInt(32)),
    ("tags", Nested([("key", String()), ("value", String())])),
    ("_tags_hash_map", Materialized(Array(UInt(64)), TAGS_HASH_MAP_COLUMN)),
    ("retention_days", UInt(16)),
    ("deleted", UInt(8)),
])

schema = WritableTableSchema(
    columns=columns,
    local_table_name="spans_experimental_local",
    dist_table_name="spans_experimental_dist",
    storage_set_key=StorageSetKey.TRANSACTIONS,
)

storage = WritableTableStorage(
    storage_key=StorageKey.SPANS,
    storage_set_key=StorageSetKey.TRANSACTIONS,
    schema=schema,
    query_processors=[PrewhereProcessor()],
    stream_loader=KafkaStreamLoader(
        processor=SpansMessageProcessor(),
        default_topic="events",
    ),
    query_splitters=[TimeSplitQueryStrategy(timestamp_col="finish_ts")],
)
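
The column pairs start_ts/start_ns and finish_ts/finish_ns suggest that each floating-point epoch timestamp (as produced by datetime.timestamp() in the test above) is stored as a whole-second DateTime plus a nanosecond remainder, with duration_ms holding the difference in milliseconds. Below is a minimal sketch of that conversion under exactly that assumption; split_timestamp and duration_ms are hypothetical helpers, not part of Snuba:

from datetime import datetime, timezone


def split_timestamp(epoch: float) -> tuple[datetime, int]:
    # Whole-second DateTime plus the sub-second remainder in nanoseconds,
    # mirroring the (start_ts, start_ns) / (finish_ts, finish_ns) pairs.
    seconds = int(epoch)
    nanoseconds = int(round((epoch - seconds) * 1_000_000_000))
    return datetime.fromtimestamp(seconds, tz=timezone.utc), nanoseconds


def duration_ms(start: float, finish: float) -> int:
    # Wall-clock duration in whole milliseconds, as stored in duration_ms.
    return max(0, int(round((finish - start) * 1000)))


start = 1577836800.123456   # 2020-01-01 00:00:00.123456 UTC
finish = 1577836802.654321
print(split_timestamp(start))      # (datetime(2020, 1, 1, ...), ~123456000)
print(duration_ms(start, finish))  # 2531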