Example #1
    def __init__(self) -> None:
        super().__init__(
            storages=[],
            query_pipeline_builder=Mock(),
            abstract_column_set=GROUPS_SCHEMA,
            join_relationships={
                "events": JoinRelationship(
                    rhs_entity=EntityKey.EVENTS,
                    columns=[("id", "group_id")],
                    join_type=JoinType.INNER,
                    equivalences=[ColumnEquivalence("project_id", "project_id")],
                ),
                "assigned": JoinRelationship(
                    rhs_entity=EntityKey.GROUPASSIGNEE,
                    columns=[("user_id", "user_id")],
                    join_type=JoinType.INNER,
                    equivalences=[],
                ),
            },
            writable_storage=None,
            validators=None,
            required_time_column=None,
        )
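
All the examples in this section lean on the same few join primitives. For orientation, here is a minimal, self-contained sketch of roughly what JoinType, ColumnEquivalence, and JoinRelationship could look like, inferred from how the examples use them; treat it as a hypothetical simplification for readability, not Snuba's actual definitions.

from dataclasses import dataclass, field
from enum import Enum
from typing import Sequence, Tuple


class EntityKey(Enum):
    # Hypothetical subset; the real enum covers every entity.
    EVENTS = "events"
    GROUPASSIGNEE = "groupassignee"


class JoinType(Enum):
    INNER = "INNER"
    LEFT = "LEFT"


@dataclass(frozen=True)
class ColumnEquivalence:
    # Columns on either side of the join known to carry the same value,
    # even though they are not part of the ON condition.
    left_column: str
    right_column: str


@dataclass(frozen=True)
class JoinRelationship:
    # The entity on the right-hand side; the left-hand side is the
    # entity whose join_relationships dict declares this relationship.
    rhs_entity: EntityKey
    # (lhs_column, rhs_column) pairs forming the join (ON) condition.
    columns: Sequence[Tuple[str, str]]
    join_type: JoinType
    equivalences: Sequence[ColumnEquivalence] = field(default_factory=tuple)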
Example #2
    def __init__(self) -> None:
        super().__init__(
            storages=[],
            query_pipeline_builder=Mock(),
            abstract_column_set=EVENTS_SCHEMA,
            join_relationships={
                "grouped": JoinRelationship(
                    rhs_entity=EntityKey.GROUPEDMESSAGES,
                    columns=[("group_id", "id")],
                    join_type=JoinType.INNER,
                    equivalences=[ColumnEquivalence("project_id", "project_id")],
                ),
                "assigned_group": JoinRelationship(
                    rhs_entity=EntityKey.GROUPASSIGNEE,
                    columns=[("group_id", "group_id")],
                    join_type=JoinType.INNER,
                    equivalences=[ColumnEquivalence("project_id", "project_id")],
                ),
                # This makes no sense but it is for the sake of the test
                "assigned_user": JoinRelationship(
                    rhs_entity=EntityKey.GROUPASSIGNEE,
                    columns=[("user_id", "user_id")],
                    join_type=JoinType.INNER,
                    equivalences=[ColumnEquivalence("project_id", "project_id")],
                ),
                # This makes even less sense, but self-referencing joins are possible
                "self_relationship": JoinRelationship(
                    rhs_entity=EntityKey.EVENTS,
                    columns=[("event_id", "event_id")],
                    join_type=JoinType.INNER,
                    equivalences=[],
                ),
            },
            writable_storage=None,
        )
Example #3
    def __init__(self, custom_mappers: Optional[TranslationMappers] = None) -> None:
        storage = get_writable_storage(StorageKey.TRANSACTIONS)
        schema = storage.get_table_writer().get_schema()

        super().__init__(
            storages=[storage],
            query_pipeline_builder=SimplePipelineBuilder(
                query_plan_builder=SingleStorageQueryPlanBuilder(
                    storage=storage,
                    mappers=transaction_translator
                    if custom_mappers is None
                    else transaction_translator.concat(custom_mappers),
                ),
            ),
            abstract_column_set=schema.get_columns(),
            join_relationships={
                "contains":
                JoinRelationship(
                    rhs_entity=EntityKey.SPANS,
                    columns=[
                        ("project_id", "project_id"),
                        ("span_id", "transaction_span_id"),
                    ],
                    join_type=JoinType.INNER,
                    equivalences=[
                        ColumnEquivalence("event_id", "transaction_id"),
                        ColumnEquivalence("transaction_name",
                                          "transaction_name"),
                        ColumnEquivalence("trace_id", "trace_id"),
                    ],
                )
            },
            writable_storage=storage,
        )
Example #4
    def events_mock(relationship: str) -> JoinRelationship:
        entity_key, rhs_column = mapping[relationship]
        return JoinRelationship(
            rhs_entity=entity_key,
            join_type=JoinType.INNER,
            columns=[("event_id", rhs_column)],
            equivalences=[],
        )
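
events_mock closes over a mapping dict defined elsewhere in the test. A plausible shape for it, with hypothetical entries, plus one way to attach the factory to a mocked entity via unittest.mock's side_effect (assuming the entity under test exposes a get_join_relationship lookup, which is what this mock appears to stand in for):

from unittest.mock import Mock

# Hypothetical entries; the real test supplies its own.
mapping = {
    "grouped": (EntityKey.GROUPEDMESSAGES, "id"),
    "assigned": (EntityKey.GROUPASSIGNEE, "group_id"),
}

events_entity = Mock()
# Each call to get_join_relationship("grouped") now returns a fresh
# JoinRelationship built by events_mock from the mapped entity/column.
events_entity.get_join_relationship = Mock(side_effect=events_mock)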
Example #5
    def __init__(self, custom_mappers: Optional[TranslationMappers] = None) -> None:
        if settings.ERRORS_ROLLOUT_ALL:
            events_storage = get_writable_storage(StorageKey.ERRORS)
            pipeline_builder = SimplePipelineBuilder(
                query_plan_builder=SelectedStorageQueryPlanBuilder(
                    selector=ErrorsQueryStorageSelector(
                        mappers=errors_translators
                        if custom_mappers is None
                        else errors_translators.concat(custom_mappers)
                    )
                ),
            )
        else:
            events_storage = get_writable_storage(StorageKey.EVENTS)
            pipeline_builder = SimplePipelineBuilder(
                query_plan_builder=SelectedStorageQueryPlanBuilder(
                    selector=EventsQueryStorageSelector(
                        mappers=event_translator
                        if custom_mappers is None
                        else event_translator.concat(custom_mappers)
                    )
                ),
            )

        schema = events_storage.get_table_writer().get_schema()
        columns = schema.get_columns()

        super().__init__(
            storages=[events_storage],
            query_pipeline_builder=pipeline_builder,
            abstract_column_set=columns,
            join_relationships={
                "grouped":
                JoinRelationship(
                    rhs_entity=EntityKey.GROUPEDMESSAGES,
                    columns=[("project_id", "project_id"), ("group_id", "id")],
                    join_type=JoinType.INNER,
                    equivalences=[],
                ),
                "assigned":
                JoinRelationship(
                    rhs_entity=EntityKey.GROUPASSIGNEE,
                    columns=[("project_id", "project_id"),
                             ("group_id", "group_id")],
                    join_type=JoinType.INNER,
                    equivalences=[],
                ),
            },
            writable_storage=events_storage,
            validators=[EntityRequiredColumnValidator({"project_id"})],
            required_time_column="timestamp",
        )
Example #6
    def __init__(self) -> None:
        storage = get_writable_storage(StorageKey.SPANS)

        super().__init__(
            storages=[storage],
            query_pipeline_builder=SimplePipelineBuilder(
                query_plan_builder=SingleStorageQueryPlanBuilder(
                    storage=storage,
                    mappers=TranslationMappers(
                        subscriptables=[
                            SubscriptableMapper(None, "tags", None, "tags")
                        ],
                    ),
                ),
            ),
            abstract_column_set=ColumnSet(
                [
                    ("project_id", UInt(64)),
                    ("transaction_id", UUID()),
                    ("trace_id", UUID()),
                    ("transaction_span_id", UInt(64)),
                    ("span_id", UInt(64)),
                    ("parent_span_id", UInt(64, Modifiers(nullable=True))),
                    ("transaction_name", String()),
                    ("op", String()),
                    ("status", UInt(8)),
                    ("start_ts", DateTime()),
                    ("start_ns", UInt(32)),
                    ("finish_ts", DateTime()),
                    ("finish_ns", UInt(32)),
                    ("duration_ms", UInt(32)),
                    ("tags", Nested([("key", String()), ("value", String())])),
                ]
            ),
            join_relationships={
                "contained": JoinRelationship(
                    rhs_entity=EntityKey.TRANSACTIONS,
                    columns=[
                        ("project_id", "project_id"),
                        ("transaction_span_id", "span_id"),
                    ],
                    join_type=JoinType.INNER,
                    equivalences=[
                        ColumnEquivalence("transaction_id", "event_id"),
                        ColumnEquivalence("transaction_name", "transaction_name"),
                        ColumnEquivalence("trace_id", "trace_id"),
                    ],
                )
            },
            writable_storage=storage,
            validators=[EntityRequiredColumnValidator({"project_id"})],
            required_time_column=None,
        )
Example #7
    def __init__(self) -> None:
        storage = get_cdc_storage(StorageKey.GROUPASSIGNEES)
        schema = storage.get_table_writer().get_schema()

        super().__init__(
            storages=[storage],
            query_pipeline_builder=SimplePipelineBuilder(
                query_plan_builder=SingleStorageQueryPlanBuilder(storage=storage),
            ),
            abstract_column_set=schema.get_columns(),
            join_relationships={
                "owns": JoinRelationship(
                    rhs_entity=EntityKey.EVENTS,
                    columns=[("project_id", "project_id"), ("group_id", "group_id")],
                    join_type=JoinType.LEFT,
                    equivalences=[],
                )
            },
            writable_storage=storage,
            required_filter_columns=None,
            required_time_column=None,
        )
Example #8
def test_joins(
    clauses: Sequence[Tuple[str, str]], expected: JoinClause[QueryEntity]
) -> None:
    relationships = []

    for clause in clauses:
        lhs, rhs = clause
        lhs_alias, lhs = lhs.split(":", 1)
        rhs_alias, rhs = rhs.split(":", 1)
        data = JoinRelationship(
            rhs_entity=EntityKey(rhs),
            join_type=JoinType.INNER,
            columns=[("event_id", "event_id")],
            equivalences=[],
        )
        relationships.append(
            RelationshipTuple(
                node(lhs_alias, lhs), uuid.uuid4().hex, node(rhs_alias, rhs), data,
            )
        )

    result = build_join_clause(relationships)
    assert result == expected
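
Each clause is a pair of "alias:entity" strings, so a parametrized input presumably looks something like the following (hypothetical values; EntityKey(rhs) requires the substring after the colon to be a valid entity key):

clauses = [
    # split(":", 1) peels the alias off the entity key string.
    ("ev:events", "gr:groupedmessage"),
    ("ev:events", "as:groupassignee"),
]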
Example #9
    def __init__(self, custom_mappers: Optional[TranslationMappers] = None) -> None:
        events_storage = get_writable_storage(StorageKey.EVENTS)
        errors_storage = get_writable_storage(StorageKey.ERRORS)
        schema = events_storage.get_table_writer().get_schema()
        columns = schema.get_columns()

        events_pipeline_builder = SimplePipelineBuilder(
            query_plan_builder=SelectedStorageQueryPlanBuilder(
                selector=EventsQueryStorageSelector(
                    mappers=event_translator
                    if custom_mappers is None
                    else event_translator.concat(custom_mappers)
                )
            ),
        )

        errors_pipeline_builder = SimplePipelineBuilder(
            query_plan_builder=SelectedStorageQueryPlanBuilder(
                selector=ErrorsQueryStorageSelector(
                    mappers=errors_translators
                    if custom_mappers is None
                    else errors_translators.concat(custom_mappers)
                )
            ),
        )

        def selector_func(_query: Query, referrer: str) -> Tuple[str, List[str]]:
            # In case something goes wrong, set this to 1 to revert to the events storage.
            kill_rollout = state.get_config("errors_rollout_killswitch", 0)
            assert isinstance(kill_rollout, (int, str))
            if int(kill_rollout):
                return "events", []

            if referrer in settings.ERRORS_ROLLOUT_BY_REFERRER:
                return "errors", []

            if settings.ERRORS_ROLLOUT_ALL:
                return "errors", []

            default_threshold = state.get_config("errors_query_percentage", 0)
            assert isinstance(default_threshold, (float, int, str))
            threshold = settings.ERRORS_QUERY_PERCENTAGE_BY_REFERRER.get(
                referrer, default_threshold
            )

            if random.random() < float(threshold):
                return "events", ["errors"]

            return "events", []

        def writable_storage() -> WritableTableStorage:
            if settings.ERRORS_ROLLOUT_WRITABLE_STORAGE:
                return get_writable_storage(StorageKey.ERRORS)
            else:
                return get_writable_storage(StorageKey.EVENTS)

        super().__init__(
            storages=[events_storage, errors_storage],
            query_pipeline_builder=PipelineDelegator(
                query_pipeline_builders={
                    "events": events_pipeline_builder,
                    "errors": errors_pipeline_builder,
                },
                selector_func=selector_func,
                callback_func=partial(callback_func, "errors"),
            ),
            abstract_column_set=columns,
            join_relationships={
                "grouped": JoinRelationship(
                    rhs_entity=EntityKey.GROUPEDMESSAGES,
                    columns=[("project_id", "project_id"), ("group_id", "id")],
                    join_type=JoinType.INNER,
                    equivalences=[],
                ),
                "assigned": JoinRelationship(
                    rhs_entity=EntityKey.GROUPASSIGNEE,
                    columns=[("project_id", "project_id"), ("group_id", "group_id")],
                    join_type=JoinType.INNER,
                    equivalences=[],
                ),
            },
            writable_storage=writable_storage(),
            required_filter_columns=["project_id"],
            required_time_column="timestamp",
        )
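
The selector_func above layers four checks: a runtime killswitch, a per-referrer allowlist, a global rollout flag, and percentage-based sampling that runs the new pipeline as a shadow secondary. The same pattern in isolation, with hypothetical names (a sketch of the technique, not Snuba's API):

import random
from typing import AbstractSet, List, Mapping, Tuple


def pick_pipeline(
    referrer: str,
    killswitch: bool,
    rollout_referrers: AbstractSet[str],
    rollout_all: bool,
    rate_by_referrer: Mapping[str, float],
    default_rate: float,
) -> Tuple[str, List[str]]:
    # Returns (primary pipeline, secondary pipelines to run for comparison).
    if killswitch:
        # Emergency escape hatch: always take the old pipeline.
        return "events", []
    if referrer in rollout_referrers or rollout_all:
        # Fully rolled out for this caller: the new pipeline is primary.
        return "errors", []
    rate = rate_by_referrer.get(referrer, default_rate)
    if random.random() < rate:
        # Sampled: the old pipeline stays primary while the new one runs
        # in the background so a callback can compare results.
        return "events", ["errors"]
    return "events", []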