from snuba import settings
from snuba.clickhouse.columns import UUID, Array, ColumnSet, DateTime, Float
from snuba.clickhouse.columns import SchemaModifiers as Modifiers
from snuba.clickhouse.columns import String, UInt
from snuba.clusters.storage_sets import StorageSetKey
from snuba.datasets.querylog_processor import QuerylogProcessor
from snuba.datasets.schemas.tables import WritableTableSchema
from snuba.datasets.storage import WritableTableStorage
from snuba.datasets.storages import StorageKey
from snuba.datasets.table_storage import KafkaStreamLoader

columns = ColumnSet(
    [
        ("request_id", UUID()),
        ("request_body", String()),
        ("referrer", String()),
        ("dataset", String()),
        ("projects", Array(UInt(64))),
        ("organization", UInt(64, Modifiers(nullable=True))),
        ("timestamp", DateTime()),
        ("duration_ms", UInt(32)),
        ("status", String()),
        # clickhouse_queries Nested columns.
        # This is expanded into arrays instead of being expressed as a
        # Nested column because, when adding new columns to a nested field
        # we need to provide a default for the entire array (each new column
        # is an array).
        # The same schema cannot be achieved with the Nested construct (where
        # we can only provide default for individual values), so, if we
        # use the Nested construct, this schema cannot match the one generated
        # by the migration framework (or by any ALTER statement).
        ("clickhouse_queries.sql", Array(String())),
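        # Illustration (a sketch, not part of this storage definition): under
        # this expansion each Nested sub-column becomes a parallel array that
        # must stay the same length as its siblings. The names below are
        # sketched from the Nested schema in the querylog migrations later in
        # this section:
        #
        #     ("clickhouse_queries.status", Array(String())),
        #     ("clickhouse_queries.trace_id", Array(UUID(Modifiers(nullable=True)))),
        #
        # A migration that adds such a column therefore has to default the
        # whole array to the length of an existing sibling, e.g.
        #
        #     arrayResize([''], length(clickhouse_queries.sql))
        #
        # as the querylog migration excerpted below does.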
    UInt,
)
from snuba.clusters.storage_sets import StorageSetKey
from snuba.datasets.storages.tags_hash_map import TAGS_HASH_MAP_COLUMN
from snuba.migrations import migration, operations, table_engines
from snuba.migrations.columns import MigrationModifiers as Modifiers

# Column list from 0001_events_initial, plus the hierarchical_hashes column
columns: List[Column[Modifiers]] = [
    Column("event_id", FixedString(32)),
    Column("project_id", UInt(64)),
    Column("group_id", UInt(64)),
    Column("timestamp", DateTime()),
    Column("deleted", UInt(8)),
    Column("retention_days", UInt(16)),
    Column("platform", String(Modifiers(nullable=True))),
    Column("message", String(Modifiers(nullable=True))),
    Column("primary_hash", FixedString(32, Modifiers(nullable=True))),
    Column("hierarchical_hashes", Array(FixedString(32))),
    Column("received", DateTime(Modifiers(nullable=True))),
    Column("search_message", String(Modifiers(nullable=True))),
    Column("title", String(Modifiers(nullable=True))),
    Column("location", String(Modifiers(nullable=True))),
    Column("user_id", String(Modifiers(nullable=True))),
    Column("username", String(Modifiers(nullable=True))),
    Column("email", String(Modifiers(nullable=True))),
    Column("ip_address", String(Modifiers(nullable=True))),
    Column("geo_country_code", String(Modifiers(nullable=True))),
    Column("geo_region", String(Modifiers(nullable=True))),
    Column("geo_city", String(Modifiers(nullable=True))),
    Column("sdk_name", String(Modifiers(nullable=True))),
    Float,
    Nested,
    Nullable,
    String,
    UInt,
    UUID,
)
from snuba.clusters.storage_sets import StorageSetKey
from snuba.migrations import migration, operations, table_engines
from snuba.migrations.columns import LowCardinality

status_type = Enum([("success", 0), ("error", 1), ("rate-limited", 2)])

columns = [
    Column("request_id", UUID()),
    Column("request_body", String()),
    Column("referrer", LowCardinality(String())),
    Column("dataset", LowCardinality(String())),
    Column("projects", Array(UInt(64))),
    Column("organization", Nullable(UInt(64))),
    Column("timestamp", DateTime()),
    Column("duration_ms", UInt(32)),
    Column("status", status_type),
    Column(
        "clickhouse_queries",
        Nested(
            [
                Column("sql", String()),
                Column("status", status_type),
                Column("trace_id", Nullable(UUID())),
                Column("duration_ms", UInt(32)),
                Column("stats", String()),
    PostReplacementConsistencyEnforcer,
)
from snuba.datasets.table_storage import KafkaStreamLoader
from snuba.query.conditions import ConditionFunctions, binary_condition
from snuba.query.expressions import Column, Literal
from snuba.query.processors.arrayjoin_keyvalue_optimizer import (
    ArrayJoinKeyValueOptimizer,
)
from snuba.query.processors.mapping_promoter import MappingColumnPromoter
from snuba.query.processors.prewhere import PrewhereProcessor

all_columns = ColumnSet(
    [
        ("org_id", UInt(64)),
        ("project_id", UInt(64)),
        ("timestamp", DateTime()),
        ("event_id", UUID()),
        ("event_hash", ReadOnly(UInt(64))),
        ("platform", String()),
        ("environment", Nullable(String())),
        ("release", Nullable(String())),
        ("dist", Nullable(String())),
        ("ip_address_v4", Nullable(IPv4())),
        ("ip_address_v6", Nullable(IPv6())),
        ("user", String()),
        ("user_hash", ReadOnly(UInt(64))),
        ("user_id", Nullable(String())),
        ("user_name", Nullable(String())),
        ("user_email", Nullable(String())),
        ("sdk_name", Nullable(String())),
        ("sdk_version", Nullable(String())),
        ("http_method", Nullable(String())),
        ("http_referer", Nullable(String())),
        ("tags", Nested([("key", String()), ("value", String())])),
from snuba.datasets.storages import StorageKey
from snuba.datasets.table_storage import KafkaStreamLoader
from snuba.query.processors.prewhere import PrewhereProcessor

WRITE_LOCAL_TABLE_NAME = "outcomes_raw_local"
WRITE_DIST_TABLE_NAME = "outcomes_raw_dist"
READ_LOCAL_TABLE_NAME = "outcomes_hourly_local"
READ_DIST_TABLE_NAME = "outcomes_hourly_dist"

write_columns = ColumnSet(
    [
        ("org_id", UInt(64)),
        ("project_id", UInt(64)),
        ("key_id", Nullable(UInt(64))),
        ("timestamp", DateTime()),
        ("outcome", UInt(8)),
        ("reason", LowCardinality(Nullable(String()))),
        ("event_id", Nullable(UUID())),
    ]
)

raw_schema = MergeTreeSchema(
    columns=write_columns,
    # TODO: change to outcomes.raw_local when we add multi DB support
    local_table_name=WRITE_LOCAL_TABLE_NAME,
    dist_table_name=WRITE_DIST_TABLE_NAME,
    storage_set_key=StorageSetKey.OUTCOMES,
    order_by="(org_id, project_id, timestamp)",
    partition_by="(toMonday(timestamp))",
    settings={"index_granularity": "16384"},
)

read_columns = ColumnSet(
    [
    def __forward_migrations(
        self, table_name: str
    ) -> Sequence[operations.Operation]:
        return [
            operations.AddColumn(
                storage_set=StorageSetKey.QUERYLOG,
                table_name=table_name,
                column=Column(
                    "clickhouse_queries.all_columns",
                    WithDefault(
                        Array(Array(LowCardinality(String()))),
                        "arrayResize([['']], length(clickhouse_queries.sql))",
                    ),
                ),
                after="clickhouse_queries.consistent",
            ),
            operations.AddColumn(
                storage_set=StorageSetKey.QUERYLOG,
                table_name=table_name,
                column=Column(
                    "clickhouse_queries.or_conditions",
                    WithDefault(
                        Array(UInt(8)),
                        "arrayResize([0], length(clickhouse_queries.sql))",
                    ),
                ),
                after="clickhouse_queries.all_columns",
            ),
            operations.AddColumn(
                storage_set=StorageSetKey.QUERYLOG,
                table_name=table_name,
                column=Column(
                    "clickhouse_queries.where_columns",
                    WithDefault(
                        Array(Array(LowCardinality(String()))),
                        "arrayResize([['']], length(clickhouse_queries.sql))",
                    ),
                ),
                after="clickhouse_queries.or_conditions",
            ),
            operations.AddColumn(
                storage_set=StorageSetKey.QUERYLOG,
                table_name=table_name,
                column=Column(
                    "clickhouse_queries.where_mapping_columns",
                    WithDefault(
                        Array(Array(LowCardinality(String()))),
                        "arrayResize([['']], length(clickhouse_queries.sql))",
                    ),
                ),
                after="clickhouse_queries.where_columns",
            ),
            operations.AddColumn(
                storage_set=StorageSetKey.QUERYLOG,
                table_name=table_name,
                column=Column(
                    "clickhouse_queries.groupby_columns",
                    WithDefault(
                        Array(Array(LowCardinality(String()))),
                        "arrayResize([['']], length(clickhouse_queries.sql))",
                    ),
                ),
                after="clickhouse_queries.where_mapping_columns",
            ),
            operations.AddColumn(
                storage_set=StorageSetKey.QUERYLOG,
                table_name=table_name,
                column=Column(
                    "clickhouse_queries.array_join_columns",
                    WithDefault(
                        Array(Array(LowCardinality(String()))),
                        "arrayResize([['']], length(clickhouse_queries.sql))",
                    ),
                ),
                after="clickhouse_queries.groupby_columns",
            ),
        ]
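    # For illustration (a sketch; the exact SQL text depends on the migration
    # framework's formatting, and "querylog_local" is a hypothetical value of
    # table_name): the first AddColumn above is expected to render to DDL
    # along the lines of
    #
    #   ALTER TABLE querylog_local ADD COLUMN clickhouse_queries.all_columns
    #       Array(Array(LowCardinality(String)))
    #       DEFAULT arrayResize([['']], length(clickhouse_queries.sql))
    #       AFTER clickhouse_queries.consistent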
    def forwards_local(self) -> Sequence[operations.Operation]:
        return [
            operations.ModifyColumn(
                storage_set=StorageSetKey.TRANSACTIONS,
                table_name="transactions_local",
                column=Column("duration", UInt(32)),
            ),
            operations.AddColumn(
                storage_set=StorageSetKey.TRANSACTIONS,
                table_name="transactions_local",
                column=Column(
                    "sdk_name",
                    String(Modifiers(low_cardinality=True, default="''")),
                ),
                after="user_email",
            ),
            operations.AddColumn(
                storage_set=StorageSetKey.TRANSACTIONS,
                table_name="transactions_local",
                column=Column(
                    "sdk_version",
                    String(Modifiers(low_cardinality=True, default="''")),
                ),
                after="sdk_name",
            ),
            operations.AddColumn(
                storage_set=StorageSetKey.TRANSACTIONS,
                table_name="transactions_local",
                column=Column(
                    "transaction_status",
                    UInt(8, Modifiers(default=str(UNKNOWN_SPAN_STATUS))),
                ),
                after="transaction_op",
            ),
            operations.AddColumn(
                storage_set=StorageSetKey.TRANSACTIONS,
                table_name="transactions_local",
                column=Column("_tags_flattened", String()),
                after="tags",
            ),
            operations.AddColumn(
                storage_set=StorageSetKey.TRANSACTIONS,
                table_name="transactions_local",
                column=Column("_contexts_flattened", String()),
                after="contexts",
            ),
            operations.AddColumn(
                storage_set=StorageSetKey.TRANSACTIONS,
                table_name="transactions_local",
                column=Column(
                    "user_hash",
                    UInt(64, Modifiers(materialized="cityHash64(user)")),
                ),
                after="user",
            ),
            # The following columns were originally created as
            # non-low-cardinality strings.
            operations.ModifyColumn(
                storage_set=StorageSetKey.TRANSACTIONS,
                table_name="transactions_local",
                column=Column(
                    "transaction_name", String(Modifiers(low_cardinality=True))
                ),
            ),
            operations.ModifyColumn(
                storage_set=StorageSetKey.TRANSACTIONS,
                table_name="transactions_local",
                column=Column(
                    "release",
                    String(Modifiers(nullable=True, low_cardinality=True)),
                ),
            ),
            operations.ModifyColumn(
                storage_set=StorageSetKey.TRANSACTIONS,
                table_name="transactions_local",
                column=Column(
                    "dist",
                    String(Modifiers(nullable=True, low_cardinality=True)),
                ),
            ),
            operations.ModifyColumn(
                storage_set=StorageSetKey.TRANSACTIONS,
                table_name="transactions_local",
                column=Column(
                    "sdk_name",
                    String(Modifiers(low_cardinality=True, default="''")),
                ),
            ),
            operations.ModifyColumn(
                storage_set=StorageSetKey.TRANSACTIONS,
                table_name="transactions_local",
                column=Column(
                    "sdk_version",
                    String(Modifiers(low_cardinality=True, default="''")),
                ),
            ),
            operations.ModifyColumn(
                storage_set=StorageSetKey.TRANSACTIONS,
                table_name="transactions_local",
                column=Column(
                    "environment",
                    String(Modifiers(nullable=True, low_cardinality=True)),
                ),
            ),
            operations.AddColumn(
                storage_set=StorageSetKey.TRANSACTIONS,
                table_name="transactions_local",
                column=Column("message_timestamp", DateTime()),
                after="offset",
            ),
            operations.DropColumn(
                storage_set=StorageSetKey.TRANSACTIONS,
                table_name="transactions_local",
                column_name="_start_date",
            ),
            operations.DropColumn(
                storage_set=StorageSetKey.TRANSACTIONS,
                table_name="transactions_local",
                column_name="_finish_date",
            ),
        ]
from snuba.datasets.storages import StorageKey
from snuba.datasets.table_storage import KafkaStreamLoader
from snuba.query.processors.prewhere import PrewhereProcessor

WRITE_LOCAL_TABLE_NAME = "outcomes_raw_local"
WRITE_DIST_TABLE_NAME = "outcomes_raw_dist"
READ_LOCAL_TABLE_NAME = "outcomes_hourly_local"
READ_DIST_TABLE_NAME = "outcomes_hourly_dist"

write_columns = ColumnSet(
    [
        ("org_id", UInt(64)),
        ("project_id", UInt(64)),
        ("key_id", UInt(64, Modifiers(nullable=True))),
        ("timestamp", DateTime()),
        ("outcome", UInt(8)),
        ("reason", String(Modifiers(nullable=True))),
        ("event_id", UUID(Modifiers(nullable=True))),
    ]
)

raw_schema = WritableTableSchema(
    columns=write_columns,
    # TODO: change to outcomes.raw_local when we add multi DB support
    local_table_name=WRITE_LOCAL_TABLE_NAME,
    dist_table_name=WRITE_DIST_TABLE_NAME,
    storage_set_key=StorageSetKey.OUTCOMES,
)

read_columns = ColumnSet(
    [
        ("org_id", UInt(64)),
        ("project_id", UInt(64)),
        ("key_id", UInt(64)),
def test_events_promoted_boolean_context() -> None:
    columns = ColumnSet(
        [
            ("device_charging", UInt(8, Modifiers(nullable=True))),
            ("contexts", Nested([("key", String()), ("value", String())])),
        ]
    )
    query = ClickhouseQuery(
        Table("events", columns),
        selected_columns=[
            SelectedExpression(
                "contexts[device.charging]",
                FunctionCall(
                    "contexts[device.charging]",
                    "arrayElement",
                    (
                        Column(None, None, "contexts.value"),
                        FunctionCall(
                            None,
                            "indexOf",
                            (
                                Column(None, None, "contexts.key"),
                                Literal(None, "device.charging"),
                            ),
                        ),
                    ),
                ),
            )
        ],
    )
    expected = ClickhouseQuery(
        Table("events", columns),
        selected_columns=[
            SelectedExpression(
                "contexts[device.charging]",
                FunctionCall(
                    "contexts[device.charging]",
                    "if",
                    (
                        binary_condition(
                            ConditionFunctions.IN,
                            FunctionCall(
                                None,
                                "toString",
                                (Column(None, None, "device_charging"),),
                            ),
                            literals_tuple(
                                None, [Literal(None, "1"), Literal(None, "True")]
                            ),
                        ),
                        Literal(None, "True"),
                        Literal(None, "False"),
                    ),
                ),
            )
        ],
    )

    settings = HTTPRequestSettings()
    MappingColumnPromoter(
        {"contexts": {"device.charging": "device_charging"}}, cast_to_string=True
    ).process_query(query, settings)
    EventsPromotedBooleanContextsProcessor().process_query(query, settings)
    assert query.get_selected_columns() == expected.get_selected_columns()
from snuba.migrations.columns import MigrationModifiers as Modifiers
from snuba.migrations.parse_schema import _get_column

test_data = [
    # Basic types
    (("Date", "", "", ""), Date()),
    (("DateTime", "", "", ""), DateTime()),
    (
        ("Enum8('success' = 0, 'error' = 1)", "", "", ""),
        Enum([("success", 0), ("error", 1)]),
    ),
    (("FixedString(32)", "", "", ""), FixedString(32)),
    (("Float32", "", "", ""), Float(32)),
    (("IPv4", "", "", ""), IPv4()),
    (("IPv6", "", "", ""), IPv6()),
    (("String", "", "", ""), String()),
    (("UInt32", "", "", ""), UInt(32)),
    (("UUID", "", "", ""), UUID()),
    # Aggregate functions
    (
        ("AggregateFunction(uniq, UInt8)", "", "", ""),
        AggregateFunction("uniq", [UInt(8)]),
    ),
    (
        ("AggregateFunction(countIf, UUID, UInt8)", "", "", ""),
        AggregateFunction("countIf", [UUID(), UInt(8)]),
    ),
    (
        ("AggregateFunction(quantileIf(0.5, 0.9), UInt32, UInt8)", "", "", ""),
        AggregateFunction("quantileIf(0.5, 0.9)", [UInt(32), UInt(8)]),
    ),
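# A sketch of how fixtures like these are typically consumed once the list
# above is closed (assuming _get_column takes the four string fields
# positionally; the test name is illustrative):
import pytest

@pytest.mark.parametrize("input_args, expected", test_data)
def test_get_column(input_args, expected) -> None:
    # Parse the raw ClickHouse type description and compare against the
    # expected column object.
    assert _get_column(*input_args) == expected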
    def forwards_local(self) -> Sequence[operations.SqlOperation]:
        return [
            operations.AddColumn(
                storage_set=StorageSetKey.EVENTS,
                table_name="sentry_local",
                column=Column("group_id", UInt(64)),
                after="project_id",
            ),
            operations.DropColumn(
                storage_set=StorageSetKey.EVENTS,
                table_name="sentry_local",
                column_name="device_model",
            ),
            operations.AddColumn(
                storage_set=StorageSetKey.EVENTS,
                table_name="sentry_local",
                column=Column("sdk_integrations", Array(String())),
                after="exception_frames",
            ),
            operations.AddColumn(
                storage_set=StorageSetKey.EVENTS,
                table_name="sentry_local",
                column=Column("modules.name", Nested([("name", String())])),
                after="sdk_integrations",
            ),
            operations.AddColumn(
                storage_set=StorageSetKey.EVENTS,
                table_name="sentry_local",
                column=Column("culprit", String(Modifiers(nullable=True))),
                after="sdk_integrations",
            ),
            operations.AddColumn(
                storage_set=StorageSetKey.EVENTS,
                table_name="sentry_local",
                column=Column("search_message", String(Modifiers(nullable=True))),
                after="received",
            ),
            operations.AddColumn(
                storage_set=StorageSetKey.EVENTS,
                table_name="sentry_local",
                column=Column("title", String(Modifiers(nullable=True))),
                after="search_message",
            ),
            operations.AddColumn(
                storage_set=StorageSetKey.EVENTS,
                table_name="sentry_local",
                column=Column("location", String(Modifiers(nullable=True))),
                after="title",
            ),
            operations.AddColumn(
                storage_set=StorageSetKey.EVENTS,
                table_name="sentry_local",
                column=Column("_tags_flattened", String()),
                after="tags",
            ),
            operations.AddColumn(
                storage_set=StorageSetKey.EVENTS,
                table_name="sentry_local",
                column=Column("message_timestamp", DateTime()),
                after="partition",
            ),
        ]
from snuba.query.data_source.join import (
    IndividualNode,
    JoinClause,
    JoinCondition,
    JoinConditionExpression,
    JoinModifier,
    JoinType,
)
from snuba.query.data_source.simple import Table
from snuba.query.expressions import Column, CurriedFunctionCall, FunctionCall, Literal

ERRORS_SCHEMA = ColumnSet(
    [
        ("event_id", UUID()),
        ("project_id", UInt(32)),
        ("message", String()),
        ("group_id", UInt(32)),
    ]
)

GROUPS_SCHEMA = ColumnSet(
    [
        ("id", UInt(32)),
        ("project_id", UInt(32)),
        ("group_id", UInt(32)),
        ("message", String()),
    ]
)

GROUPS_ASSIGNEE = ColumnSet([("id", UInt(32)), ("user", String())])

node_err = IndividualNode(alias="err", data_source=Table("errors_local", ERRORS_SCHEMA))
node_group = IndividualNode(
import pytest

from snuba.clickhouse.columns import ColumnSet, Nested
from snuba.clickhouse.columns import SchemaModifiers as Modifiers
from snuba.clickhouse.columns import String, UInt
from snuba.clickhouse.query import Query as ClickhouseQuery
from snuba.query import SelectedExpression
from snuba.query.data_source.simple import Table
from snuba.query.expressions import Column, FunctionCall, Literal
from snuba.query.processors.mapping_promoter import MappingColumnPromoter
from snuba.query.query_settings import HTTPQuerySettings

columns = ColumnSet(
    [
        ("promoted", UInt(8, Modifiers(nullable=True))),
        ("tags", Nested([("key", String()), ("value", String())])),
    ]
)

test_cases = [
    (
        "not promoted",
        ClickhouseQuery(
            Table("events", columns),
            selected_columns=[
                SelectedExpression(
                    "tags[foo]",
                    FunctionCall(
                        "tags[foo]",
                        "arrayElement",
                        (
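# A sketch of the driver such cases are written for, once the list above is
# closed (assuming each tuple is (name, input query, expected query); the
# mapping spec mirrors the positional-argument call shown in the
# promoted-context test earlier in this section):
@pytest.mark.parametrize("name, query, expected_query", test_cases)
def test_mapping_promoter(name, query, expected_query) -> None:
    # Promote tags[foo] accesses onto the physical "promoted" column and
    # compare against the expected query.
    MappingColumnPromoter({"tags": {"foo": "promoted"}}).process_query(
        query, HTTPQuerySettings()
    )
    assert query.get_selected_columns() == expected_query.get_selected_columns()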
    def __init__(self) -> None:
        self.__common_columns = ColumnSet(
            [
                ("event_id", FixedString(32)),
                ("project_id", UInt(64)),
                ("type", Nullable(String())),
                ("timestamp", DateTime()),
                ("platform", Nullable(String())),
                ("environment", Nullable(String())),
                ("release", Nullable(String())),
                ("dist", Nullable(String())),
                ("user", Nullable(String())),
                ("transaction", Nullable(String())),
                ("message", Nullable(String())),
                ("title", Nullable(String())),
                # User
                ("user_id", Nullable(String())),
                ("username", Nullable(String())),
                ("email", Nullable(String())),
                ("ip_address", Nullable(String())),
                # SDK
                ("sdk_name", Nullable(String())),
                ("sdk_version", Nullable(String())),
                # geo location context
                ("geo_country_code", Nullable(String())),
                ("geo_region", Nullable(String())),
                ("geo_city", Nullable(String())),
                ("http_method", Nullable(String())),
                ("http_referer", Nullable(String())),
                # Other tags and context
                ("tags", Nested([("key", String()), ("value", String())])),
                ("contexts", Nested([("key", String()), ("value", String())])),
            ]
        )

        self.__events_columns = ColumnSet(
            [
                ("group_id", Nullable(UInt(64))),
                ("primary_hash", Nullable(FixedString(32))),
                # Promoted tags
                ("level", Nullable(String())),
                ("logger", Nullable(String())),
                ("server_name", Nullable(String())),
                ("site", Nullable(String())),
                ("url", Nullable(String())),
                ("search_message", Nullable(String())),
                ("location", Nullable(String())),
                ("culprit", Nullable(String())),
                ("received", Nullable(DateTime())),
                ("sdk_integrations", Nullable(Array(String()))),
                ("version", Nullable(String())),
                # exception interface
                (
                    "exception_stacks",
                    Nested(
                        [
                            ("type", Nullable(String())),
                            ("value", Nullable(String())),
                            ("mechanism_type", Nullable(String())),
                            ("mechanism_handled", Nullable(UInt(8))),
                        ]
                    ),
                ),
                (
                    "exception_frames",
                    Nested(
                        [
                            ("abs_path", Nullable(String())),
                            ("filename", Nullable(String())),
                            ("package", Nullable(String())),
                            ("module", Nullable(String())),
                            ("function", Nullable(String())),
                            ("in_app", Nullable(UInt(8))),
                            ("colno", Nullable(UInt(32))),
                            ("lineno", Nullable(UInt(32))),
                            ("stack_level", UInt(16)),
                        ]
                    ),
                ),
                ("modules", Nested([("name", String()), ("version", String())])),
            ]
        )

        self.__transactions_columns = ColumnSet(
            [
                ("trace_id", Nullable(UUID())),
                ("span_id", Nullable(UInt(64))),
                ("transaction_hash", Nullable(UInt(64))),
                ("transaction_op", Nullable(String())),
                ("transaction_status", Nullable(UInt(8))),
                ("duration", Nullable(UInt(32))),
                (
                    "measurements",
                    Nested([("key", LowCardinality(String())), ("value", Float(64))]),
                ),
            ]
        )

        events_storage = get_storage(StorageKey.EVENTS)
        events_ro_storage = get_storage(StorageKey.EVENTS_RO)
        transactions_storage = get_storage(StorageKey.TRANSACTIONS)

        self.__time_group_columns: Mapping[str, str] = {}
        self.__time_parse_columns = ("timestamp",)

        super().__init__(
            storages=[events_storage, transactions_storage],
            query_plan_builder=SelectedStorageQueryPlanBuilder(
                selector=DiscoverQueryStorageSelector(
                    events_table=events_storage,
                    events_ro_table=events_ro_storage,
                    abstract_events_columns=self.__events_columns,
                    transactions_table=transactions_storage,
                    abstract_transactions_columns=self.__transactions_columns,
                ),
            ),
            abstract_column_set=(
                self.__common_columns
                + self.__events_columns
                + self.__transactions_columns
            ),
            writable_storage=None,
        )
    def __init__(self) -> None:
        self.__common_columns = ColumnSet(
            [
                ("event_id", FixedString(32)),
                ("project_id", UInt(64)),
                ("type", String(Modifiers(nullable=True))),
                ("timestamp", DateTime()),
                ("platform", String(Modifiers(nullable=True))),
                ("environment", String(Modifiers(nullable=True))),
                ("release", String(Modifiers(nullable=True))),
                ("dist", String(Modifiers(nullable=True))),
                ("user", String(Modifiers(nullable=True))),
                ("transaction", String(Modifiers(nullable=True))),
                ("message", String(Modifiers(nullable=True))),
                ("title", String(Modifiers(nullable=True))),
                # User
                ("user_id", String(Modifiers(nullable=True))),
                ("username", String(Modifiers(nullable=True))),
                ("email", String(Modifiers(nullable=True))),
                ("ip_address", String(Modifiers(nullable=True))),
                # SDK
                ("sdk_name", String(Modifiers(nullable=True))),
                ("sdk_version", String(Modifiers(nullable=True))),
                # geo location context
                ("geo_country_code", String(Modifiers(nullable=True))),
                ("geo_region", String(Modifiers(nullable=True))),
                ("geo_city", String(Modifiers(nullable=True))),
                ("http_method", String(Modifiers(nullable=True))),
                ("http_referer", String(Modifiers(nullable=True))),
                # Other tags and context
                ("tags", Nested([("key", String()), ("value", String())])),
                ("contexts", Nested([("key", String()), ("value", String())])),
                ("trace_id", String(Modifiers(nullable=True))),
                ("span_id", UInt(64, Modifiers(nullable=True))),
            ]
        )

        self.__events_columns = EVENTS_COLUMNS
        self.__transactions_columns = TRANSACTIONS_COLUMNS

        discover_storage = get_storage(StorageKey.DISCOVER)
        discover_storage_plan_builder = SingleStorageQueryPlanBuilder(
            storage=discover_storage,
            mappers=events_translation_mappers.concat(
                transaction_translation_mappers
            )
            .concat(null_function_translation_mappers)
            .concat(
                TranslationMappers(
                    columns=[
                        ColumnToFunction(
                            None,
                            "ip_address",
                            "coalesce",
                            (
                                FunctionCall(
                                    None,
                                    "IPv4NumToString",
                                    (Column(None, None, "ip_address_v4"),),
                                ),
                                FunctionCall(
                                    None,
                                    "IPv6NumToString",
                                    (Column(None, None, "ip_address_v6"),),
                                ),
                            ),
                        ),
                        ColumnToColumn(None, "transaction", None, "transaction_name"),
                        ColumnToColumn(None, "username", None, "user_name"),
                        ColumnToColumn(None, "email", None, "user_email"),
                        ColumnToMapping(
                            None,
                            "geo_country_code",
                            None,
                            "contexts",
                            "geo.country_code",
                            nullable=True,
                        ),
                        ColumnToMapping(
                            None,
                            "geo_region",
                            None,
                            "contexts",
                            "geo.region",
                            nullable=True,
                        ),
                        ColumnToMapping(
                            None,
                            "geo_city",
                            None,
                            "contexts",
                            "geo.city",
                            nullable=True,
                        ),
                        ColumnToFunction(
                            None,
                            "user",
                            "nullIf",
                            (Column(None, None, "user"), Literal(None, "")),
                        ),
                    ]
                )
            )
            .concat(
                TranslationMappers(
                    subscriptables=[
                        SubscriptableMapper(None, "tags", None, "tags"),
                        SubscriptableMapper(None, "contexts", None, "contexts"),
                    ],
                )
            ),
        )
        discover_pipeline_builder = SimplePipelineBuilder(
            query_plan_builder=discover_storage_plan_builder
        )

        super().__init__(
            storages=[discover_storage],
            query_pipeline_builder=discover_pipeline_builder,
            abstract_column_set=(
                self.__common_columns
                + self.__events_columns
                + self.__transactions_columns
            ),
            join_relationships={},
            writable_storage=None,
            validators=[EntityRequiredColumnValidator({"project_id"})],
            required_time_column="timestamp",
        )
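    # For reference, the first ColumnToFunction mapper above rewrites the
    # logical `ip_address` column into the ClickHouse expression
    #
    #     coalesce(IPv4NumToString(ip_address_v4), IPv6NumToString(ip_address_v6))
    #
    # so either physical address column satisfies the one logical field.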
def test_events_boolean_context() -> None:
    columns = ColumnSet(
        [("contexts", Nested([("key", String()), ("value", String())]))]
    )
    query = ClickhouseQuery(
        Table("errors", columns),
        selected_columns=[
            SelectedExpression(
                "contexts[device.charging]",
                FunctionCall(
                    "contexts[device.charging]",
                    "arrayElement",
                    (
                        Column(None, None, "contexts.value"),
                        FunctionCall(
                            None,
                            "indexOf",
                            (
                                Column(None, None, "contexts.key"),
                                Literal(None, "device.charging"),
                            ),
                        ),
                    ),
                ),
            )
        ],
    )
    expected = ClickhouseQuery(
        Table("errors", columns),
        selected_columns=[
            SelectedExpression(
                "contexts[device.charging]",
                FunctionCall(
                    "contexts[device.charging]",
                    "if",
                    (
                        binary_condition(
                            ConditionFunctions.IN,
                            FunctionCall(
                                None,
                                "arrayElement",
                                (
                                    Column(None, None, "contexts.value"),
                                    FunctionCall(
                                        None,
                                        "indexOf",
                                        (
                                            Column(None, None, "contexts.key"),
                                            Literal(None, "device.charging"),
                                        ),
                                    ),
                                ),
                            ),
                            literals_tuple(
                                None, [Literal(None, "1"), Literal(None, "True")]
                            ),
                        ),
                        Literal(None, "True"),
                        Literal(None, "False"),
                    ),
                ),
            )
        ],
    )

    settings = HTTPRequestSettings()
    EventsBooleanContextsProcessor().process_query(query, settings)
    assert query.get_selected_columns() == expected.get_selected_columns()
    Enum,
    Float,
    Nested,
    String,
    UInt,
)
from snuba.clusters.storage_sets import StorageSetKey
from snuba.migrations import migration, operations, table_engines
from snuba.migrations.columns import MigrationModifiers as Modifiers

status_type = Enum[Modifiers]([("success", 0), ("error", 1), ("rate-limited", 2)])

columns: Sequence[Column[Modifiers]] = [
    Column("request_id", UUID()),
    Column("request_body", String()),
    Column("referrer", String(Modifiers(low_cardinality=True))),
    Column("dataset", String(Modifiers(low_cardinality=True))),
    Column("projects", Array(UInt(64))),
    Column("organization", UInt(64, Modifiers(nullable=True))),
    Column("timestamp", DateTime()),
    Column("duration_ms", UInt(32)),
    Column("status", status_type),
    Column(
        "clickhouse_queries",
        Nested(
            [
                Column("sql", String()),
                Column("status", status_type),
                Column("trace_id", UUID(Modifiers(nullable=True))),
                Column("duration_ms", UInt(32)),
                Column("stats", String()),
    String,
    UInt,
)
from snuba.clusters.storage_sets import StorageSetKey
from snuba.migrations import migration, operations, table_engines
from snuba.migrations.columns import Materialized
from snuba.migrations.columns import MigrationModifiers as Modifiers

UNKNOWN_SPAN_STATUS = 2

columns = [
    Column("project_id", UInt(64)),
    Column("event_id", UUID()),
    Column("trace_id", UUID()),
    Column("span_id", UInt(64)),
    Column("transaction_name", String(Modifiers(low_cardinality=True))),
    Column(
        "transaction_hash",
        UInt(64, Modifiers(materialized="cityHash64(transaction_name)")),
    ),
    Column("transaction_op", String(Modifiers(low_cardinality=True))),
    Column("transaction_status", UInt(8, Modifiers(default=str(UNKNOWN_SPAN_STATUS)))),
    Column("start_ts", DateTime()),
    Column("start_ms", UInt(16)),
    Column("finish_ts", DateTime()),
    Column("finish_ms", UInt(16)),
    Column("duration", UInt(32)),
    Column("platform", String(Modifiers(low_cardinality=True))),
    Column("environment", String(Modifiers(nullable=True, low_cardinality=True))),
    Column,
    DateTime,
    IPv4,
    IPv6,
    Nested,
    String,
    UInt,
)
from snuba.clusters.storage_sets import StorageSetKey
from snuba.migrations import migration, operations, table_engines
from snuba.migrations.columns import MigrationModifiers as Modifiers

columns: List[Column[Modifiers]] = [
    Column("event_id", UUID()),
    Column("project_id", UInt(64)),
    Column("type", String(Modifiers(low_cardinality=True))),
    Column("timestamp", DateTime()),
    Column("platform", String(Modifiers(low_cardinality=True))),
    Column("environment", String(Modifiers(low_cardinality=True, nullable=True))),
    Column("release", String(Modifiers(low_cardinality=True, nullable=True))),
    Column("dist", String(Modifiers(low_cardinality=True, nullable=True))),
    Column("transaction_name", String(Modifiers(low_cardinality=True, nullable=True))),
    Column("message", String(Modifiers(nullable=True))),
    Column("title", String(Modifiers(nullable=True))),
    Column("user", String(Modifiers(low_cardinality=True))),
    Column("user_hash", UInt(64)),
    Column("user_id", String(Modifiers(nullable=True))),
    Column("user_name", String(Modifiers(nullable=True))),
    Column("user_email", String(Modifiers(nullable=True))),
    strategy = SimpleQueryPlanExecutionStrategy(
        ClickhouseCluster("localhost", 1024, "default", "", "default", 80, set(), True),
        [],
        [
            ColumnSplitQueryStrategy(id_column, project_column, timestamp_column),
            TimeSplitQueryStrategy(timestamp_col=timestamp_column),
        ],
    )

    strategy.execute(query, HTTPRequestSettings(), do_query)


column_set = ColumnSet(
    [
        ("event_id", String()),
        ("project_id", String()),
        ("timestamp", String()),
        ("level", String()),
        ("logger", String()),
        ("server_name", String()),
        ("transaction", String()),
    ]
)

column_split_tests = [
    (
        "event_id",
        "project_id",
        "timestamp",
        {
            "selected_columns": [
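# Context for the fixtures above (a summary of the strategies' behavior, not
# code from this file): ColumnSplitQueryStrategy issues a narrow first query
# over just the id/project/timestamp columns, then a second query fetching the
# remaining columns only for the rows found, while TimeSplitQueryStrategy
# splits the time range into progressively larger windows, most recent first,
# stopping once the requested limit is satisfied.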
from snuba.clickhouse.columns import SchemaModifiers as Modifiers
from snuba.clickhouse.columns import String, UInt
from snuba.clickhouse.query import Query
from snuba.clusters.storage_sets import StorageSetKey
from snuba.datasets.schemas.tables import TableSchema
from snuba.datasets.storage import ReadableTableStorage
from snuba.datasets.storages import StorageKey
from snuba.query import SelectedExpression
from snuba.query.data_source.simple import Table
from snuba.query.expressions import Column, FunctionCall, Literal
from snuba.query.processors.null_column_caster import NullColumnCaster
from snuba.query.query_settings import HTTPQuerySettings

columns1 = ColumnSet(
    [
        ("not_mismatched", DateTime()),
        ("mismatched1", String(Modifiers(nullable=True))),
        ("mismatched2", UInt(64, Modifiers(nullable=True))),
    ]
)

columns2 = ColumnSet(
    [
        ("timestamp", DateTime()),
        ("mismatched1", String()),  # non-nullable by default
        ("mismatched2", UInt(64, Modifiers(nullable=False))),
    ]
)

schema1 = TableSchema(
    columns=columns1,
    local_table_name="discover_local",
    dist_table_name="discover_dist",
    storage_set_key=StorageSetKey.DISCOVER,
    mandatory_conditions=[],
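    # A sketch of the scenario these fixtures set up (based on the column
    # definitions above): `mismatched1` and `mismatched2` are nullable in one
    # storage and non-nullable in the other, which is the condition
    # NullColumnCaster is meant to detect when the two storages are merged;
    # the non-nullable side is expected to be cast to the nullable type so
    # both storages return a consistent column type.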
)
from snuba.clusters.storage_sets import StorageSetKey
from snuba.migrations import migration, operations, table_engines
from snuba.migrations.columns import MigrationModifiers as Modifiers

raw_columns: Sequence[Column[Modifiers]] = [
    Column("replay_id", UUID()),
    Column("sequence_id", UInt(16)),
    Column("trace_ids", Array(UUID())),
    Column(
        "_trace_ids_hashed",
        UInt(64, Modifiers(materialized="arrayMap(t -> cityHash64(t), trace_ids)")),
    ),
    Column("title", String()),
    ### columns used by other sentry events
    Column("project_id", UInt(64)),
    # time columns
    Column("timestamp", DateTime()),
    # release/environment info
    Column("platform", String(Modifiers(low_cardinality=True))),
    Column("environment", String(Modifiers(nullable=True, low_cardinality=True))),
    Column("release", String(Modifiers(nullable=True))),
    Column("dist", String(Modifiers(nullable=True))),
    Column("ip_address_v4", IPv4(Modifiers(nullable=True))),
    Column("ip_address_v6", IPv6(Modifiers(nullable=True))),
    # user columns
    Column("user", String()),
    Column("user_hash", UInt(64)),
from snuba.migrations import migration, operations, table_engines
from snuba.processor import MAX_UINT32, NIL_UUID

raw_columns = [
    Column("session_id", UUID()),
    Column("distinct_id", UUID()),
    Column("seq", UInt(64)),
    Column("org_id", UInt(64)),
    Column("project_id", UInt(64)),
    Column("retention_days", UInt(16)),
    Column("duration", UInt(32)),
    Column("status", UInt(8)),
    Column("errors", UInt(16)),
    Column("received", DateTime()),
    Column("started", DateTime()),
    Column("release", LowCardinality(String())),
    Column("environment", LowCardinality(String())),
]

aggregate_columns = [
    Column("org_id", UInt(64)),
    Column("project_id", UInt(64)),
    Column("started", DateTime()),
    Column("release", LowCardinality(String())),
    Column("environment", LowCardinality(String())),
    Column(
        "duration_quantiles",
        AggregateFunction("quantilesIf(0.5, 0.9)", UInt(32), UInt(8)),
    ),
    Column("sessions", AggregateFunction("countIf", UUID(), UInt(8))),
    Column("users", AggregateFunction("uniqIf", UUID(), UInt(8))),
    IPv4,
    IPv6,
    Nested,
    String,
    UInt,
)
from snuba.clusters.storage_sets import StorageSetKey
from snuba.datasets.storages.tags_hash_map import TAGS_HASH_MAP_COLUMN
from snuba.migrations import migration, operations, table_engines
from snuba.migrations.columns import MigrationModifiers as Modifiers

columns = [
    Column("project_id", UInt(64)),
    Column("timestamp", DateTime()),
    Column("event_id", UUID(Modifiers(codecs=["NONE"]))),
    Column("platform", String(Modifiers(low_cardinality=True))),
    Column("environment", String(Modifiers(nullable=True, low_cardinality=True))),
    Column("release", String(Modifiers(nullable=True, low_cardinality=True))),
    Column("dist", String(Modifiers(nullable=True, low_cardinality=True))),
    Column("ip_address_v4", IPv4(Modifiers(nullable=True))),
    Column("ip_address_v6", IPv6(Modifiers(nullable=True))),
    Column("user", String(Modifiers(default="''"))),
    Column("user_hash", UInt(64, Modifiers(materialized="cityHash64(user)"))),
    Column("user_id", String(Modifiers(nullable=True))),
    Column("user_name", String(Modifiers(nullable=True))),
    Column("user_email", String(Modifiers(nullable=True))),
    Column("sdk_name", String(Modifiers(nullable=True, low_cardinality=True))),
    Column("sdk_version", String(Modifiers(nullable=True, low_cardinality=True))),
    Column("http_method", String(Modifiers(nullable=True,
    AggregateFunction,
    Column,
    DateTime,
    String,
    UInt,
)
from snuba.clusters.storage_sets import StorageSetKey
from snuba.migrations import operations
from snuba.migrations.columns import MigrationModifiers as Modifiers
from snuba.processor import MAX_UINT32, NIL_UUID

aggregate_columns_v1: Sequence[Column[Modifiers]] = [
    Column("org_id", UInt(64)),
    Column("project_id", UInt(64)),
    Column("started", DateTime()),
    Column("release", String(Modifiers(low_cardinality=True))),
    Column("environment", String(Modifiers(low_cardinality=True))),
    Column(
        "duration_quantiles",
        AggregateFunction("quantilesIf(0.5, 0.9)", [UInt(32), UInt(8)]),
    ),
    Column("sessions", AggregateFunction("countIf", [UUID(), UInt(8)])),
    Column("users", AggregateFunction("uniqIf", [UUID(), UInt(8)])),
    Column("sessions_crashed", AggregateFunction("countIf", [UUID(), UInt(8)])),
    Column("sessions_abnormal", AggregateFunction("countIf", [UUID(), UInt(8)])),
    Column("sessions_errored", AggregateFunction("uniqIf", [UUID(), UInt(8)])),
    Column("users_crashed", AggregateFunction("uniqIf",
from snuba.datasets.spans_processor import UNKNOWN_SPAN_STATUS, SpansMessageProcessor
from snuba.datasets.storage import WritableTableStorage
from snuba.datasets.storages import StorageKey
from snuba.datasets.storages.tags_hash_map import TAGS_HASH_MAP_COLUMN
from snuba.datasets.table_storage import KafkaStreamLoader
from snuba.query.processors.prewhere import PrewhereProcessor
from snuba.web.split import TimeSplitQueryStrategy

columns = ColumnSet(
    [
        ("project_id", UInt(64)),
        ("transaction_id", UUID()),
        ("trace_id", UUID()),
        ("transaction_span_id", UInt(64)),
        ("span_id", UInt(64)),
        ("parent_span_id", Nullable(UInt(64))),
        ("transaction_name", LowCardinality(String())),
        ("description", String()),  # description in span
        ("op", LowCardinality(String())),
        (
            "status",
            WithDefault(UInt(8), str(UNKNOWN_SPAN_STATUS)),
        ),
        ("start_ts", DateTime()),
        ("start_ns", UInt(32)),
        ("finish_ts", DateTime()),
        ("finish_ns", UInt(32)),
        ("duration_ms", UInt(32)),
        ("tags", Nested([("key", String()), ("value", String())])),
        ("_tags_hash_map", Materialized(Array(UInt(64)), TAGS_HASH_MAP_COLUMN)),
        ("retention_days", UInt(16)),
        ("deleted", UInt(8)),
    def __init__(self):
        metadata_columns = ColumnSet([
            # optional stream related data
            ('offset', Nullable(UInt(64))),
            ('partition', Nullable(UInt(16))),
        ])

        promoted_tag_columns = ColumnSet([
            # These are the classic tags, they are saved in Snuba exactly as they
            # appear in the event body.
            ('level', Nullable(String())),
            ('logger', Nullable(String())),
            ('server_name', Nullable(String())),  # future name: device_id?
            ('transaction', Nullable(String())),
            ('environment', Nullable(String())),
            ('sentry:release', Nullable(String())),
            ('sentry:dist', Nullable(String())),
            ('sentry:user', Nullable(String())),
            ('site', Nullable(String())),
            ('url', Nullable(String())),
        ])

        promoted_context_tag_columns = ColumnSet([
            # These are promoted tags that come in `tags`, but are more closely
            # related to contexts. To avoid naming confusion with Clickhouse nested
            # columns, they are stored in the database with s/./_/
            # promoted tags
            ('app_device', Nullable(String())),
            ('device', Nullable(String())),
            ('device_family', Nullable(String())),
            ('runtime', Nullable(String())),
            ('runtime_name', Nullable(String())),
            ('browser', Nullable(String())),
            ('browser_name', Nullable(String())),
            ('os', Nullable(String())),
            ('os_name', Nullable(String())),
            ('os_rooted', Nullable(UInt(8))),
        ])

        promoted_context_columns = ColumnSet([
            ('os_build', Nullable(String())),
            ('os_kernel_version', Nullable(String())),
            ('device_name', Nullable(String())),
            ('device_brand', Nullable(String())),
            ('device_locale', Nullable(String())),
            ('device_uuid', Nullable(String())),
            ('device_model_id', Nullable(String())),
            ('device_arch', Nullable(String())),
            ('device_battery_level', Nullable(Float(32))),
            ('device_orientation', Nullable(String())),
            ('device_simulator', Nullable(UInt(8))),
            ('device_online', Nullable(UInt(8))),
            ('device_charging', Nullable(UInt(8))),
        ])

        required_columns = ColumnSet([
            ('event_id', FixedString(32)),
            ('project_id', UInt(64)),
            ('group_id', UInt(64)),
            ('timestamp', DateTime()),
            ('deleted', UInt(8)),
            ('retention_days', UInt(16)),
        ])

        all_columns = required_columns + [
            # required for non-deleted
            ('platform', Nullable(String())),
            ('message', Nullable(String())),
            ('primary_hash', Nullable(FixedString(32))),
            ('received', Nullable(DateTime())),
            ('search_message', Nullable(String())),
            ('title', Nullable(String())),
            ('location', Nullable(String())),

            # optional user
            ('user_id', Nullable(String())),
            ('username', Nullable(String())),
            ('email', Nullable(String())),
            ('ip_address', Nullable(String())),

            # optional geo
            ('geo_country_code', Nullable(String())),
            ('geo_region', Nullable(String())),
            ('geo_city', Nullable(String())),

            ('sdk_name', Nullable(String())),
            ('sdk_version', Nullable(String())),
            ('type', Nullable(String())),
            ('version', Nullable(String())),
        ] + metadata_columns \
            + promoted_context_columns \
            + promoted_tag_columns \
            + promoted_context_tag_columns \
            + [
            # other tags
            ('tags', Nested([
                ('key', String()),
                ('value', String()),
            ])),

            # other context
            ('contexts', Nested([
                ('key', String()),
                ('value', String()),
            ])),

            # http interface
            ('http_method', Nullable(String())),
            ('http_referer', Nullable(String())),

            # exception interface
            ('exception_stacks', Nested([
                ('type', Nullable(String())),
                ('value', Nullable(String())),
                ('mechanism_type', Nullable(String())),
                ('mechanism_handled', Nullable(UInt(8))),
            ])),
            ('exception_frames', Nested([
                ('abs_path', Nullable(String())),
                ('filename', Nullable(String())),
                ('package', Nullable(String())),
                ('module', Nullable(String())),
                ('function', Nullable(String())),
                ('in_app', Nullable(UInt(8))),
                ('colno', Nullable(UInt(32))),
                ('lineno', Nullable(UInt(32))),
                ('stack_level', UInt(16)),
            ])),

            # These are columns we added later in the life of the (current) production
            # database. They don't necessarily belong here in a logical/readability sense
            # but they are here to match the order of columns in production because
            # `insert_distributed_sync` is very sensitive to column existence and ordering.
            ('culprit', Nullable(String())),
            ('sdk_integrations', Array(String())),
            ('modules', Nested([
                ('name', String()),
                ('version', String()),
            ])),
        ]

        sample_expr = 'cityHash64(toString(event_id))'

        schema = ReplacingMergeTreeSchema(
            columns=all_columns,
            local_table_name='sentry_local',
            dist_table_name='sentry_dist',
            mandatory_conditions=[('deleted', '=', 0)],
            order_by='(project_id, toStartOfDay(timestamp), %s)' % sample_expr,
            partition_by='(toMonday(timestamp), if(equals(retention_days, 30), 30, 90))',
            version_column='deleted',
            sample_expr=sample_expr,
            migration_function=events_migrations,
        )

        dataset_schemas = DatasetSchemas(
            read_schema=schema,
            write_schema=schema,
        )

        table_writer = TableWriter(
            write_schema=schema,
            stream_loader=KafkaStreamLoader(
                processor=EventsProcessor(promoted_tag_columns),
                default_topic="events",
                replacement_topic="event-replacements",
                commit_log_topic="snuba-commit-log",
            ),
        )

        super(EventsDataset, self).__init__(
            dataset_schemas=dataset_schemas,
            table_writer=table_writer,
            time_group_columns={
                'time': 'timestamp',
                'rtime': 'received',
            },
            time_parse_columns=('timestamp', 'received'),
        )

        self.__metadata_columns = metadata_columns
        self.__promoted_tag_columns = promoted_tag_columns
        self.__promoted_context_tag_columns = promoted_context_tag_columns
        self.__promoted_context_columns = promoted_context_columns
        self.__required_columns = required_columns

        self.__tags_processor = TagColumnProcessor(
            columns=all_columns,
            promoted_columns=self._get_promoted_columns(),
            column_tag_map=self._get_column_tag_map(),
        )
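    # For reference: with sample_expr interpolated, the effective ordering key
    # of the schema above is
    #
    #     ORDER BY (project_id, toStartOfDay(timestamp), cityHash64(toString(event_id)))
    #
    # and the same cityHash64 expression is used for sampling.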
    def attempt_map(
        self,
        expression: SubscriptableReference,
        children_translator: SnubaClickhouseStrictTranslator,
    ) -> Optional[FunctionCall]:
        # Columns listed in subscript_names do not exist in this storage:
        # translate subscript accesses on them (e.g. tags[...]) into
        # identity(NULL) so the query returns NULL instead of failing.
        if expression.column.column_name in self.subscript_names:
            return identity(Literal(None, None), expression.alias)
        else:
            return None


EVENTS_COLUMNS = ColumnSet(
    [
        ("group_id", UInt(64, Modifiers(nullable=True))),
        ("primary_hash", FixedString(32, Modifiers(nullable=True))),
        # Promoted tags
        ("level", String(Modifiers(nullable=True))),
        ("logger", String(Modifiers(nullable=True))),
        ("server_name", String(Modifiers(nullable=True))),
        ("site", String(Modifiers(nullable=True))),
        ("url", String(Modifiers(nullable=True))),
        ("location", String(Modifiers(nullable=True))),
        ("culprit", String(Modifiers(nullable=True))),
        ("received", DateTime(Modifiers(nullable=True))),
        ("sdk_integrations", Array(String(), Modifiers(nullable=True))),
        ("version", String(Modifiers(nullable=True))),
        # exception interface
        (
            "exception_stacks",
            Nested(
                [
                    ("type", String(Modifiers(nullable=True))),
                    ("value", String(Modifiers(nullable=True))),
from typing import Sequence

from snuba.clickhouse.columns import Column, DateTime, Enum, String, UInt, WithDefault
from snuba.clusters.storage_sets import StorageSetKey
from snuba.migrations import migration, operations
from snuba.migrations.context import Context
from snuba.migrations.status import Status
from snuba.migrations.table_engines import Distributed, ReplacingMergeTree

columns = [
    Column("group", String()),
    Column("migration_id", String()),
    Column("timestamp", DateTime()),
    Column(
        "status",
        Enum([("completed", 0), ("in_progress", 1), ("not_started", 2)]),
    ),
    Column("version", WithDefault(UInt(64), "1")),
]


class Migration(migration.Migration):
    """
    This migration extends Migration instead of MultiStepMigration since it is
    responsible for bootstrapping the migration system itself. It skips setting
    the in progress status in the forwards method and the not started status in
    the backwards method. Since the migration table doesn't exist yet, we can't
    write any statuses until this migration is completed.
    """

    blocking = False
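    # A sketch of the forwards shape the docstring describes (assumed; the
    # file's actual body is truncated here, and __forwards_ops is a
    # hypothetical helper): run the DDL first, then record COMPLETED directly,
    # since there is no migrations table to write an IN_PROGRESS row into
    # until this migration has run.
    #
    # def forwards(self, context: Context) -> None:
    #     migration_id, logger, update_status = context
    #     for op in self.__forwards_ops():
    #         op.execute()
    #     update_status(Status.COMPLETED)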
from abc import ABC
from typing import Mapping, Sequence
from unittest.mock import Mock

from snuba.clickhouse.columns import UUID, ColumnSet, String, UInt
from snuba.datasets.entities import EntityKey
from snuba.datasets.entity import Entity
from snuba.query.data_source.join import ColumnEquivalence, JoinRelationship, JoinType
from snuba.query.extensions import QueryExtension
from snuba.query.processors import QueryProcessor

EVENTS_SCHEMA = ColumnSet(
    [
        ("event_id", UUID()),
        ("project_id", UInt(32)),
        ("message", String()),
        ("group_id", UInt(32)),
        ("user_id", UInt(64)),
    ]
)

GROUPS_SCHEMA = ColumnSet(
    [
        ("id", UUID()),
        ("project_id", UInt(32)),
        ("message", String()),
        ("user_id", UInt(64)),
    ]
)

GROUPS_ASSIGNEE = ColumnSet(