Code Example #1
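 # Distributed-table migration for the metrics counters: the aggregated "value"
 # column is an AggregateFunction("sum", [Float(64)]) mirroring the local table.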
 def forwards_dist(self) -> Sequence[operations.SqlOperation]:
     return get_forward_migrations_dist(
         dist_table_name="metrics_counters_dist",
         local_table_name="metrics_counters_local",
         aggregation_col_schema=[
             Column("value", AggregateFunction("sum", [Float(64)])),
         ],
     )
Code Example #2
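 # Applies __forward_migrations to metrics_distributions_local, then recreates
 # the polymorphic materialized views for distributions, sets and counters
 # against the raw table at materialization_version=4.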
 def forwards_local(self) -> Sequence[operations.SqlOperation]:
     return [
         *self.__forward_migrations("metrics_distributions_local"),
         get_forward_view_migration_polymorphic_table_v2(
             source_table_name=self.raw_table_name,
             table_name="metrics_distributions_local",
             mv_name=get_polymorphic_mv_v3_name("distributions"),
             aggregation_col_schema=COL_SCHEMA_DISTRIBUTIONS_V2,
             aggregation_states=
             ("quantilesState(0.5, 0.75, 0.9, 0.95, 0.99)((arrayJoin(distribution_values) AS values_rows)) as percentiles, "
              "minState(values_rows) as min, "
              "maxState(values_rows) as max, "
              "avgState(values_rows) as avg, "
              "sumState(values_rows) as sum, "
              "countState(values_rows) as count, "
              "histogramState(250)(values_rows) as histogram_buckets"),
             metric_type="distribution",
             materialization_version=4,
         ),
         # No changes in these MVs' schema. We just need to recreate the
         # exact same MVs as in 0023 for the new materialization_version.
         get_forward_view_migration_polymorphic_table_v2(
             source_table_name=self.raw_table_name,
             table_name="metrics_sets_local",
             mv_name=get_polymorphic_mv_v3_name("sets"),
             aggregation_col_schema=[
                 Column("value",
                        AggregateFunction("uniqCombined64", [UInt(64)])),
             ],
             aggregation_states=
             "uniqCombined64State(arrayJoin(set_values)) as value",
             metric_type="set",
             materialization_version=4,
         ),
         get_forward_view_migration_polymorphic_table_v2(
             source_table_name=self.raw_table_name,
             table_name="metrics_counters_local",
             mv_name=get_polymorphic_mv_v3_name("counters"),
             aggregation_col_schema=[
                 Column("value", AggregateFunction("sum", [Float(64)])),
             ],
             aggregation_states="sumState(count_value) as value",
             metric_type="counter",
             materialization_version=4,
         ),
     ]
Code Example #3
 def forwards_dist(self) -> Sequence[operations.SqlOperation]:
     return get_forward_migrations_dist(
         dist_table_name="metrics_sets_dist",
         local_table_name="metrics_sets_local",
         aggregation_col_schema=[
             Column("value", AggregateFunction("uniqCombined64",
                                               [UInt(64)])),
         ],
     )
Code Example #4
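 # Consolidated counters materialized view: folds bucket rows from
 # metrics_counters_buckets_local into a sumState(value) aggregate.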
 def forwards_local(self) -> Sequence[operations.SqlOperation]:
     return (get_forward_view_migration_local_consolidated(
         source_table_name="metrics_counters_buckets_local",
         table_name="metrics_counters_local",
         mv_name=get_consolidated_mv_name("counters"),
         aggregation_col_schema=[
             Column("value", AggregateFunction("sum", [Float(64)])),
         ],
         aggregation_states="sumState(value) as value",
     ), )
Code Example #5
File: metrics.py Project: getsentry/snuba
 def __init__(self) -> None:
     super().__init__(
         writable_storage_key=StorageKey.METRICS_RAW,
         readable_storage_key=StorageKey.METRICS_COUNTERS,
         value_schema=[
             Column("value", AggregateFunction("sum", [Float(64)]))
         ],
         mappers=TranslationMappers(functions=[
             FunctionNameMapper("sum", "sumMerge"),
             FunctionNameMapper("sumIf", "sumMergeIf"),
         ], ),
     )
Code Example #6
 def forwards_local(self) -> Sequence[operations.SqlOperation]:
     return get_forward_migrations_local(
         source_table_name="metrics_buckets_local",
         table_name="metrics_sets_local",
         mv_name="metrics_sets_mv_local",
         aggregation_col_schema=[
             Column("value", AggregateFunction("uniqCombined64",
                                               [UInt(64)])),
         ],
         aggregation_states=
         "uniqCombined64State(arrayJoin(set_values)) as value",
     )
Code Example #7
File: metrics.py Project: getsentry/snuba
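 # Distributions value schema plus mappers translating query-time functions
 # (min, max, avg, sum, count, quantiles, histogram and their *If variants)
 # into the matching *Merge / *MergeIf aggregate-merge functions.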
 def __init__(self) -> None:
     super().__init__(
         writable_storage_key=StorageKey.METRICS_RAW,
         readable_storage_key=StorageKey.METRICS_DISTRIBUTIONS,
         value_schema=[
             Column(
                 "percentiles",
                 AggregateFunction("quantiles(0.5, 0.75, 0.9, 0.95, 0.99)",
                                   [Float(64)]),
             ),
             Column("min", AggregateFunction("min", [Float(64)])),
             Column("max", AggregateFunction("max", [Float(64)])),
             Column("avg", AggregateFunction("avg", [Float(64)])),
             Column("sum", AggregateFunction("sum", [Float(64)])),
             Column("count", AggregateFunction("count", [Float(64)])),
             Column(
                 "histogram_buckets",
                 AggregateFunction("histogram(250)", [Float(64)]),
             ),
         ],
         mappers=TranslationMappers(
             functions=[
                 AggregateFunctionMapper("value", "min", "minMerge", "min"),
                 AggregateFunctionMapper("value", "minIf", "minMergeIf",
                                         "min"),
                 AggregateFunctionMapper("value", "max", "maxMerge", "max"),
                 AggregateFunctionMapper("value", "maxIf", "maxMergeIf",
                                         "max"),
                 AggregateFunctionMapper("value", "avg", "avgMerge", "avg"),
                 AggregateFunctionMapper("value", "avgIf", "avgMergeIf",
                                         "avg"),
                 AggregateFunctionMapper("value", "sum", "sumMerge", "sum"),
                 AggregateFunctionMapper("value", "sumIf", "sumMergeIf",
                                         "sum"),
                 AggregateFunctionMapper("value", "count", "countMerge",
                                         "count"),
                 AggregateFunctionMapper("value", "countIf", "countMergeIf",
                                         "count"),
             ],
             curried_functions=[
                 AggregateCurriedFunctionMapper("value", "quantiles",
                                                "quantilesMerge",
                                                "percentiles"),
                 AggregateCurriedFunctionMapper("value", "quantilesIf",
                                                "quantilesMergeIf",
                                                "percentiles"),
                 AggregateCurriedFunctionMapper("value", "histogram",
                                                "histogramMerge",
                                                "histogram_buckets"),
                 AggregateCurriedFunctionMapper("value", "histogramIf",
                                                "histogramMergeIf",
                                                "histogram_buckets"),
             ],
         ),
     )
Code Example #8
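 # Adds a "histogram_buckets" AggregateFunction("histogram(250)") column to the
 # given table, positioned after the existing "count" column.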
 def __forward_migrations(
         self, table_name: str) -> Sequence[operations.SqlOperation]:
     return [
         operations.AddColumn(
             storage_set=StorageSetKey.METRICS,
             table_name=table_name,
             column=Column(
                 "histogram_buckets",
                 AggregateFunction("histogram(250)", [Float(64)]),
             ),
             after="count",
         )
     ]
Code Example #9
File: metrics.py Project: getsentry/snuba
 def __init__(self) -> None:
     super().__init__(
         writable_storage_key=StorageKey.METRICS_RAW,
         readable_storage_key=StorageKey.METRICS_SETS,
         value_schema=[
             Column("value", AggregateFunction("uniqCombined64",
                                               [UInt(64)])),
         ],
         mappers=TranslationMappers(functions=[
             FunctionNameMapper("uniq", "uniqCombined64Merge"),
             FunctionNameMapper("uniqIf", "uniqCombined64MergeIf"),
         ], ),
     )
Code Example #10
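 # Parse-tree visitor: unpacks the children of a parsed AggregateFunction type
 # expression and rebuilds the column type from the function name and its
 # argument types.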
 def visit_agg(self, node: Node,
               visited_children: Iterable[Any]) -> AggregateFunction:
     (
         _agg,
         _paren,
         _sp,
         agg_func,
         _sp,
         _comma,
         _sp,
         agg_types,
         _sp,
         _paren,
     ) = visited_children
     return AggregateFunction(agg_func, *agg_types)
Code Example #11
File: metrics.py Project: pombredanne/snuba
 def __init__(self) -> None:
     super().__init__(
         writable_storage_key=StorageKey.METRICS_COUNTERS_BUCKETS,
         readable_storage_key=StorageKey.METRICS_COUNTERS,
         value_schema=[
             Column("value", AggregateFunction("sum", [Float(64)]))
         ],
         mappers=TranslationMappers(columns=[
             ColumnToFunction(
                 None,
                 "value",
                 "sumMerge",
                 (ColumnExpr(None, None, "value"), ),
             ),
         ], ),
     )
Code Example #12
File: templates.py Project: getsentry/snuba
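# Returns the keyword arguments used to build the counters materialized-view
# migration (source/destination tables, aggregate column schema, sumState
# expression) for a given granularity.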
def get_migration_args_for_counters(
    granularity: int = ORIGINAL_GRANULARITY,
) -> MigrationArgs:
    return {
        "source_table_name": "metrics_counters_buckets_local",
        "table_name": "metrics_counters_local",
        "mv_name": get_mv_name("counters", granularity),
        "aggregation_col_schema": [
            Column("value", AggregateFunction("sum", [Float(64)])),
        ],
        "aggregation_states": "sumState(value) as value",
        "granularity": granularity,
    }
Code Example #13
File: templates.py Project: getsentry/snuba
def get_migration_args_for_sets(
    granularity: int = ORIGINAL_GRANULARITY,
) -> MigrationArgs:
    return {
        "source_table_name": "metrics_buckets_local",
        "table_name": "metrics_sets_local",
        "mv_name": get_mv_name("sets", granularity),
        "aggregation_col_schema": [
            Column("value", AggregateFunction("uniqCombined64", [UInt(64)])),
        ],
        "aggregation_states": "uniqCombined64State(arrayJoin(set_values)) as value",
        "granularity": granularity,
    }
Code Example #14
File: metrics.py Project: pombredanne/snuba
 def __init__(self) -> None:
     super().__init__(
         writable_storage_key=StorageKey.METRICS_BUCKETS,
         readable_storage_key=StorageKey.METRICS_SETS,
         value_schema=[
             Column("value", AggregateFunction("uniqCombined64",
                                               [UInt(64)])),
         ],
         mappers=TranslationMappers(columns=[
             ColumnToFunction(
                 None,
                 "value",
                 "uniqCombined64Merge",
                 (ColumnExpr(None, None, "value"), ),
             ),
         ], ),
     )
Code Example #15
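 # Recreates the sets materialized view (polymorphic table v3 helper) targeting
 # materialization version 4; the appended WHERE clause limits it to rows with
 # a timestamp after 2022-03-29.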
 def forwards_local(self) -> Sequence[operations.SqlOperation]:
     return [
         get_forward_view_migration_polymorphic_table_v3(
             source_table_name=self.raw_table_name,
             table_name=self.table_name,
             aggregation_col_schema=[
                 Column("value",
                        AggregateFunction("uniqCombined64", [UInt(64)])),
             ],
             aggregation_states=
             "uniqCombined64State(arrayJoin(set_values)) as value",
             mv_name=get_polymorphic_mv_variant_name(
                 "sets", self.mv_version),
             metric_type="set",
             target_mat_version=4,
             appended_where_clause=
             "AND timestamp > toDateTime('2022-03-29 00:00:00')",
         )
     ]
Code Example #16
File: metrics.py Project: pombredanne/snuba
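 # Distributions columns and mappers: "percentiles" is read through a curried
 # quantilesMerge(0.5, 0.75, 0.9, 0.95, 0.99) call, while min/max/avg/sum/count
 # use plain *Merge mappers.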
 def __init__(self) -> None:
     super().__init__(
         writable_storage_key=StorageKey.METRICS_DISTRIBUTIONS_BUCKETS,
         readable_storage_key=StorageKey.METRICS_DISTRIBUTIONS,
         value_schema=[
             Column(
                 "percentiles",
                 AggregateFunction("quantiles(0.5, 0.75, 0.9, 0.95, 0.99)",
                                   [Float(64)]),
             ),
             Column("min", AggregateFunction("min", [Float(64)])),
             Column("max", AggregateFunction("max", [Float(64)])),
             Column("avg", AggregateFunction("avg", [Float(64)])),
             Column("sum", AggregateFunction("sum", [Float(64)])),
             Column("count", AggregateFunction("count", [Float(64)])),
         ],
         mappers=TranslationMappers(columns=[
             ColumnToCurriedFunction(
                 None,
                 "percentiles",
                 FunctionCall(
                     None,
                     "quantilesMerge",
                     tuple(
                         Literal(None, quant)
                         for quant in [0.5, 0.75, 0.9, 0.95, 0.99]),
                 ),
                 (ColumnExpr(None, None, "percentiles"), ),
             ),
             merge_mapper("min"),
             merge_mapper("max"),
             merge_mapper("avg"),
             merge_mapper("sum"),
             merge_mapper("count"),
         ], ),
     )
Code Example #17
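# Creates generic_metric_sets_local as an AggregatingMergeTree with a
# uniqCombined64 "value" column, adds materialized tag-hash columns and
# bloom_filter indexes, and creates a matching Distributed table
# (generic_metric_sets_aggregated_dist).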
class Migration(migration.ClickhouseNodeMigration):
    blocking = False
    granularity = "2048"
    local_table_name = "generic_metric_sets_local"
    columns: Sequence[Column[Modifiers]] = [
        Column("org_id", UInt(64)),
        Column("project_id", UInt(64)),
        Column("metric_id", UInt(64)),
        Column("granularity", UInt(8)),
        Column("timestamp", DateTime(modifiers=Modifiers(codecs=["DoubleDelta"]))),
        Column("retention_days", UInt(16)),
        Column(
            "tags",
            Nested(
                [
                    ("key", UInt(64)),
                    ("indexed_value", UInt(64)),
                    ("raw_value", String()),
                ]
            ),
        ),
        Column("value", AggregateFunction("uniqCombined64", [UInt(64)])),
        Column("use_case_id", String(Modifiers(low_cardinality=True))),
    ]

    def forwards_local(self) -> Sequence[operations.SqlOperation]:
        return [
            operations.CreateTable(
                storage_set=StorageSetKey.GENERIC_METRICS_SETS,
                table_name=self.local_table_name,
                engine=table_engines.AggregatingMergeTree(
                    storage_set=StorageSetKey.GENERIC_METRICS_SETS,
                    order_by="(org_id, project_id, metric_id, granularity, timestamp, tags.key, tags.indexed_value, tags.raw_value, retention_days, use_case_id)",
                    primary_key="(org_id, project_id, metric_id, granularity, timestamp)",
                    partition_by="(retention_days, toMonday(timestamp))",
                    settings={"index_granularity": self.granularity},
                    ttl="timestamp + toIntervalDay(retention_days)",
                ),
                columns=self.columns,
            ),
            operations.AddColumn(
                storage_set=StorageSetKey.GENERIC_METRICS_SETS,
                table_name=self.local_table_name,
                column=Column(
                    "_indexed_tags_hash",
                    Array(
                        UInt(64),
                        Modifiers(
                            materialized=hash_map_int_column_definition(
                                "tags.key", "tags.indexed_value"
                            )
                        ),
                    ),
                ),
            ),
            operations.AddColumn(
                storage_set=StorageSetKey.GENERIC_METRICS_SETS,
                table_name=self.local_table_name,
                column=Column(
                    "_raw_tags_hash",
                    Array(
                        UInt(64),
                        Modifiers(
                            materialized=hash_map_int_key_str_value_column_definition(
                                "tags.key", "tags.raw_value"
                            )
                        ),
                    ),
                ),
            ),
            operations.AddIndex(
                storage_set=StorageSetKey.GENERIC_METRICS_SETS,
                table_name=self.local_table_name,
                index_name="bf_indexed_tags_hash",
                index_expression="_indexed_tags_hash",
                index_type="bloom_filter()",
                granularity=1,
            ),
            operations.AddIndex(
                storage_set=StorageSetKey.GENERIC_METRICS_SETS,
                table_name=self.local_table_name,
                index_name="bf_raw_tags_hash",
                index_expression="_raw_tags_hash",
                index_type="bloom_filter()",
                granularity=1,
            ),
            operations.AddIndex(
                storage_set=StorageSetKey.GENERIC_METRICS_SETS,
                table_name=self.local_table_name,
                index_name="bf_tags_key_hash",
                index_expression="tags.key",
                index_type="bloom_filter()",
                granularity=1,
            ),
        ]

    def backwards_local(self) -> Sequence[operations.SqlOperation]:
        return [
            operations.DropTable(
                storage_set=StorageSetKey.GENERIC_METRICS_SETS,
                table_name=self.local_table_name,
            )
        ]

    def forwards_dist(self) -> Sequence[operations.SqlOperation]:
        return [
            operations.CreateTable(
                storage_set=StorageSetKey.GENERIC_METRICS_SETS,
                table_name="generic_metric_sets_aggregated_dist",
                engine=table_engines.Distributed(
                    local_table_name=self.local_table_name, sharding_key=None
                ),
                columns=self.columns,
            )
        ]

    def backwards_dist(self) -> Sequence[operations.SqlOperation]:
        return [
            operations.DropTable(
                storage_set=StorageSetKey.GENERIC_METRICS_SETS,
                table_name="generic_metric_sets_aggregated_dist",
            )
        ]
Code Example #18
    Column("granularity", UInt(8)),
    Column("timestamp", DateTime()),
    Column("retention_days", UInt(16)),
    Column(
        "tags",
        Nested([("key", UInt(64)), ("indexed_value", UInt(64)),
                ("raw_value", String())]),
    ),
    Column("_raw_tags_hash", Array(UInt(64), SchemaModifiers(readonly=True))),
    Column("_indexed_tags_hash", Array(UInt(64),
                                       SchemaModifiers(readonly=True))),
]

sets_storage = ReadableTableStorage(
    storage_key=StorageKey.GENERIC_METRICS_SETS,
    storage_set_key=StorageSetKey.GENERIC_METRICS_SETS,
    schema=TableSchema(
        local_table_name="generic_metrics_sets_local",
        dist_table_name="generic_metrics_sets_dist",
        storage_set_key=StorageSetKey.GENERIC_METRICS_SETS,
        columns=ColumnSet([
            *aggregated_columns,
            Column("value", AggregateFunction("uniqCombined64", [UInt(64)])),
        ]),
    ),
    query_processors=[
        ArrayJoinKeyValueOptimizer("tags"),
        TableRateLimit(),
    ],
)
Code Example #19
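# Sessions storage schemas: the writable raw table plus read columns built from
# conditional aggregate states (quantilesIf, countIf, uniqIf). This excerpt uses
# positional AggregateFunction arguments rather than the list form used in the
# other examples.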
raw_schema = WritableTableSchema(
    columns=all_columns,
    local_table_name=WRITE_LOCAL_TABLE_NAME,
    dist_table_name=WRITE_DIST_TABLE_NAME,
    storage_set_key=StorageSetKey.SESSIONS,
)

read_columns = ColumnSet([
    ("org_id", UInt(64)),
    ("project_id", UInt(64)),
    ("started", DateTime()),
    ("release", String()),
    ("environment", String()),
    (
        "duration_quantiles",
        AggregateFunction("quantilesIf(0.5, 0.9)", UInt(32), UInt(8)),
    ),
    ("sessions", AggregateFunction("countIf", UUID(), UInt(8))),
    ("users", AggregateFunction("uniqIf", UUID(), UInt(8))),
    (
        "sessions_crashed",
        AggregateFunction("countIf", UUID(), UInt(8)),
    ),
    (
        "sessions_abnormal",
        AggregateFunction("countIf", UUID(), UInt(8)),
    ),
    ("sessions_errored", AggregateFunction("uniqIf", UUID(), UInt(8))),
    ("users_crashed", AggregateFunction("uniqIf", UUID(), UInt(8))),
    ("users_abnormal", AggregateFunction("uniqIf", UUID(), UInt(8))),
    ("users_errored", AggregateFunction("uniqIf", UUID(), UInt(8))),
Code Example #20
new_raw_columns: Sequence[Tuple[Column[Modifiers], str]] = [
    (
        Column("quantity", UInt(32, Modifiers(default=str(DEFAULT_QUANTITY)))),
        "distinct_id",
    ),
    (Column("user_agent",
            String(Modifiers(low_cardinality=True))), "environment"),
    (Column("os", String(Modifiers(low_cardinality=True))), "user_agent"),
]

new_dest_columns: Sequence[Tuple[Column[Modifiers], str]] = [
    (Column("user_agent",
            String(Modifiers(low_cardinality=True))), "environment"),
    (Column("os", String(Modifiers(low_cardinality=True))), "user_agent"),
    (
        Column("duration_avg", AggregateFunction("avgIf",
                                                 [UInt(32), UInt(8)])),
        "duration_quantiles",
    ),
    (
        Column("sessions_preaggr",
               AggregateFunction("sumIf", [UInt(32), UInt(8)])),
        "sessions",
    ),
    (
        Column(
            "sessions_crashed_preaggr",
            AggregateFunction("sumIf", [UInt(32), UInt(8)]),
        ),
        "sessions_crashed",
    ),
    (
Code Example #21
File: test_columns.py Project: getsentry/snuba
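 # Column modifier test cases: each pytest.param pairs column definitions
 # carrying Modifier(nullable=True) with the ClickHouse type string they are
 # expected to produce, e.g. Nullable(AggregateFunction(uniqIf, UInt8, UInt32)).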
         [("key", String()), ("val", String(Modifier(nullable=True)))],
         Modifier(nullable=True),
     ),
     Nested([("key", String()), ("val", String())]),
     cast(
         Column[Modifier],
         Nested([("key", String()), ("val", String())],
                Modifier(nullable=True)),
     ),
     "Nullable(Nested(key String, val Nullable(String)))",
     id="nested",
 ),
 pytest.param(
     cast(
         Column[Modifier],
         AggregateFunction("uniqIf", [UInt(8), UInt(32)],
                           Modifier(nullable=True)),
     ),
     AggregateFunction("uniqIf", [UInt(8), UInt(32)]),
     cast(
         Column[Modifier],
         AggregateFunction("uniqIf", [UInt(8)], Modifier(nullable=True)),
     ),
     "Nullable(AggregateFunction(uniqIf, UInt8, UInt32))",
     id="aggregated",
 ),
 pytest.param(
     Enum([("a", 1), ("b", 2)], Modifier(nullable=True)),
     Enum([("a", 1), ("b", 2)]),
     Enum([("a", 1), ("b", 2)]),
     "Nullable(Enum('a' = 1, 'b' = 2))",
     id="enums",
Code Example #22
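# Drops and recreates the generic_metric_sets_aggregation_mv materialized view:
# the SELECT expands granularities with arrayJoin, buckets timestamps with
# multiIf, and aggregates set_values into a uniqCombined64State "value".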
class Migration(migration.ClickhouseNodeMigration):
    blocking = False
    view_name = "generic_metric_sets_aggregation_mv"
    dest_table_columns: Sequence[Column[Modifiers]] = [
        Column("org_id", UInt(64)),
        Column("project_id", UInt(64)),
        Column("metric_id", UInt(64)),
        Column("granularity", UInt(8)),
        Column("timestamp",
               DateTime(modifiers=Modifiers(codecs=["DoubleDelta"]))),
        Column("retention_days", UInt(16)),
        Column(
            "tags",
            Nested([
                ("key", UInt(64)),
                ("indexed_value", UInt(64)),
                ("raw_value", String()),
            ]),
        ),
        Column("value", AggregateFunction("uniqCombined64", [UInt(64)])),
        Column("use_case_id", String(Modifiers(low_cardinality=True))),
    ]

    def forwards_local(self) -> Sequence[operations.SqlOperation]:
        return [
            operations.DropTable(
                storage_set=StorageSetKey.GENERIC_METRICS_SETS,
                table_name=self.view_name,
            ),
            operations.CreateMaterializedView(
                storage_set=StorageSetKey.GENERIC_METRICS_SETS,
                view_name=self.view_name,
                columns=self.dest_table_columns,
                destination_table_name="generic_metric_sets_local",
                query="""
                SELECT
                    use_case_id,
                    org_id,
                    project_id,
                    metric_id,
                    arrayJoin(granularities) as granularity,
                    tags.key,
                    tags.indexed_value,
                    tags.raw_value,
                    toDateTime(multiIf(granularity=0,10,granularity=1,60,granularity=2,3600,granularity=3,86400,-1) *
                      intDiv(toUnixTimestamp(timestamp),
                             multiIf(granularity=0,10,granularity=1,60,granularity=2,3600,granularity=3,86400,-1))) as timestamp,
                    retention_days,
                    uniqCombined64State(arrayJoin(set_values)) as value
                FROM generic_metric_sets_raw_local
                WHERE materialization_version = 1
                  AND metric_type = 'set'
                GROUP BY
                    use_case_id,
                    org_id,
                    project_id,
                    metric_id,
                    tags.key,
                    tags.indexed_value,
                    tags.raw_value,
                    timestamp,
                    granularity,
                    retention_days
                """,
            ),
        ]

    def backwards_local(self) -> Sequence[operations.SqlOperation]:
        return [
            operations.DropTable(
                storage_set=StorageSetKey.GENERIC_METRICS_SETS,
                table_name=self.view_name,
            )
        ]

    def forwards_dist(self) -> Sequence[operations.SqlOperation]:
        return []

    def backwards_dist(self) -> Sequence[operations.SqlOperation]:
        return []
Code Example #23
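# Test fixtures mapping ClickHouse type descriptions to the expected column
# objects, covering basic types, AggregateFunction, Array, Nullable and
# LowCardinality.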
test_data = [
    # Basic types
    (("Date", "", "", ""), Date()),
    (("DateTime", "", "", ""), DateTime()),
    (("Enum8('success' = 0, 'error' = 1)", "", "", ""),
     Enum([("success", 0), ("error", 1)])),
    (("FixedString(32)", "", "", ""), FixedString(32)),
    (("Float32", "", "", ""), Float(32)),
    (("IPv4", "", "", ""), IPv4()),
    (("IPv6", "", "", ""), IPv6()),
    (("String", "", "", ""), String()),
    (("UInt32", "", "", ""), UInt(32)),
    (("UUID", "", "", ""), UUID()),
    # Aggregate functions
    (("AggregateFunction(uniq, UInt8)", "", "", ""),
     AggregateFunction("uniq", UInt(8))),
    (("AggregateFunction(countIf, UUID, UInt8)", "", "", ""),
     AggregateFunction("countIf", UUID(), UInt(8))),
    (("AggregateFunction(quantileIf(0.5, 0.9), UInt32, UInt8)", "", "", ""),
     AggregateFunction("quantileIf(0.5, 0.9)", UInt(32), UInt(8))),
    # Array
    (("Array(String)", "", "", ""), Array(String())),
    (("Array(DateTime)", "", "", ""), Array(DateTime())),
    (("Array(UInt64)", "", "", ""), Array(UInt(64))),
    (("Array(Nullable(UUID))", "", "", ""), Array(Nullable(UUID()))),
    # Nullable
    (("Nullable(String)", "", "", ""), Nullable(String())),
    (("Nullable(FixedString(8))", "", "", ""), Nullable(FixedString(8))),
    (("Nullable(Date)", "", "", ""), Nullable(Date())),
    # Low cardinality
    (("LowCardinality(String)", "", "", ""), LowCardinality(String())),
Code Example #24
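# Distributions migration module: COL_SCHEMA declares the aggregate-state
# columns (percentiles, min, max, avg, sum, count) used with the migration
# helpers imported from the metrics templates.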
from typing import Sequence

from snuba.clickhouse.columns import AggregateFunction, Column, Float
from snuba.migrations import migration, operations
from snuba.migrations.columns import MigrationModifiers
from snuba.migrations.snuba_migrations.metrics.templates import (
    get_forward_migrations_dist,
    get_forward_migrations_local,
    get_reverse_table_migration,
)

COL_SCHEMA: Sequence[Column[MigrationModifiers]] = [
    Column(
        "percentiles",
        AggregateFunction("quantiles(0.5, 0.75, 0.9, 0.95, 0.99)",
                          [Float(64)]),
    ),
    Column("min", AggregateFunction("min", [Float(64)])),
    Column("max", AggregateFunction("max", [Float(64)])),
    Column("avg", AggregateFunction("avg", [Float(64)])),
    Column("sum", AggregateFunction("sum", [Float(64)])),
    Column("count", AggregateFunction("count", [Float(64)])),
]


class Migration(migration.ClickhouseNodeMigration):
    blocking = False

    def forwards_local(self) -> Sequence[operations.SqlOperation]:
        return get_forward_migrations_local(
            source_table_name="metrics_distributions_buckets_local",
Code Example #25
from snuba.migrations.columns import MigrationModifiers as Modifiers
from snuba.processor import MAX_UINT32, NIL_UUID
from .matview import create_matview_v1

aggregate_columns: Sequence[Column[Modifiers]] = [
    Column("org_id", UInt(64)),
    Column("project_id", UInt(64)),
    Column("started", DateTime()),
    Column("release", String(Modifiers(low_cardinality=True))),
    Column("environment", String(Modifiers(low_cardinality=True))),
    Column("user_agent", String(Modifiers(low_cardinality=True))),
    Column("os", String(Modifiers(low_cardinality=True))),
    # durations
    Column(
        "duration_quantiles",
        AggregateFunction("quantilesIf(0.5, 0.9)",
                          [UInt(32), UInt(8)]),
    ),
    Column("duration_avg", AggregateFunction("avgIf",
                                             [UInt(32), UInt(8)])),
    # sessions:
    Column("sessions", AggregateFunction("countIf", [UUID(), UInt(8)])),
    Column("sessions_preaggr", AggregateFunction("sumIf",
                                                 [UInt(32), UInt(8)])),
    Column("sessions_crashed", AggregateFunction("countIf",
                                                 [UUID(), UInt(8)])),
    Column("sessions_crashed_preaggr",
           AggregateFunction("sumIf", [UInt(32), UInt(8)])),
    Column("sessions_abnormal", AggregateFunction("countIf",
                                                  [UUID(), UInt(8)])),
    Column("sessions_abnormal_preaggr",
           AggregateFunction("sumIf", [UInt(32), UInt(8)])),
Code Example #26
    Column("timestamp", DateTime()),
    Column("retention_days", UInt(16)),
    Column("tags", Nested([("key", UInt(64)), ("value", UInt(64))])),
    Column("_tags_hash", Array(UInt(64), SchemaModifiers(readonly=True))),
]

sets_storage = ReadableTableStorage(
    storage_key=StorageKey.METRICS_SETS,
    storage_set_key=StorageSetKey.METRICS,
    schema=TableSchema(
        local_table_name="metrics_sets_local",
        dist_table_name="metrics_sets_dist",
        storage_set_key=StorageSetKey.METRICS,
        columns=ColumnSet([
            *aggregated_columns,
            Column("value", AggregateFunction("uniqCombined64", [UInt(64)])),
        ]),
    ),
    query_processors=[ArrayJoinKeyValueOptimizer("tags")],
)

counters_storage = ReadableTableStorage(
    storage_key=StorageKey.METRICS_COUNTERS,
    storage_set_key=StorageSetKey.METRICS,
    schema=TableSchema(
        local_table_name="metrics_counters_local",
        dist_table_name="metrics_counters_dist",
        storage_set_key=StorageSetKey.METRICS,
        columns=ColumnSet([
            *aggregated_columns,
            Column("value", AggregateFunction("sum", [Float(64)])),