def get_forward_migrations_local(
    source_table_name: str,
    table_name: str,
    mv_name: str,
    aggregation_col_schema: Sequence[Column[Modifiers]],
    aggregation_states: str,
    granularity: int,
) -> Sequence[operations.SqlOperation]:
    """Return the local migrations for an aggregated metrics table.

    Produces, in order: the AggregatingMergeTree table itself, the
    materialized ``_tags_hash`` column, two bloom-filter indexes over the
    tag hashes/keys, and finally the materialized view that feeds the
    table from ``source_table_name``.
    """
    # Common columns first, then the aggregation-specific ones.
    aggregated_cols = [*COMMON_AGGR_COLUMNS, *aggregation_col_schema]

    create_table = operations.CreateTable(
        storage_set=StorageSetKey.METRICS,
        table_name=table_name,
        columns=aggregated_cols,
        engine=table_engines.AggregatingMergeTree(
            storage_set=StorageSetKey.METRICS,
            order_by="(org_id, project_id, metric_id, granularity, timestamp, tags.key, tags.value)",
            partition_by="(retention_days, toMonday(timestamp))",
            settings={"index_granularity": "256"},
        ),
    )

    # Materialized hash of the tag map, used by the bloom-filter index below.
    add_tags_hash = operations.AddColumn(
        storage_set=StorageSetKey.METRICS,
        table_name=table_name,
        column=Column(
            "_tags_hash",
            Array(UInt(64), Modifiers(materialized=INT_TAGS_HASH_MAP_COLUMN)),
        ),
        after="tags.value",
    )

    migrations: list = [create_table, add_tags_hash]

    # Bloom-filter indexes: one over the materialized tag hashes, one over
    # the raw tag keys.
    for index_name, index_expression in (
        ("bf_tags_hash", "_tags_hash"),
        ("bf_tags_key_hash", "tags.key"),
    ):
        migrations.append(
            operations.AddIndex(
                storage_set=StorageSetKey.METRICS,
                table_name=table_name,
                index_name=index_name,
                index_expression=index_expression,
                index_type="bloom_filter()",
                granularity=1,
            )
        )

    # The materialized view that populates the table comes last.
    migrations.append(
        get_forward_view_migration_local(
            source_table_name,
            table_name,
            mv_name,
            aggregation_col_schema,
            aggregation_states,
            granularity,
        )
    )
    return migrations
def forwards_local(self) -> Sequence[operations.SqlOperation]:
    """Create the local aggregate table, its ``_tags_hash`` materialized
    column, and the two bloom-filter indexes used for tag filtering.
    """
    engine = table_engines.AggregatingMergeTree(
        storage_set=StorageSetKey.METRICS,
        order_by="(use_case_id, org_id, project_id, metric_id, granularity, timestamp, tags.key, tags.value, retention_days)",
        primary_key="(use_case_id, org_id, project_id, metric_id, granularity, timestamp)",
        partition_by="(retention_days, toMonday(timestamp))",
        settings={"index_granularity": self.granularity},
        ttl="timestamp + toIntervalDay(retention_days)",
    )

    ops = [
        operations.CreateTable(
            storage_set=StorageSetKey.METRICS,
            table_name=self.table_name,
            columns=self.aggregated_cols,
            engine=engine,
        ),
        # Materialized hash of the tag map; backs the bf_tags_hash index.
        operations.AddColumn(
            storage_set=StorageSetKey.METRICS,
            table_name=self.table_name,
            column=Column(
                "_tags_hash",
                Array(UInt(64), Modifiers(materialized=INT_TAGS_HASH_MAP_COLUMN)),
            ),
            after="tags.value",
        ),
    ]

    # Bloom-filter indexes over the hashed tags and the raw tag keys.
    for index_name, index_expression in (
        ("bf_tags_hash", "_tags_hash"),
        ("bf_tags_key_hash", "tags.key"),
    ):
        ops.append(
            operations.AddIndex(
                storage_set=StorageSetKey.METRICS,
                table_name=self.table_name,
                index_name=index_name,
                index_expression=index_expression,
                index_type="bloom_filter()",
                granularity=1,
            )
        )
    return ops
def forwards_local(self) -> Sequence[operations.Operation]:
    """Add a minmax skip index on ``timestamp`` to the local
    transactions table to speed up time-range filtering.
    """
    minmax_index = operations.AddIndex(
        storage_set=StorageSetKey.TRANSACTIONS,
        table_name="transactions_local",
        index_name="minmax_timestamp",
        index_expression="timestamp",
        index_type="minmax",
        granularity=1,
    )
    return [minmax_index]
def forwards_local(self) -> Sequence[operations.SqlOperation]:
    """Create the local replays table and its trace-id bloom-filter index."""
    engine = table_engines.ReplacingMergeTree(
        storage_set=StorageSetKey.REPLAYS,
        order_by="(project_id, toStartOfDay(timestamp), cityHash64(replay_id), sequence_id)",
        partition_by="(retention_days, toMonday(timestamp))",
        settings={"index_granularity": "8192"},
        ttl="timestamp + toIntervalDay(retention_days)",
    )
    create_table = operations.CreateTable(
        storage_set=StorageSetKey.REPLAYS,
        table_name="replays_local",
        columns=raw_columns,
        engine=engine,
    )
    # Bloom filter over the hashed trace ids for fast membership lookups.
    trace_ids_index = operations.AddIndex(
        storage_set=StorageSetKey.REPLAYS,
        table_name="replays_local",
        index_name="bf_trace_ids_hashed",
        index_expression="_trace_ids_hashed",
        index_type="bloom_filter()",
        granularity=1,
    )
    return [create_table, trace_ids_index]
def forwards_local(self) -> Sequence[operations.SqlOperation]:
    """Create the local generic-metrics sets table, its two materialized
    tag-hash columns, and the three bloom-filter indexes over them.
    """
    storage_set = StorageSetKey.GENERIC_METRICS_SETS

    ops = [
        operations.CreateTable(
            storage_set=storage_set,
            table_name=self.local_table_name,
            engine=table_engines.AggregatingMergeTree(
                storage_set=storage_set,
                order_by="(org_id, project_id, metric_id, granularity, timestamp, tags.key, tags.indexed_value, tags.raw_value, retention_days, use_case_id)",
                primary_key="(org_id, project_id, metric_id, granularity, timestamp)",
                partition_by="(retention_days, toMonday(timestamp))",
                settings={"index_granularity": self.granularity},
                ttl="timestamp + toIntervalDay(retention_days)",
            ),
            columns=self.columns,
        ),
    ]

    # Materialized hash columns: one over the indexed (int) tag values and
    # one over the raw (string) tag values.
    hash_columns = (
        (
            "_indexed_tags_hash",
            hash_map_int_column_definition("tags.key", "tags.indexed_value"),
        ),
        (
            "_raw_tags_hash",
            hash_map_int_key_str_value_column_definition("tags.key", "tags.raw_value"),
        ),
    )
    for column_name, materialized_expr in hash_columns:
        ops.append(
            operations.AddColumn(
                storage_set=storage_set,
                table_name=self.local_table_name,
                column=Column(
                    column_name,
                    Array(UInt(64), Modifiers(materialized=materialized_expr)),
                ),
            )
        )

    # Bloom-filter indexes over both hash columns and the raw tag keys.
    for index_name, index_expression in (
        ("bf_indexed_tags_hash", "_indexed_tags_hash"),
        ("bf_raw_tags_hash", "_raw_tags_hash"),
        ("bf_tags_key_hash", "tags.key"),
    ):
        ops.append(
            operations.AddIndex(
                storage_set=storage_set,
                table_name=self.local_table_name,
                index_name=index_name,
                index_expression=index_expression,
                index_type="bloom_filter()",
                granularity=1,
            )
        )
    return ops