def run_test(
    self,
    subscriptions: Collection[Subscription],
    start: timedelta,
    end: timedelta,
    expected: Collection[ScheduledSubscriptionTask],
    sort_key: Optional[
        Callable[[ScheduledSubscriptionTask], Tuple[datetime, uuid.UUID]]
    ] = None,
) -> None:
    """Persist ``subscriptions``, run the scheduler over the tick built from
    ``start``/``end``, and assert the scheduled tasks equal ``expected``.

    When ``sort_key`` is provided the scheduler output is sorted with it
    before comparison, so callers can ignore scheduling order.
    """
    data_store = RedisSubscriptionDataStore(
        redis_client,
        self.entity_key,
        self.partition_id,
    )
    for sub in subscriptions:
        data_store.create(sub.identifier.uuid, sub.data)

    scheduler = SubscriptionScheduler(
        EntityKey.EVENTS,
        data_store,
        self.partition_id,
        timedelta(minutes=1),
        DummyMetricsBackend(strict=True),
    )

    found = list(scheduler.find(self.build_tick(start, end)))
    if sort_key:
        found.sort(key=sort_key)
    assert found == expected
def run_test(
    self,
    subscriptions: Collection[Subscription],
    start: timedelta,
    end: timedelta,
    expected: Collection[ScheduledTask[Subscription]],
    sort_key=None,
) -> None:
    """Persist ``subscriptions``, schedule them across the interval built
    from ``start``/``end``, and assert the result equals ``expected``.

    ``sort_key``, when given, is used to sort the scheduler output before
    the comparison so ordering differences do not fail the test.
    """
    data_store = RedisSubscriptionDataStore(
        redis_client,
        self.dataset,
        self.partition_id,
    )
    for sub in subscriptions:
        data_store.create(sub.identifier.uuid, sub.data)

    scheduler = SubscriptionScheduler(
        data_store,
        self.partition_id,
        timedelta(minutes=1),
        DummyMetricsBackend(strict=True),
    )

    interval = self.build_interval(start, end)
    found = list(scheduler.find(interval))
    if sort_key:
        found.sort(key=sort_key)
    assert found == expected
def create_subscription() -> None:
    """Register a single events-count subscription under a random UUID in
    partition 0 of the EVENTS subscription store."""
    subscription_store = RedisSubscriptionDataStore(
        redis_client, EntityKey.EVENTS, PartitionId(0)
    )
    data = SubscriptionData(
        project_id=1,
        time_window_sec=60,
        resolution_sec=60,
        query="MATCH (events) SELECT count()",
        entity_subscription=EventsSubscription(data_dict={}),
    )
    subscription_store.create(uuid.uuid4(), data)
def test_scheduler_consumer() -> None:
    """End-to-end check of the scheduler consumer: seed one subscription,
    feed four commit-log messages across two partitions, and assert the
    scheduler produced exactly two scheduled-task messages."""
    # The consumer's partitioning logic reads this setting at import time,
    # so the module must be reloaded after overriding it.
    settings.TOPIC_PARTITION_COUNTS = {"events": 2}
    importlib.reload(scheduler_consumer)

    admin_client = AdminClient(get_default_kafka_configuration())
    create_topics(admin_client, [SnubaTopic.COMMIT_LOG])

    metrics_backend = TestingMetricsBackend()
    entity_name = "events"
    entity = get_entity(EntityKey(entity_name))
    storage = entity.get_writable_storage()
    assert storage is not None
    stream_loader = storage.get_table_writer().get_stream_loader()

    commit_log_topic = Topic("snuba-commit-log")

    # Stand-in for the real producer; only its produce() call count is
    # asserted at the end of the test.
    mock_scheduler_producer = mock.Mock()

    # Imported locally — presumably to pick up state after the reload above
    # and/or avoid import cycles at module load; TODO confirm.
    from snuba.redis import redis_client
    from snuba.subscriptions.data import (
        PartitionId,
        SubscriptionData,
    )
    from snuba.subscriptions.entity_subscription import EventsSubscription
    from snuba.subscriptions.store import RedisSubscriptionDataStore

    entity_key = EntityKey(entity_name)
    partition_index = 0

    # Seed exactly one subscription so the scheduler has work to schedule.
    store = RedisSubscriptionDataStore(
        redis_client, entity_key, PartitionId(partition_index)
    )
    store.create(
        uuid.uuid4(),
        SubscriptionData(
            project_id=1,
            time_window_sec=60,
            resolution_sec=60,
            query="MATCH events SELECT count()",
            entity_subscription=EventsSubscription(data_dict={}),
        ),
    )

    builder = scheduler_consumer.SchedulerBuilder(
        entity_name,
        str(uuid.uuid1().hex),  # unique consumer group per test run
        "events",
        mock_scheduler_producer,
        "latest",
        False,
        60 * 5,
        None,
        None,
        metrics_backend,
    )
    scheduler = builder.build_consumer()
    # Let the consumer join the group before stepping it manually.
    time.sleep(2)
    scheduler._run_once()
    scheduler._run_once()
    scheduler._run_once()

    epoch = datetime(1970, 1, 1)

    producer = KafkaProducer(
        build_kafka_producer_configuration(
            stream_loader.get_default_topic_spec().topic,
        )
    )

    # Publish four commits: each partition advances through two offsets,
    # with timestamps one minute apart starting at the epoch.
    for (partition, offset, orig_message_ts) in [
        (0, 0, epoch),
        (1, 0, epoch + timedelta(minutes=1)),
        (0, 1, epoch + timedelta(minutes=2)),
        (1, 1, epoch + timedelta(minutes=3)),
    ]:
        fut = producer.produce(
            commit_log_topic,
            payload=commit_codec.encode(
                Commit(
                    "events",
                    Partition(commit_log_topic, partition),
                    offset,
                    orig_message_ts,
                )
            ),
        )
        fut.result()  # block until delivery so ordering is deterministic

    producer.close()

    # Drive the consumer enough iterations to process all four commits.
    for _ in range(5):
        scheduler._run_once()

    scheduler._shutdown()

    # NOTE(review): exactly two scheduled tasks expected for the four
    # commits above — presumably one per minute-aligned tick advance per
    # partition; verify against the scheduler's tick semantics.
    assert mock_scheduler_producer.produce.call_count == 2

    # Restore the global setting mutated at the top of the test.
    settings.TOPIC_PARTITION_COUNTS = {}