def test_ingest_consumer_reads_from_topic_and_calls_celery_task(
    task_runner, kafka_producer, kafka_admin
):
    """End-to-end: events produced onto the ingest topic are picked up by
    ``run_ingest_consumer`` and persisted as ``Event`` rows.

    Produces 3 test messages, runs the consumer until the shutdown
    predicate fires (all events seen or ``max_secs`` elapsed), then
    verifies each stored event round-tripped its payload intact.
    """
    consumer_group = "test-consumer"
    admin = kafka_admin(settings)
    # Start from a clean topic so leftovers from earlier runs can't leak in.
    admin.delete_events_topic()
    producer = kafka_producer(settings)
    organization = Factories.create_organization()
    project = Factories.create_project(organization=organization)

    topic_event_name = ConsumerType.get_topic_name(ConsumerType.Events, settings)

    # Produce 3 distinct test events; remember their ids for verification.
    event_ids = set()
    for _ in range(3):
        message, event_id = _get_test_message(project)
        event_ids.add(event_id)
        producer.produce(topic_event_name, message)

    with task_runner():
        run_ingest_consumer(
            commit_batch_size=2,
            consumer_group=consumer_group,
            consumer_type=ConsumerType.Events,
            max_batch_time_seconds=0.1,
            is_shutdown_requested=_shutdown_requested(max_secs=10, num_events=3),
        )

    # check that we got the messages
    assert Event.objects.count() == 3
    for event_id in event_ids:
        event = Event.objects.get(event_id=event_id)
        assert event is not None
        # check that the data has not been scrambled
        assert event.data["extra"]["the_id"] == event_id
def _setup_outcome_test(kafka_producer, kafka_admin):
    """Prepare a fresh outcomes topic plus an org/project to emit against.

    Returns a ``(producer, project_id, topic_name)`` tuple ready for
    publishing test outcome messages.
    """
    topic_name = _get_outcome_topic_name()
    project = Factories.create_project(
        organization=Factories.create_organization()
    )
    producer = kafka_producer(settings)
    # Clear any residue from previous test runs before producing.
    kafka_admin(settings).delete_topic(topic_name)
    return producer, project.id, topic_name
def __init__(self, kafka_producer, kafka_admin, task_runner):
    """Wire up outcome-signal receivers and kafka plumbing for the test.

    Collects filtered/discarded/dropped/saved events into per-outcome
    lists via the connected signal receivers.
    """
    # Buckets populated by the signal receivers connected below.
    self.events_filtered = []
    self.events_discarded = []
    self.events_dropped = []
    self.events_saved = []

    # Subscribe each outcome signal to its matching recording receiver.
    for signal, receiver in (
        (event_filtered, self._event_filtered_receiver),
        (event_discarded, self._event_discarded_receiver),
        (event_dropped, self._event_dropped_receiver),
        (event_saved, self._event_saved_receiver),
    ):
        signal.connect(receiver)

    self.task_runner = task_runner
    self.topic_name = settings.KAFKA_OUTCOMES
    self.organization = Factories.create_organization()
    self.project = Factories.create_project(organization=self.organization)
    self.producer = self._create_producer(kafka_producer, kafka_admin)
def test_ingest_consumer_reads_from_topic_and_calls_celery_task(
    task_runner, kafka_producer, kafka_admin, requires_kafka
):
    """End-to-end: events produced onto the ingest topic are consumed by
    the ingest consumer and become retrievable through ``eventstore``.

    Produces 3 test messages, then polls the consumer until *every*
    produced event is visible (bounded by ``MAX_POLL_ITERATIONS``), and
    finally verifies each payload round-tripped intact.
    """
    group_id = "test-consumer"

    topic_event_name = ConsumerType.get_topic_name(ConsumerType.Events)

    admin = kafka_admin(settings)
    # Start from a clean topic so leftovers from earlier runs can't leak in.
    admin.delete_topic(topic_event_name)
    producer = kafka_producer(settings)

    organization = Factories.create_organization()
    project = Factories.create_project(organization=organization)

    # Produce 3 distinct test events; remember their ids for verification.
    event_ids = set()
    for _ in range(3):
        message, event_id = _get_test_message(project)
        event_ids.add(event_id)
        producer.produce(topic_event_name, message)

    consumer = get_ingest_consumer(
        max_batch_size=2,
        max_batch_time=5000,
        group_id=group_id,
        consumer_type=ConsumerType.Events,
        auto_offset_reset="earliest",
    )

    with task_runner():
        # Poll until ALL produced events are visible, not just the last
        # one produced — batches need not commit together, so waiting on
        # a single id races with the assertions below.
        remaining = set(event_ids)
        for _ in range(MAX_POLL_ITERATIONS):
            remaining = {
                eid
                for eid in remaining
                if eventstore.get_event_by_id(project.id, eid) is None
            }
            if not remaining:
                break
            consumer._run_once()

    # check that we got the messages
    for event_id in event_ids:
        message = eventstore.get_event_by_id(project.id, event_id)
        assert message is not None
        # check that the data has not been scrambled
        assert message.data["extra"]["the_id"] == event_id
def create_organization(self, *args, **kwargs):
    """Delegate organization creation to :class:`Factories`."""
    organization = Factories.create_organization(*args, **kwargs)
    return organization
def create_organization(self, *args, **kwargs):
    """Thin pass-through to ``Factories.create_organization``."""
    created = Factories.create_organization(*args, **kwargs)
    return created