def get_outcomes_consumer(concurrency=None, **options):
    """Build the batching Kafka consumer that handles outcome messages
    published by Relay.

    Args:
        concurrency: Forwarded to ``OutcomesConsumerWorker``; ``None``
            leaves the worker's own default in effect.
        **options: Extra keyword options forwarded unchanged to
            ``create_batching_kafka_consumer``.
    """
    worker = OutcomesConsumerWorker(concurrency=concurrency)
    return create_batching_kafka_consumer(
        topic_name=settings.KAFKA_OUTCOMES,
        worker=worker,
        **options,
    )
def get_profiles_consumer(
    topic: str,
    # PEP 484: the annotation on **kwargs describes each *value*, so the
    # previous ``Dict[str, str]`` incorrectly claimed every option was
    # itself a dict. ``str`` expresses the original ``Dict[str, str]``
    # intent (a mapping of str option names to str values).
    **options: str,
) -> BatchingKafkaConsumer:
    """Build the batching Kafka consumer for profile messages.

    Args:
        topic: Kafka topic name to consume profiles from; wrapped in a
            one-element set for ``create_batching_kafka_consumer``.
        **options: Extra keyword options forwarded unchanged to
            ``create_batching_kafka_consumer``.

    Returns:
        A ``BatchingKafkaConsumer`` backed by ``ProfilesConsumer``.
    """
    return create_batching_kafka_consumer(
        {topic},
        worker=ProfilesConsumer(),
        **options,
    )
def get_ingest_consumer(consumer_types, once=False, **options):
    """Build the batching Kafka consumer that handles ingest events.

    Events arriving on these topics are expected to have already been
    processed (normalized, ...) upstream by Relay.

    Args:
        consumer_types: Iterable of consumer types; each is resolved to
            its Kafka topic via ``ConsumerType.get_topic_name``.
        once: Accepted for caller compatibility; not referenced in this
            body (presumably consumed elsewhere — TODO confirm).
        **options: Extra keyword options forwarded unchanged to
            ``create_batching_kafka_consumer``.
    """
    topics = set()
    for consumer_type in consumer_types:
        topics.add(ConsumerType.get_topic_name(consumer_type))
    return create_batching_kafka_consumer(
        topic_names=topics,
        worker=IngestConsumerWorker(),
        **options,
    )
def get_metrics_consumer(
    topic: Optional[str] = None,
    # PEP 484: the annotation on **kwargs describes each *value*, so the
    # previous ``Dict[str, str]`` incorrectly claimed every option was
    # itself a dict. ``str`` expresses the original ``Dict[str, str]``
    # intent (a mapping of str option names to str values).
    **options: str,
) -> BatchingKafkaConsumer:
    """Build the batching Kafka consumer that indexes metrics and
    forwards them to the Snuba metrics topic.

    Args:
        topic: Kafka topic to consume from. NOTE(review): the default of
            ``None`` results in consuming the set ``{None}`` below —
            confirm callers always pass an explicit topic.
        **options: Extra keyword options forwarded unchanged to
            ``create_batching_kafka_consumer``.

    Returns:
        A ``BatchingKafkaConsumer`` whose worker produces indexed
        metrics onto the Snuba metrics topic's cluster.
    """
    snuba_metrics = settings.KAFKA_TOPICS[settings.KAFKA_SNUBA_METRICS]
    # Producer is configured against the cluster that hosts the Snuba
    # metrics topic, not the cluster being consumed from.
    snuba_metrics_producer = Producer(
        kafka_config.get_kafka_producer_cluster_options(snuba_metrics["cluster"]),
    )
    return create_batching_kafka_consumer(
        {topic},
        worker=MetricsIndexerWorker(producer=snuba_metrics_producer),
        **options,
    )