Example #1
0
 def producer_instance(self, containers, producer_name, use_work_pool,
                       team_name):
     """Build a constantly-publishing Producer for the given team.

     ``containers`` is accepted but unused here; presumably a fixture
     dependency that brings up backing services — confirm with caller.
     """
     producer_kwargs = {
         'producer_name': producer_name,
         'team_name': team_name,
         'expected_frequency_seconds': ExpectedFrequency.constantly,
         'use_work_pool': use_work_pool,
     }
     return Producer(**producer_kwargs)
    def produce_messages(cls, **kwargs):
        """Spawn one producer thread per requested symbol and stream quotes.

        Expected kwargs (keyed by the module-level constants):
            SOURCE: quote source; must be a member of VALID_SOURCES.
            SYMBOLS: comma-separated ticker symbols (optional; falls back
                to DEFAULT_SYMBOL).
            KAFKA_BROKER / KAFKA_TOPIC: destination (optional, defaulted).

        Raises:
            ValueError: if the requested source is not in VALID_SOURCES.
                (ValueError subclasses Exception, so callers catching the
                old broad Exception keep working.)
        """
        def job(producer_, source_, symbol_):
            # Fetch one quote message for the symbol and publish it.
            if source_ == SOURCE_GOOGLE_FINANCE:
                message = Api.get_google_finance_quotes(symbol_)
            else:
                # should never reach here: source was validated against
                # VALID_SOURCES before any job was scheduled
                return

            producer_.send(message)

        def shut_down(producer_):
            # atexit hook: close the producer cleanly on interpreter exit.
            producer_.shut_down()

        source = kwargs.get(SOURCE)

        if source not in VALID_SOURCES:
            raise ValueError('source {} is not valid'.format(source))

        # BUG FIX: the previous `kwargs.get(SYMBOLS).split(',') or
        # [DEFAULT_SYMBOL]` raised AttributeError when SYMBOLS was absent
        # (None.split), and the fallback was dead code because
        # ''.split(',') == [''] is truthy. Default the raw string first.
        symbols = (kwargs.get(SYMBOLS) or DEFAULT_SYMBOL).split(',')
        kafka_broker = kwargs.get(KAFKA_BROKER) or DEFAULT_KAFKA_BROKER
        kafka_topic = kwargs.get(KAFKA_TOPIC) or DEFAULT_KAFKA_TOPIC

        # One producer/scheduler/thread triple per symbol; every 3 seconds
        # the scheduler runs `job` for its symbol.
        threads = []
        for symbol in symbols:
            producer = Producer(kafka_broker, kafka_topic)
            atexit.register(shut_down, producer)
            scheduler = Scheduler(3, job, producer, source, symbol)
            threads.append(threading.Thread(target=scheduler.run))

        for t in threads:
            t.start()

        for t in threads:
            t.join()
Example #3
0
 def test_monitoring_system_dry_run(self, producer_name, team_name):
     """A dry-run producer must propagate the flag to its monitor."""
     dry_run_producer = Producer(
         producer_name=producer_name,
         team_name=team_name,
         expected_frequency_seconds=ExpectedFrequency.constantly,
         dry_run=True)
     monitor = dry_run_producer.monitor
     assert monitor.dry_run is True
Example #4
0
 def dp_producer(self, team_name):
     """Yield a context-managed Producer named 'producer_1'.

     The producer is shut down when the generator is closed (the
     ``with`` block's exit handles cleanup).
     """
     instance = Producer(
         producer_name='producer_1',
         team_name=team_name,
         expected_frequency_seconds=ExpectedFrequency.constantly,
         use_work_pool=False,
     )
     with instance as producer:
         yield producer
Example #5
0
 def producer(self, team_name):
     """Yield a non-monitored 'tailer_producer' for the given team."""
     with Producer(
             producer_name="tailer_producer",
             team_name=team_name,
             expected_frequency_seconds=ExpectedFrequency.constantly,
             use_work_pool=False,
             monitoring_enabled=False) as producer:
         yield producer
Example #6
0
 def producer(self, containers):
     """Yield a non-monitored 'introspector_producer' for team 'bam'.

     ``containers`` is unused here; presumably a fixture that brings up
     backing services — confirm against the test harness.
     """
     producer_kwargs = dict(
         producer_name="introspector_producer",
         team_name="bam",
         expected_frequency_seconds=ExpectedFrequency.constantly,
         use_work_pool=False,
         monitoring_enabled=False,
     )
     with Producer(**producer_kwargs) as producer:
         yield producer
Example #7
0
 def test_producer_initial_registration_messages(self, use_work_pool):
     """Entering the producer context with three schema ids should emit
     exactly three log lines through ``clog.log_line``.
     """
     with attach_spy_on_func(clog, 'log_line') as func_spy, \
             Producer(
                 producer_name='producer_1',
                 team_name='bam',
                 expected_frequency_seconds=ExpectedFrequency.constantly,
                 use_work_pool=use_work_pool,
                 schema_id_list=[1, 2, 3]):
         assert func_spy.call_count == 3
    def __init__(self, name, broker, source_topic, destination_topic):
        """Wire a direct Kafka DStream from ``source_topic`` into a
        Producer that publishes to ``destination_topic``.

        Note: only constructs the pipeline; the streaming context is
        stored on ``self.ssc`` and is presumably started elsewhere —
        confirm with the caller.
        """
        spark_context = SparkContext("local[2]", name)
        spark_context.setLogLevel('ERROR')
        # 5-second micro-batch interval.
        self.ssc = StreamingContext(spark_context, 5)

        kafka_stream = KafkaUtils.createDirectStream(
            self.ssc, [source_topic], {'metadata.broker.list': broker})

        destination_producer = Producer(broker, destination_topic)
        process_stream(kafka_stream, destination_producer)
 def _setup_producer(self):
     """Yield a replication-handler Producer whose position data is
     persisted through ``save_position`` on the handler's state session.
     """
     on_position_data = partial(
         save_position,
         state_session=self.db_connections.state_session,
     )
     replication_producer = Producer(
         producer_name=REPLICATION_HANDLER_PRODUCER_NAME,
         team_name=REPLICATION_HANDLER_TEAM_NAME,
         expected_frequency_seconds=ExpectedFrequency.constantly,
         monitoring_enabled=False,
         dry_run=self.publish_dry_run,
         position_data_callback=on_position_data,
     )
     with replication_producer as producer:
         yield producer
    def _publish_then_consume_message(self, consumer, avro_schema):
        """Publish one UpdateMessage for ``avro_schema``, flush, then block
        until the consumer receives it (or TIMEOUT elapses).
        """
        publisher = Producer(
            'test_producer',
            team_name='bam',
            expected_frequency_seconds=ExpectedFrequency.constantly,
            monitoring_enabled=False)
        with publisher as producer:
            update = UpdateMessage(
                schema_id=avro_schema.schema_id,
                payload_data={'id': 2},
                previous_payload_data={'id': 1})
            producer.publish(update)
            producer.flush()

        consumer.get_messages(1, blocking=True, timeout=TIMEOUT)
Example #11
0
    def test_position_data_callback(self, create_message, producer_name,
                                    team_name):
        """After publish+flush, the position-data callback must receive a
        position_data object carrying the message's upstream info.
        """
        position_callback = mock.Mock()
        producer = Producer(
            producer_name=producer_name,
            team_name=team_name,
            expected_frequency_seconds=ExpectedFrequency.constantly,
            position_data_callback=position_callback)
        expected_upstream = {'offset': 'fake'}
        message = create_message(upstream_position_info=expected_upstream)
        with setup_capture_new_messages_consumer(message.topic) as consumer:
            producer.publish(message)
            producer.flush()
            # Callback is invoked positionally with a single argument.
            (position_data, ), _ = position_callback.call_args

            self._verify_position_data(position_data, expected_upstream,
                                       message.topic)
            self._verify_topic_kafka_offset(position_data, message.topic,
                                            consumer, producer, create_message)