def test_handle_data_update_event(
    self,
    producer,
    test_table,
    test_topic,
    first_test_kafka_offset,
    second_test_kafka_offset,
    data_event_handler,
    data_update_events,
    schema_wrapper_entry,
    patches,
    patch_get_payload_schema,
    patch_message_topic,
    position
):
    expected_call_args = []
    for data_event in data_update_events:
        position = LogPosition(log_file='binlog', log_pos=100)
        upstream_position_info = {
            "position": position.to_dict(),
            "cluster_name": "yelp_main",
            "database_name": "fake_database",
            "table_name": "fake_table"
        }
        data_event_handler.handle_event(data_event, position)
        # Each handled update event should be published as an UpdateMessage
        # carrying both the before and after row images.
        expected_call_args.append(
            UpdateMessage(
                payload_data=data_event.row['after_values'],
                schema_id=schema_wrapper_entry.schema_id,
                upstream_position_info=upstream_position_info,
                previous_payload_data=data_event.row['before_values'],
                keys=(u'primary_key',),
                timestamp=data_event.timestamp
            )
        )
    actual_call_args = [i[0][0] for i in producer.publish.call_args_list]
    self._assert_messages_as_expected(expected_call_args, actual_call_args)
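
# A minimal sketch of the _assert_messages_as_expected helper referenced above,
# assuming it pairs expected and actual messages and compares the UpdateMessage
# fields the test constructs; the real helper in the test class may check more
# or fewer attributes.
def _assert_messages_as_expected(self, expected, actual):
    assert len(expected) == len(actual)
    for expected_message, actual_message in zip(expected, actual):
        assert expected_message.schema_id == actual_message.schema_id
        assert expected_message.payload_data == actual_message.payload_data
        assert (expected_message.previous_payload_data ==
                actual_message.previous_payload_data)
        assert (expected_message.upstream_position_info ==
                actual_message.upstream_position_info)
        assert expected_message.keys == actual_message.keys
        assert expected_message.timestamp == actual_message.timestamp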
def _publish_then_consume_message(self, consumer, avro_schema):
    with Producer(
        'test_producer',
        team_name='bam',
        expected_frequency_seconds=ExpectedFrequency.constantly,
        monitoring_enabled=False
    ) as producer:
        message = UpdateMessage(
            schema_id=avro_schema.schema_id,
            payload_data={'id': 2},
            previous_payload_data={'id': 1}
        )
        producer.publish(message)
        producer.flush()
    # Return the consumed messages so callers can assert on them.
    return consumer.get_messages(1, blocking=True, timeout=TIMEOUT)
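
# Hedged usage sketch of the helper above, assuming the surrounding test class
# provides `consumer` and `registered_schema` fixtures (both names are
# assumptions) and that the consumed message's payload decodes back to the
# published dict.
def test_consume_update_message(self, consumer, registered_schema):
    messages = self._publish_then_consume_message(consumer, registered_schema)
    assert len(messages) == 1
    assert messages[0].payload_data == {'id': 2}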
def update_message(self, payload, registered_schema):
    return UpdateMessage(
        schema_id=registered_schema.schema_id,
        payload=payload,
        previous_payload=payload
    )
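
# Note the difference from the tests above: this factory passes pre-serialized
# `payload` bytes rather than a `payload_data` dict. A hedged usage sketch,
# where `factory` and `encoded_row` (avro-encoded bytes matching
# `registered_schema`) are illustrative assumptions:
def example_update_message_usage(factory, registered_schema, encoded_row):
    message = factory.update_message(encoded_row, registered_schema)
    assert message.schema_id == registered_schema.schema_id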