import os
import unittest

import confluent_kafka
import fastavro

# Project helpers used below; module paths are assumed from the usual
# fink-client layout and may differ in your checkout.
from fink_client.consumer import AlertConsumer
from fink_client.avroUtils import AlertReader, encode_into_avro, get_legal_topic_name
from fink_client.configuration import load_credentials


class TestIntegration(unittest.TestCase):
    def setUp(self):
        data_path = os.path.abspath(
            os.path.join(os.path.dirname(__file__), 'data'))
        schema_path = os.path.abspath(
            os.path.join(
                os.path.dirname(__file__),
                '../schemas/distribution_schema_0p2.avsc'))

        # Read the test alerts from disk and push them to Kafka,
        # one topic per cross-match label.
        r = AlertReader(data_path)
        alerts = r.to_list()

        conf = load_credentials(tmp=True)
        kafka_servers = conf['servers']

        p = confluent_kafka.Producer({'bootstrap.servers': kafka_servers})
        for alert in alerts:
            avro_data = encode_into_avro(alert, schema_path)
            topic = get_legal_topic_name(alert['cdsxmatch'])
            p.produce(topic, avro_data)
        p.flush()

        # Instantiate an AlertConsumer subscribed to the credential topics
        mytopics = conf["mytopics"]
        myconfig = {
            'bootstrap.servers': kafka_servers,
            'group_id': conf['group_id']
        }
        self.consumer = AlertConsumer(mytopics, myconfig, schema_path=schema_path)

    def test_poll(self):
        topic, alert, key = self.consumer.poll()
        self.assertIsNotNone(alert)
        self.assertTrue(
            fastavro.validate(alert, self.consumer._parsed_schema))

    def test_consume(self):
        num_messages = 1
        alerts = self.consumer.consume(num_messages)
        self.assertEqual(len(alerts), num_messages)

    def test_topics(self):
        topics = self.consumer.available_topics()
        self.assertTrue('rrlyr' in topics.keys())

    def test_broker_name(self):
        brokers = self.consumer.available_brokers()
        self.assertTrue(0 in brokers.keys())

    def tearDown(self):
        self.consumer.close()
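# The test above only reads three fields from the credentials returned by
# load_credentials(tmp=True). A minimal sketch of the assumed shape follows;
# the real fink-client credentials file may carry more keys, and these
# values are placeholders, not the project's defaults.
conf_sketch = {
    'servers': 'localhost:9093,localhost:9094,localhost:9095',  # -> bootstrap.servers
    'mytopics': ['rrlyr'],      # topics the consumer subscribes to
    'group_id': 'test_group',   # Kafka consumer group id
}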
# Variant of the integration test that hardcodes the Kafka servers and
# topics instead of loading them from credentials (same imports as above,
# minus load_credentials).
class TestIntegration(unittest.TestCase):
    def setUp(self):
        data_path = os.path.abspath(os.path.join(
            os.path.dirname(__file__), 'data'))
        schema_path = os.path.abspath(os.path.join(
            os.path.dirname(__file__),
            '../schemas/distribution_schema_0p2.avsc'))

        # Read the test alerts from disk and push them to Kafka
        r = AlertReader(data_path)
        alerts = r.to_list()

        kafka_servers = 'localhost:9093,localhost:9094,localhost:9095'
        p = confluent_kafka.Producer({'bootstrap.servers': kafka_servers})
        for alert in alerts:
            avro_data = encode_into_avro(alert, schema_path)
            topic = get_legal_topic_name(alert['cdsxmatch'])
            p.produce(topic, avro_data)
        p.flush()

        # Instantiate an AlertConsumer
        mytopics = ["rrlyr"]
        myconfig = {
            'bootstrap.servers': kafka_servers,
            'group_id': 'test_group'
        }
        self.consumer = AlertConsumer(mytopics, myconfig, schema=schema_path)

    def test_poll(self):
        topic, alert = self.consumer.poll()
        self.assertIsNotNone(alert)
        self.assertTrue(
            fastavro.validate(alert, self.consumer._parsed_schema))

    def test_consume(self):
        num_messages = 1
        alerts = self.consumer.consume(num_messages)
        self.assertEqual(len(alerts), num_messages)

    def tearDown(self):
        self.consumer.close()
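# The implementation of get_legal_topic_name is not shown in this section.
# A hypothetical sketch, assuming it maps a cross-match label such as
# 'RRLyr' to the 'rrlyr' topic the tests expect: lowercase the label and
# drop any character that is not legal in a Kafka topic name ([a-zA-Z0-9._-]).
import re

def get_legal_topic_name_sketch(label: str) -> str:
    # Keep only characters Kafka allows in topic names, lowercased.
    return re.sub(r'[^a-z0-9._-]', '', label.lower())

# e.g. get_legal_topic_name_sketch('RRLyr') == 'rrlyr'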
# Variant using a plain generator (read_avro_alerts) and a one-argument
# encode_into_avro; note that poll() here returns (alert, topic) rather
# than (topic, alert).
class TestIntegration(unittest.TestCase):
    def setUp(self):
        data_path = os.path.abspath(
            os.path.join(os.path.dirname(__file__), 'data'))

        alert_reader = read_avro_alerts(data_path)

        kafka_servers = 'localhost:9093,localhost:9094,localhost:9095'
        p = confluent_kafka.Producer({'bootstrap.servers': kafka_servers})
        for alert in alert_reader:
            avro_data = encode_into_avro(alert)
            topic = get_legal_topic_name(alert['cross_match_alerts_per_batch'])
            p.produce(topic, avro_data)
        p.flush()

        # Instantiate an AlertConsumer subscribed to three test topics
        mytopics = ["rrlyr", "ebwuma", "unknown"]
        test_schema = os.path.abspath(
            os.path.join(os.path.dirname(__file__), "test_schema.avsc"))
        myconfig = {
            'bootstrap.servers': kafka_servers,
            'group_id': 'test_group'
        }
        self.consumer = AlertConsumer(mytopics, myconfig, schema=test_schema)

    def test_poll(self):
        alert, topic = self.consumer.poll()
        self.assertIsNotNone(alert)

    def test_consume(self):
        num_messages = 3
        alerts = self.consumer.consume(num_messages)
        self.assertEqual(len(alerts), num_messages)

    def tearDown(self):
        self.consumer.close()
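# read_avro_alerts is also not defined in this section. A minimal sketch,
# assuming it is a generator yielding one alert dict per record found in
# the .avro files under data_path (fastavro.reader iterates over records):
import glob
import os

import fastavro

def read_avro_alerts_sketch(data_path: str):
    # Yield every record from every Avro file in the directory.
    for fn in glob.glob(os.path.join(data_path, '*.avro')):
        with open(fn, 'rb') as f:
            for record in fastavro.reader(f):
                yield record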