    def __init__(self,
                 producer_name,
                 value_schema,
                 groupID='KafkaAvroProducer'):

        # Producer name for logging purposes
        self.logging_prefix = '[' + producer_name + '][KafkaAvroProducer]'

        # Schema Registry configuration
        self.schema_registry_conf = EventBackboneConfig.getSchemaRegistryConf()
        # Schema Registry Client
        self.schema_registry_client = SchemaRegistryClient(
            self.schema_registry_conf)

        # String Serializer for the key
        self.key_serializer = StringSerializer('utf_8')
        # Avro Serializer for the value
        self.value_serializer = AvroSerializer(value_schema,
                                               self.schema_registry_client)

        # Get the producer configuration
        self.producer_conf = EventBackboneConfig.getProducerConfiguration(
            groupID, self.key_serializer, self.value_serializer)
        EventBackboneConfig.printProducerConfiguration(
            self.logging_prefix, self.producer_conf,
            self.schema_registry_conf['url'])
        # Create the producer
        self.producer = SerializingProducer(self.producer_conf)
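
    # publishEvent() is called elsewhere in these snippets but its body is
    # not part of this excerpt; the sketch below is a minimal, hypothetical
    # implementation assuming it simply wraps SerializingProducer.produce()
    # with a delivery callback and a synchronous flush.
    def publishEvent(self, key, eventJSON, topicName):
        def delivery_report(err, msg):
            # Report per-message delivery success or failure
            if err is not None:
                print(self.logging_prefix + ' - Delivery failed: {}'.format(err))
            else:
                print(self.logging_prefix + ' - Delivered to {} [{}]'.format(
                    msg.topic(), msg.partition()))

        self.producer.produce(topic=topicName,
                              key=key,
                              value=eventJSON,
                              on_delivery=delivery_report)
        self.producer.flush()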
    def produceData(self):
        # self.produceInventoryDataFromFile(EventBackboneConfig.getInventoryTopicName())
        # self.produceReeferDataFromFile(EventBackboneConfig.getReeferTopicName())
        # self.produceTransportationDataFromFile(EventBackboneConfig.getTransportationTopicName())
        self.produceInventoryData(EventBackboneConfig.getInventoryTopicName())
        self.produceReeferData(EventBackboneConfig.getReeferTopicName())
        self.produceTransportationData(
            EventBackboneConfig.getTransportationTopicName())
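
    # The produceXData() helpers are not shown in this excerpt; a plausible
    # sketch of one of them, assuming a hypothetical self.inventory list of
    # record dicts, a KafkaAvroProducer held in self.producer, and uuid
    # imported at module level:
    def produceInventoryData(self, topic_name):
        for record in self.inventory:
            # Use a generated UUID as the record key
            self.producer.publishEvent(str(uuid.uuid4()), record, topic_name)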
    def __init__(self):
        # logging.info("[InventoryConsumer] - Initializing the consumer")
        print("[InventoryConsumer] - Initializing the consumer")
        self.cloudEvent_schema = avroUtils.getCloudEventSchema()
        self.store = InventoryDataStore()
        self.kafkaconsumer = KafkaAvroConsumer(
            'InventoryConsumer', json.dumps(self.cloudEvent_schema.to_json()),
            EventBackboneConfig.getInventoryTopicName(),
            EventBackboneConfig.getConsumerGroup(), AUTO_COMMIT)
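
    # A processing loop is not shown in this excerpt; a minimal sketch,
    # assuming KafkaAvroConsumer exposes a pollNextEvent() helper (sketched
    # after the KafkaAvroConsumer __init__ below) and that
    # InventoryDataStore has a processEvent() method:
    def startProcessing(self):
        while True:
            event = self.kafkaconsumer.pollNextEvent()
            if event is not None:
                self.store.processEvent(event)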
    def produceShipmentPlan(self, sol_json):
        print('\n[OrderConsumer] - producing shipment plan')

        shipments = json.loads(sol_json['Shipments'])
        shipments_clean = []
        if len(shipments) > 0:
            for shipment in shipments:
                shipments_clean.append(
                    {x.replace(' ', ''): v
                     for x, v in shipment.items()})
            event_json = {
                'type': "ibm.gse.eda.vaccine.orderoptimizer.VaccineOrderCloudEvent",
                'specversion': "1.0",
                'source': "Vaccine Order Optimizer engine",
                'id': str(uuid.uuid4()),
                'time': datetime.datetime.now(datetime.timezone.utc).isoformat(),
                'dataschema': "https://raw.githubusercontent.com/ibm-cloud-architecture/vaccine-order-optimizer/master/data/avro/schemas/shipment_plan.avsc",
                'datacontenttype': "application/json",
                'data': {"Shipments": shipments_clean},
            }
            if self.debugOptimization:
                print('[OrderConsumer] - Shipment Plan event to be produced:')
                print(event_json)
            self.kafkaproducer.publishEvent(
                event_json['id'], event_json,
                EventBackboneConfig.getShipmentPlanTopicName())
        else:
            print('[ERROR] - There is no shipment plan.')
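
    # For illustration only, a produced shipment-plan CloudEvent has roughly
    # this shape (all values below are made up):
    # {
    #   "type": "ibm.gse.eda.vaccine.orderoptimizer.VaccineOrderCloudEvent",
    #   "specversion": "1.0",
    #   "source": "Vaccine Order Optimizer engine",
    #   "id": "1f0e7cb2-example-uuid",
    #   "time": "2021-03-01T12:00:00+00:00",
    #   "dataschema": "https://raw.githubusercontent.com/ibm-cloud-architecture/vaccine-order-optimizer/master/data/avro/schemas/shipment_plan.avsc",
    #   "datacontenttype": "application/json",
    #   "data": {"Shipments": [{"DepartureDate": "...", "Qty": 100}]}
    # }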
    def __init__(self, inventoryStore, reeferStore, transportationStore):
        print("[OrderConsumer] - Initializing the consumer")
        # Cache the debug flag; note that this instance attribute shadows
        # the debugOptimization() method of the same name after this call
        self.debugOptimization = self.debugOptimization()
        self.orderStore = OrderDataStore.getInstance()
        self.reeferStore = reeferStore
        self.inventoryStore = inventoryStore
        self.transportationStore = transportationStore
        self.kafkaconsumer = KafkaAvroCDCConsumer(
            'OrderConsumer', EventBackboneConfig.getOrderTopicName(),
            EventBackboneConfig.getConsumerGroup(), AUTO_COMMIT)
        # Avro data schemas location
        self.schemas_location = "/app/data/avro/schemas/"
        # CloudEvent Schema
        self.cloudEvent_schema = avroUtils.getCloudEventSchema()
        # Build the Kafka Avro Producer
        self.kafkaproducer = KafkaAvroProducer(
            "OrderConsumer", json.dumps(self.cloudEvent_schema.to_json()),
            "VOO-ShipmentPlan")
    def __init__(self, consumer_name, value_schema,
                 topic_name="kafka-avro-producer",
                 groupID='KafkaAvroConsumer', autocommit=True):

        # Consumer name for logging purposes
        self.logging_prefix = '[' + consumer_name + '][KafkaAvroConsumer]'

        # Schema Registry configuration
        self.schema_registry_conf = EventBackboneConfig.getSchemaRegistryConf()
        # Schema Registry Client
        self.schema_registry_client = SchemaRegistryClient(
            self.schema_registry_conf)

        # Key Deserializer
        self.key_deserializer = StringDeserializer('utf_8')

        # Get the schema for the value from the Schema Registry
        self.schema_id_value = self.schema_registry_client.get_latest_version(
            topic_name + "-value").schema_id
        # print('The Schema ID for the value is: {}'.format(self.schema_id_value))
        self.value_schema = self.schema_registry_client.get_schema(
            self.schema_id_value).schema_str
        print(self.logging_prefix + ' - Value Subject: {}'.format(topic_name + "-value"))
        print(self.logging_prefix + ' - Value Schema:')
        print(self.logging_prefix + ' - -------------\n')
        print(self.logging_prefix + ' - ' + self.value_schema + '\n')

        # Value Deserializer
        # Presenting the schema to the Avro Deserializer is needed at the moment. In the future it might change
        # https://github.com/confluentinc/confluent-kafka-python/issues/834
        self.value_deserializer = AvroDeserializer(self.value_schema,
                                                   self.schema_registry_client)

        # Get the consumer configuration
        self.consumer_conf = EventBackboneConfig.getConsumerConfiguration(
            groupID, autocommit, self.key_deserializer, self.value_deserializer)
        # Create the consumer
        self.consumer = DeserializingConsumer(self.consumer_conf)

        # Print consumer configuration
        EventBackboneConfig.printConsumerConfiguration(
            self.logging_prefix, self.consumer_conf, self.schema_registry_conf['url'])

        # Subscribe to the topic
        self.consumer.subscribe([topic_name])
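
    # pollNextEvent() is referenced by the consumers above but its body is
    # not in this excerpt; a minimal sketch, assuming it wraps
    # DeserializingConsumer.poll() and returns the Avro-deserialized value:
    def pollNextEvent(self, timeout=10.0):
        try:
            msg = self.consumer.poll(timeout)
        except Exception as err:
            # DeserializingConsumer raises on consume/deserialization errors
            print(self.logging_prefix + ' - Consumer error: {}'.format(err))
            return None
        if msg is None:
            return None
        # msg.value() was already deserialized by the AvroDeserializer
        return msg.value()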