def test2_sendAnomalyEvents(self):
    """Publish several container-anomaly events to the containers topic.

    Loads a template anomaly event from a JSON data file, stamps it with
    the test container ID, and produces it three times (3 s apart) keyed
    by containerID.
    """
    print('----------------------------------')
    print('-- [TEST] : Send Anomaly Events --')
    print('----------------------------------\n')

    print("1 - Load Container Anomaly event from file")
    # Context manager guarantees the file is closed even if an assert fails
    # (the original closed it manually, leaking the handle on failure).
    with open('../data/containerAnomalyEvent.json', 'r') as f:
        # Load container anomaly event
        container_anomaly_event = json.load(f)
    # Verify we have read a file
    self.assertIsNotNone(container_anomaly_event)
    # Assign the containerID
    container_anomaly_event['containerID'] = CONTAINER_ID
    print("The container anomaly event to produce is :")
    print(json.dumps(container_anomaly_event, indent=4, sort_keys=True))
    print("Done\n")

    print("2 - Post container anomaly into the containers topic")
    # Pass the Kafka connection details explicitly, consistent with
    # test1_createContainer (the original called KafkaProducer() bare).
    kp = KafkaProducer(KAFKA_ENV, KAFKA_BROKERS, KAFKA_APIKEY)
    # Verify we have a KafkaProducer object
    self.assertIsNotNone(kp)
    kp.prepareProducer("ProduceContainerPython")
    # Verify the producer has been created
    self.assertIsNotNone(kp.producer)
    # Publish the container anomaly event three times, pausing between sends
    for i in range(3):
        print('Posting container anomaly event number ' + str(i))
        kp.publishEvent(CONTAINERS_TOPIC, container_anomaly_event, "containerID")
        time.sleep(3)
    print("Done\n")
def test1_createContainer(self):
    """Create a container via an event and verify it end to end.

    Publishes a ContainerCreated event to the containers topic, reads it
    back from Kafka by key, compares the two, then fetches the resulting
    container object from the container microservice REST API and compares
    it with the expected empty-container fixture.
    """
    print('-------------------------------')
    print('-- [TEST] : Create container --')
    print('-------------------------------\n')

    print("1 - Load the container event from json file")
    # Context manager guarantees the file is closed even if an assert fails
    # (the original closed it manually, leaking the handle on failure).
    with open('../data/containerCreateEvent.json', 'r') as f:
        new_container = json.load(f)
    # Verify we have read a container
    self.assertIsNotNone(new_container)
    # Provide the timestamp for the creation time of the container/event
    new_container['timestamp'] = int(time.time())
    # Verify the container has a valid timestamp
    self.assertGreater(new_container['timestamp'], 0)
    # Provide the container ID at both event and payload level
    new_container['containerID'] = CONTAINER_ID
    new_container['payload']['containerID'] = CONTAINER_ID
    print("Container event to be sent:")
    print(json.dumps(new_container, indent=4, sort_keys=True))
    print("Done\n")

    print("2 - Post container event into the containers topic")
    # Create a KafkaProducer object to interact with Kafka/Event Streams
    kp = KafkaProducer(KAFKA_ENV, KAFKA_BROKERS, KAFKA_APIKEY)
    self.assertIsNotNone(kp)
    kp.prepareProducer("ProduceContainerPython")
    # Verify the producer has been created
    self.assertIsNotNone(kp.producer)
    # Publish the create container event, keyed by containerID
    kp.publishEvent(CONTAINERS_TOPIC, new_container, "containerID")
    print("Done\n")

    print("Sleeping for 5 secs\n")
    time.sleep(5)

    print("3 - Read container event from the containers topic")
    # Create a KafkaConsumer object to interact with Kafka/Event Streams
    kc = KafkaConsumer(KAFKA_ENV, KAFKA_BROKERS, KAFKA_APIKEY, CONTAINERS_TOPIC)
    self.assertIsNotNone(kc)
    kc.prepareConsumer()
    # Verify the consumer has been created
    self.assertIsNotNone(kc.consumer)
    # Read next event in the topic by key
    read_container = kc.pollNextEventByKey(CONTAINER_ID)
    # A container event object is read
    self.assertIsNotNone(read_container)
    print("This is the container event read:")
    print(json.dumps(read_container, indent=4, sort_keys=True))
    # Close the Kafka/Event Streams consumer
    kc.close()
    print("Done\n")

    print("4 - Compare events")
    # Event sent and event read from the topic must be identical
    self.assertEqual(sorted(new_container.items()), sorted(read_container.items()))
    print("Done\n")

    print("5 - Read container object from the container microservice's API endpoint")
    response = requests.get("http://" + CONTAINER_SPRING_MS + "/containers")
    # Verify we get a response
    self.assertIsNotNone(response)
    # Load containers from the response
    json_data = json.loads(response.text)
    # Verify we get at least one container back
    self.assertGreater(len(json_data['content']), 0)
    # Negative indexing replaces the len(...)-1 arithmetic: latest container
    api_container = json_data['content'][-1]
    # Verify we have a container
    self.assertIsNotNone(api_container)
    print("This is the API container object")
    print(json.dumps(api_container, indent=4, sort_keys=True))
    print("Done\n")

    print("6 - Read expected empty container from json file")
    with open('../data/containerEmptyEvent.json', 'r') as f2:
        expected_container = json.load(f2)
    # Verify we have a container
    self.assertIsNotNone(expected_container)
    # For simplicity, we will not work out timestamps
    expected_container['createdAt'] = api_container['createdAt']
    expected_container['updatedAt'] = api_container['updatedAt']
    # Assign the containerID
    expected_container['id'] = CONTAINER_ID
    print("This is the expected container object:")
    print(json.dumps(expected_container, indent=4, sort_keys=True))
    print("Done\n")

    print("7 - Compare Containers")
    # The container returned by the API must match the expected fixture
    self.assertEqual(sorted(expected_container.items()), sorted(api_container.items()))
    print("Done\n")
"zipcode": "09000" }, "pickupDate": "2019-05-25", "expectedDeliveryDate": "2019-06-25" } containerEvent = { "orderID": id, "timestamp": int(time.time()), "type": "OrderCreated", "payload": data } return containerEvent def parseArguments(): if len(sys.argv) == 2: OID = sys.argv[1] else: OID = "itg-Ord02" print("The arguments are: ", str(sys.argv)) return OID if __name__ == '__main__': OID = parseArguments() evt = createOrder(OID) print(evt) kp = KafkaProducer(KAFKA_ENV, KAFKA_BROKERS, KAFKA_APIKEY) kp.prepareProducer("OrderProducerPython") kp.publishEvent('orders', evt, "orderID")
print("DONE") return event # Parse arguments to get the Kafka topic def parseArguments(): global TOPIC_NAME print("The arguments for this script are: " , str(sys.argv)) if len(sys.argv) == 2: TOPIC_NAME = sys.argv[1] else: print("[ERROR] - The ProducePlainMessage.py script expects one argument: The Kafka topic to publish the message to") exit(1) ####################### MAIN ####################### if __name__ == '__main__': # Get the Kafka topic from the arguments parseArguments() # Create the event to be sent event = createEvent() # Print it out print("--- Event to be published: ---") print(event) print("----------------------------------------") # Create the Kafka Producer kafka_producer = KafkaProducer(KAFKA_BROKERS,KAFKA_APIKEY) # Prepare the Kafka Producer kafka_producer.prepareProducer("ProducePlainMessagePython") # Publish the event kafka_producer.publishEvent(TOPIC_NAME,event,"eventKey")
"type": "ContainerAdded", "payload": data } print("DONE") return containerEvent # Parse arguments to get the Container ID def parseArguments(): global TOPIC_NAME, ID print("The arguments for the script are: ", str(sys.argv)) if len(sys.argv) == 3: ID = sys.argv[1] TOPIC_NAME = sys.argv[2] else: print( "[ERROR] - The ProduceContainer.py script expects two arguments: The container ID and the topic to send the container event to." ) exit(1) ####################### MAIN ####################### if __name__ == '__main__': parseArguments() evt = createContainer(ID) print("Container event to be published:") print(evt) kp = KafkaProducer(KAFKA_ENV, KAFKA_BROKERS, KAFKA_APIKEY) kp.prepareProducer("ProduceContainerPython") kp.publishEvent(TOPIC_NAME, evt, "containerID")