def test5_containerAnomalyDead(self):
    """Verify a ContainerAnomalyDead event for CONTAINER_ID is delivered on the
    container-anomaly-dead topic and matches the expected event loaded from file.

    Fix: the JSON file is now opened with a context manager so the handle is
    released even when an assertion between open() and close() fails.
    """
    print('-----------------------------------------')
    print('-- [TEST] : ContainerAnomalyDead Event --')
    print('-----------------------------------------\n')

    print("1 - Load the expected ContainerAnomalyDead event from its json file")
    # Context manager guarantees the file is closed even if an assert fails
    with open('../data/containerAnomalyDeadEvent.json', 'r') as f:
        expected_container_anomaly_dead_event = json.load(f)
    # Verify we have read the file
    self.assertIsNotNone(expected_container_anomaly_dead_event)
    # Prepare expected container anomaly dead event with the containerID
    expected_container_anomaly_dead_event['containerID'] = CONTAINER_ID
    print("The expected container anomaly dead event is:")
    print(json.dumps(expected_container_anomaly_dead_event, indent=4, sort_keys=True))
    print("Done\n")

    print("2 - Read the container anomaly dead event from the container-anomaly-dead topic")
    # Create a KafkaConsumer object to interact with Kafka/Event Streams
    kc = KafkaConsumer(KAFKA_ENV, KAFKA_BROKERS, KAFKA_APIKEY, CONTAINER_ANOMALY_DEAD_TOPIC)
    # Verify we have a KafkaConsumer object
    self.assertIsNotNone(kc)
    kc.prepareConsumer()
    # Verify the consumer has been created
    self.assertIsNotNone(kc.consumer)
    # Read next event in the topic by key
    container_anomaly_dead_event = kc.pollNextEventByKey(CONTAINER_ID)
    # Remove timestamp as it is not important for integration tests
    # and would be hard to calculate
    container_anomaly_dead_event['timestamp'] = ""
    print("This is the container anomaly dead event read from the container anomaly dead topic:")
    print(json.dumps(container_anomaly_dead_event, indent=4, sort_keys=True))
    # Close the Kafka/Event Streams consumer
    kc.close()
    print("Done\n")

    print("3 - Verify the container anomaly dead event")
    # Verify container anomaly dead event read from the topic is as expected
    self.assertEqual(sorted(container_anomaly_dead_event.items()),
                     sorted(expected_container_anomaly_dead_event.items()))
    print("Done\n")
def test4_containerAnomalyRetry(self):
    """Verify three ContainerAnomalyRetry events (retries 1..3) are delivered
    on the container-anomaly-retry topic for CONTAINER_ID.

    Fix: the JSON file is now opened with a context manager so the handle is
    released even when an assertion fails before close().
    """
    print('------------------------------------------')
    print('-- [TEST] : ContainerAnomalyRetry Event --')
    print('------------------------------------------\n')

    print("1 - Load the expected ContainerAnomalyRetry event from its json file")
    # Context manager guarantees the file is closed even if an assert fails
    with open('../data/containerAnomalyRetryEvent.json', 'r') as f:
        expected_container_anomaly_retry_event = json.load(f)
    # Verify we have read the file
    self.assertIsNotNone(expected_container_anomaly_retry_event)
    # Prepare expected container anomaly retry event with the containerID
    expected_container_anomaly_retry_event['containerID'] = CONTAINER_ID
    print("Done\n")

    print("2 - Read the container anomaly retry events from the container-anomaly-retry topic")
    # Create a KafkaConsumer object to interact with Kafka/Event Streams
    kc = KafkaConsumer(KAFKA_ENV, KAFKA_BROKERS, KAFKA_USER, KAFKA_PASSWORD, CONTAINER_ANOMALY_RETRY_TOPIC)
    # Verify we have a KafkaConsumer object
    self.assertIsNotNone(kc)
    kc.prepareConsumer()
    # Verify the consumer has been created
    self.assertIsNotNone(kc.consumer)

    # The retry mechanism emits one event per retry with increasing delays
    for i in range(3):
        retry = i + 1
        print("Event number {}".format(retry))
        print("Sleeping for {} seconds.".format(retry * 10))
        time.sleep(retry * 10)
        # Set the retry for the expected container anomaly retry event
        expected_container_anomaly_retry_event['retries'] = retry
        print("Read a container anomaly retry event from the container anomaly retry topic")
        # Read next event in the topic by key
        container_anomaly_retry_event = kc.pollNextEventByKey(CONTAINER_ID)
        # Remove timestamp as it is not important for integration tests
        # and would be hard to calculate
        container_anomaly_retry_event['timestamp'] = ""
        print("This is the container anomaly retry event read from the container anomaly retry topic:")
        print(json.dumps(container_anomaly_retry_event, indent=4, sort_keys=True))
        print("This is the expected container anomaly retry event:")
        print(json.dumps(expected_container_anomaly_retry_event, indent=4, sort_keys=True))
        print("Compare container anomaly events")
        # Verify the container anomaly event received in the topic is the
        # expected container anomaly event
        self.assertEqual(sorted(container_anomaly_retry_event.items()),
                         sorted(expected_container_anomaly_retry_event.items()))

    # Close the Kafka/Event Streams consumer
    kc.close()
    print("Done\n")
def test5_orderRejected(self):
    """Verify an OrderRejected event for ORDER_ID appears on the orders topic
    and matches the expected event loaded from file.

    Fix: the JSON file is now opened with a context manager so the handle is
    released even when an assertion fails before close().
    """
    print('-------------------------------')
    print('--- [TEST] : Order Rejected ---')
    print('-------------------------------\n')

    print("1 - Load the expected OrderRejected event on the orders topic from its json files")
    # Context manager guarantees the file is closed even if an assert fails
    with open('../data/orderRejectedNoVoyageEvent.json', 'r') as f:
        expected_order = json.load(f)
    # Verify we have read the file
    self.assertIsNotNone(expected_order)
    # Prepare expected OrderRejected event with the orderID and containerID
    expected_order['payload']['orderID'] = ORDER_ID
    expected_order['payload']['containerID'] = CONTAINER_ID
    print("The expected OrderRejected event is:")
    print(json.dumps(expected_order, indent=4, sort_keys=True))
    print("Done\n")

    print("2 - Read OrderRejected event from the orders topic")
    # Create a KafkaConsumer object to interact with Kafka/Event Streams
    kc = KafkaConsumer(KAFKA_ENV, KAFKA_BROKERS, KAFKA_APIKEY, ORDERS_TOPIC)
    # Verify we have a KafkaConsumer object
    self.assertIsNotNone(kc)
    kc.prepareConsumer()
    # Verify the consumer has been created
    self.assertIsNotNone(kc.consumer)
    # Read next event in the topic by key
    order_event = kc.pollNextEventByKey(ORDER_ID)
    # Remove timestamp as it is not important for integration tests
    # and would be hard to calculate
    order_event['timestampMillis'] = ""
    print("This is the OrderRejected event read from the orders topic:")
    print(json.dumps(order_event, indent=4, sort_keys=True))
    # Close the Kafka/Event Streams consumer
    kc.close()
    print("Done\n")

    print("3 - Verify OrderRejected event")
    # Verify OrderRejected event read from the orders topic is as expected
    self.assertEqual(sorted(expected_order.items()), sorted(order_event.items()))
    print("Done\n")
def test5_voyageAssigned(self):
    """Verify a VoyageAssigned event for ORDER_ID appears on the orders topic
    and matches the expected event loaded from file.

    Fixes: JSON file opened with a context manager (handle leaked if an
    assertion failed before close()); corrected "oder" typo in a log message.
    """
    print('--------------------------------')
    print('--- [TEST] : Voyage Assigned ---')
    print('--------------------------------\n')

    print("1 - Load the expected voyage assigned event on the order topic from its json files")
    # Context manager guarantees the file is closed even if an assert fails
    with open('../data/orderVoyageAssignedEvent.json', 'r') as f_voyage:
        expected_voyage_assigned = json.load(f_voyage)
    # Verify we have read the file
    self.assertIsNotNone(expected_voyage_assigned)
    # Prepare expected voyage assigned event with orderID
    expected_voyage_assigned['payload']['orderID'] = ORDER_ID
    print("The expected voyage assigned event is:")
    print(json.dumps(expected_voyage_assigned, indent=4, sort_keys=True))
    print("Done\n")

    print("2 - Read voyage assigned from order topic")
    # Create a KafkaConsumer object to interact with Kafka/Event Streams
    kc = KafkaConsumer(KAFKA_ENV, KAFKA_BROKERS, KAFKA_USER, KAFKA_PASSWORD, ORDERS_TOPIC)
    # Verify we have a KafkaConsumer object
    self.assertIsNotNone(kc)
    kc.prepareConsumer()
    # Verify the consumer has been created
    self.assertIsNotNone(kc.consumer)
    # Read next event in the topic by key
    voyage_assigned = kc.pollNextEventByKey(ORDER_ID)
    # Remove timestamp as it is not important for integration tests
    # and would be hard to calculate
    voyage_assigned['timestamp'] = ""
    print("This is the event read from the order topic:")
    print(json.dumps(voyage_assigned, indent=4, sort_keys=True))
    # Close the Kafka/Event Streams consumer
    kc.close()
    print("Done\n")

    print("3 - Verify voyage assigned event")
    # Verify voyage assigned event read from the topic is as expected
    self.assertEqual(sorted(expected_voyage_assigned.items()),
                     sorted(voyage_assigned.items()))
    print("Done\n")
def test4_voyageNotFound(self):
    """Verify a VoyageNotFound event for ORDER_ID appears on the orders topic
    and matches the expected event loaded from file.

    Fixes: KafkaConsumer was constructed with only the topic name, unlike every
    sibling test which also passes the Kafka environment/broker/credential
    configuration — without them the consumer cannot connect; log messages
    wrongly said "ContainerNotFound"; JSON file opened with a context manager.
    """
    print('---------------------------------')
    print('--- [TEST] : Voyage Not Found ---')
    print('---------------------------------\n')

    print("1 - Load the expected voyageNotFound event on the orders topic from its json files")
    # Context manager guarantees the file is closed even if an assert fails
    with open('../data/voyageNotFoundEvent.json', 'r') as f:
        expected_event = json.load(f)
    # Verify we have read the file
    self.assertIsNotNone(expected_event)
    # Prepare expected event with the orderID
    expected_event['payload']['orderID'] = ORDER_ID
    print("The expected VoyageNotFound event is:")
    print(json.dumps(expected_event, indent=4, sort_keys=True))
    print("Done\n")

    print("2 - Read the VoyageNotFound event from the orders topic")
    # Create a KafkaConsumer object to interact with Kafka/Event Streams.
    # Pass the full Kafka configuration like the other tests in this suite do.
    kc = KafkaConsumer(KAFKA_ENV, KAFKA_BROKERS, KAFKA_USER, KAFKA_PASSWORD, ORDERS_TOPIC)
    # Verify we have a KafkaConsumer object
    self.assertIsNotNone(kc)
    kc.prepareConsumer()
    # Verify the consumer has been created
    self.assertIsNotNone(kc.consumer)
    # Read next event in the topic by key
    voyage_event = kc.pollNextEventByKey(ORDER_ID)
    # Remove timestamp as it is not important for integration tests
    # and would be hard to calculate
    voyage_event['timestamp'] = ""
    print("This is the VoyageNotFound event read from the orders topic:")
    print(json.dumps(voyage_event, indent=4, sort_keys=True))
    # Close the Kafka/Event Streams consumer
    kc.close()
    print("Done\n")

    print("3 - Verify VoyageNotFound expected event and read event")
    # Verify VoyageNotFound event read from the topic is as expected
    self.assertEqual(sorted(expected_event.items()), sorted(voyage_event.items()))
    print("Done\n")
print( "The KAFKA_APIKEY environment variable not set... assume local deployment" ) try: KAFKA_ENV = os.environ['KAFKA_ENV'] except KeyError: KAFKA_ENV = 'LOCAL' CID = "c_1" TOPIC_NAME = "containers" def parseArguments(): if len(sys.argv) != 2: print("Need to have at least one argument container ID") exit(1) CID = sys.argv[1] print("The arguments are: ", str(sys.argv)) print(KAFKA_ENV) print(KAFKA_BROKERS) print(KAFKA_APIKEY) if __name__ == '__main__': parseArguments() consumer = KafkaConsumer(KAFKA_ENV, KAFKA_BROKERS, KAFKA_APIKEY, TOPIC_NAME) consumer.prepareConsumer() consumer.pollNextEvent(CID, 'containerID') consumer.close()
def test5_orderCancelled(self):
    """Cancel the order via the order command REST API, then verify the
    CancelOrderCommand event on the order-commands topic and the
    OrderCancelled event on the orders topic.

    Fixes: JSON files opened with context managers (handles leaked if an
    assertion failed before close()); "Sleeping for 5 secs" messages corrected
    to match the actual time.sleep(10); banner typo "Cancalled" corrected.
    """
    print('--------------------------------')
    print('--- [TEST] : Order Cancelled ---')
    print('--------------------------------\n')

    print("1 - Cancel order by POST to order microservice's API endpoint")
    res = requests.post("http://" + ORDER_CMD_MS + "/orders/cancel/" + ORDER_ID)
    # Verify the post request has been successful
    self.assertEqual(res.status_code, 200)
    print("Done\n")

    # Give the asynchronous pipeline time to emit the command event
    print("Sleeping for 10 secs\n")
    time.sleep(10)

    print("2 - Make sure a new cancel order command event was delivered into the order-commands topic")
    # Create a KafkaConsumer object to interact with Kafka/Event Streams
    kc = KafkaConsumer(KAFKA_ENV, KAFKA_BROKERS, KAFKA_APIKEY, ORDER_COMMANDS_TOPIC)
    # Verify we have a KafkaConsumer object
    self.assertIsNotNone(kc)
    kc.prepareConsumer()
    # Verify the consumer has been created
    self.assertIsNotNone(kc.consumer)
    # Read next event in the topic by key
    cancel_order_command = kc.pollNextEventByKey(ORDER_ID)
    # Verify an order command event object is read
    self.assertIsNotNone(cancel_order_command)
    # Removing the timestamp from the comparison since we can't know what
    # time exactly it was created at
    cancel_order_command['timestampMillis'] = ""
    print("This is the order command event read from the topic:")
    print(json.dumps(cancel_order_command, indent=4, sort_keys=True))
    # Close the Kafka/Event Streams consumer
    kc.close()
    print("Done\n")

    print("3 - Load the expected cancel order command event from json file")
    # Context manager guarantees the file is closed even if an assert fails
    with open('../data/cancelOrderCommandEvent.json', 'r') as f:
        expected_cancel_order_command = json.load(f)
    # Verify we have read the file
    self.assertIsNotNone(expected_cancel_order_command)
    # Assign the orderID
    expected_cancel_order_command['payload']['orderID'] = ORDER_ID
    print("The expected cancel order command event is:")
    print(json.dumps(expected_cancel_order_command, indent=4, sort_keys=True))
    print("Done\n")

    print("4 - Verify order command event")
    # Verify order command event read from the topic is as expected
    self.assertEqual(sorted(expected_cancel_order_command.items()),
                     sorted(cancel_order_command.items()))
    print("Done\n")

    # Give the pipeline time to emit the resulting OrderCancelled event
    print("Sleeping for 10 secs\n")
    time.sleep(10)

    print("5 - Load the expected OrderCancelled event on the orders topic from its json files")
    with open('../data/orderCancelled.json', 'r') as f:
        expected_order = json.load(f)
    # Verify we have read the file
    self.assertIsNotNone(expected_order)
    # Prepare expected OrderCancelled event with the orderID and containerID
    expected_order['payload']['orderID'] = ORDER_ID
    expected_order['payload']['containerID'] = CONTAINER_ID
    print("The expected OrderCancelled event is:")
    print(json.dumps(expected_order, indent=4, sort_keys=True))
    print("Done\n")

    print("6 - Read OrderCancelled event from the orders topic")
    # Create a KafkaConsumer object to interact with Kafka/Event Streams
    kc = KafkaConsumer(KAFKA_ENV, KAFKA_BROKERS, KAFKA_APIKEY, ORDERS_TOPIC)
    # Verify we have a KafkaConsumer object
    self.assertIsNotNone(kc)
    kc.prepareConsumer()
    # Verify the consumer has been created
    self.assertIsNotNone(kc.consumer)
    # Read next event in the topic by key
    order_event = kc.pollNextEventByKey(ORDER_ID)
    # Remove timestamp as it is not important for integration tests
    # and would be hard to calculate
    order_event['timestampMillis'] = ""
    print("This is the OrderCancelled event read from the orders topic:")
    print(json.dumps(order_event, indent=4, sort_keys=True))
    # Close the Kafka/Event Streams consumer
    kc.close()
    print("Done\n")

    print("7 - Verify OrderCancelled event")
    # Verify OrderCancelled event read from the orders topic is as expected
    self.assertEqual(sorted(expected_order.items()), sorted(order_event.items()))
    print("Done\n")
def test2_containerAllocated(self):
    """Verify the ContainerAssignedToOrder event on the containers topic and
    the ContainerAllocated event on the orders topic for the current
    CONTAINER_ID / ORDER_ID.

    Fixes: JSON files opened with context managers (handles leaked if an
    assertion failed before close()); "oder topic" typo corrected.
    """
    print('------------------------------------')
    print('--- [TEST] : Container Allocated ---')
    print('------------------------------------\n')

    print("1 - Load the expected container assigned to order event on the containers topic from its json files")
    # Context manager guarantees the file is closed even if an assert fails
    with open('../data/containerAssignedToOrderEvent.json', 'r') as f_container:
        expected_container = json.load(f_container)
    # Verify we have read the file
    self.assertIsNotNone(expected_container)
    # For this scenario, fill up the expected container ID with the actual
    # container ID allocated earlier in the test run
    expected_container['containerID'] = CONTAINER_ID
    expected_container['payload']['orderID'] = ORDER_ID
    expected_container['payload']['containerID'] = CONTAINER_ID
    print("The expected container assigned to order event is:")
    print(json.dumps(expected_container, indent=4, sort_keys=True))
    print("Done\n")

    print("2 - Read container assigned to order event from the containers topic")
    # Create a KafkaConsumer object to interact with Kafka/Event Streams
    kc = KafkaConsumer(KAFKA_ENV, KAFKA_BROKERS, KAFKA_APIKEY, CONTAINERS_TOPIC)
    # Verify we have a KafkaConsumer object
    self.assertIsNotNone(kc)
    kc.prepareConsumer()
    # Verify the consumer has been created
    self.assertIsNotNone(kc.consumer)
    # Read next event in the topic by key
    container_event = kc.pollNextEventByKey(CONTAINER_ID)
    # Remove timestamp as it is not important for integration tests
    # and would be hard to calculate
    container_event['timestamp'] = ""
    print("This is the container assigned to order event read from the containers topic:")
    print(json.dumps(container_event, indent=4, sort_keys=True))
    # Close the Kafka/Event Streams consumer
    kc.close()
    print("Done\n")

    print("3 - Verify container assigned to order event")
    # Verify container assigned to order event read from the topic is as expected
    self.assertEqual(sorted(expected_container.items()),
                     sorted(container_event.items()))
    print("Done\n")

    print("4 - Load the expected container allocated event on the order topic from its json files")
    with open('../data/orderContainerAllocatedEvent.json', 'r') as f_order:
        expected_container_allocated = json.load(f_order)
    # Verify we have read the file
    self.assertIsNotNone(expected_container_allocated)
    # Prepare expected container allocated event with the appropriate
    # orderID and containerID
    expected_container_allocated['orderID'] = ORDER_ID
    expected_container_allocated['payload']['orderID'] = ORDER_ID
    expected_container_allocated['payload']['containerID'] = CONTAINER_ID
    print("The expected container allocated event is:")
    print(json.dumps(expected_container_allocated, indent=4, sort_keys=True))
    print("Done\n")

    print("5 - Read container allocated event from the order topic")
    # Create a KafkaConsumer object to interact with Kafka/Event Streams
    kc = KafkaConsumer(KAFKA_ENV, KAFKA_BROKERS, KAFKA_APIKEY, ORDERS_TOPIC)
    # Verify we have a KafkaConsumer object
    self.assertIsNotNone(kc)
    kc.prepareConsumer()
    # Verify the consumer has been created
    self.assertIsNotNone(kc.consumer)
    # Read next event in the topic by key
    container_allocated = kc.pollNextEventByKey(ORDER_ID)
    # Remove timestamp as it is not important for integration tests
    # and would be hard to calculate
    container_allocated['timestamp'] = ""
    print("This is the event read from the order topic:")
    print(json.dumps(container_allocated, indent=4, sort_keys=True))
    # Close the Kafka/Event Streams consumer
    kc.close()
    print("Done\n")

    print("6 - Verify container allocated event")
    # Verify container allocated event read from the topic is as expected
    self.assertEqual(sorted(expected_container_allocated.items()),
                     sorted(container_allocated.items()))
    print("Done\n")
def test1_createOrder(self):
    """Create an order through the order command REST API and verify the
    resulting OrderCommand and OrderCreated events on their topics.

    Stores the created order's ID in the module-level ORDER_ID for use by
    subsequent tests.

    Fixes: JSON files opened with context managers (handles leaked if an
    assertion failed before close()); "Sleeping for 5 secs" messages corrected
    to match the actual time.sleep(10).
    """
    print('-----------------------------')
    print('--- [TEST] : Create order ---')
    print('-----------------------------\n')
    # We must use the global scope variable as this value will be used
    # throughout the entire test
    global ORDER_ID

    print("1 - Load the order request from json")
    # Context manager guarantees the file is closed even if an assert fails
    with open('../data/FreshProductOrder.json', 'r') as f:
        order = json.load(f)
    print("Done\n")

    print("2 - Create order by POST to order microservice's API endpoint")
    res = requests.post("http://" + ORDER_CMD_MS + "/orders", json=order)
    # Get the request response as a JSON object
    orderCommand = json.loads(res.text)
    # Grab the orderID from the JSON object
    ORDER_ID = orderCommand['orderID']
    print("The order ID for the order created is: {}".format(ORDER_ID))
    # Verify ORDER_ID is not None and not an empty string
    self.assertIsNotNone(ORDER_ID)
    self.assertNotEqual(str(ORDER_ID), "")
    print("Done\n")

    # Give the asynchronous pipeline time to emit the command event
    print("Sleeping for 10 secs\n")
    time.sleep(10)

    print("3 - Make sure a new order command event was delivered into the order-commands topic")
    # Create a KafkaConsumer object to interact with Kafka/Event Streams
    kc = KafkaConsumer(KAFKA_ENV, KAFKA_BROKERS, KAFKA_APIKEY, ORDER_COMMANDS_TOPIC)
    # Verify we have a KafkaConsumer object
    self.assertIsNotNone(kc)
    kc.prepareConsumer()
    # Verify the consumer has been created
    self.assertIsNotNone(kc.consumer)
    # Read next event in the topic by key
    order_command = kc.pollNextEventByKey(ORDER_ID)
    # Verify an order command event object is read
    self.assertIsNotNone(order_command)
    # Removing the timestamp from the comparison since we can't know what
    # time exactly it was created at
    order_command['timestampMillis'] = ""
    print("This is the order command event read from the topic:")
    print(json.dumps(order_command, indent=4, sort_keys=True))
    # Close the Kafka/Event Streams consumer
    kc.close()
    print("Done\n")

    print("4 - Load the expected order command event from json file")
    with open('../data/orderCommandEvent.json', 'r') as f:
        expected_order_command = json.load(f)
    # Verify we have read the file
    self.assertIsNotNone(expected_order_command)
    # Assign the orderID
    expected_order_command['payload']['orderID'] = ORDER_ID
    print("The expected order command event is:")
    print(json.dumps(expected_order_command, indent=4, sort_keys=True))
    print("Done\n")

    print("5 - Verify order command event")
    # Verify order command event read from the topic is as expected
    self.assertEqual(sorted(expected_order_command.items()),
                     sorted(order_command.items()))
    print("Done\n")

    # Give the pipeline time to emit the resulting order event
    print("Sleeping for 10 secs\n")
    time.sleep(10)

    print("6 - Make sure a new order event was delivered into the orders topic")
    # Create a KafkaConsumer object to interact with Kafka/Event Streams
    kc = KafkaConsumer(KAFKA_ENV, KAFKA_BROKERS, KAFKA_APIKEY, ORDERS_TOPIC)
    # Verify we have a KafkaConsumer object
    self.assertIsNotNone(kc)
    kc.prepareConsumer()
    # Verify the consumer has been created
    self.assertIsNotNone(kc.consumer)
    # Read next event in the topic by key
    order = kc.pollNextEventByKey(ORDER_ID)
    # Verify an order event object is read
    self.assertIsNotNone(order)
    # Removing the timestamp from the comparison since we can't know what
    # time exactly it was created at
    order['timestampMillis'] = ""
    print("This is the order event read from the topic:")
    print(json.dumps(order, indent=4, sort_keys=True))
    # Close the Kafka/Event Streams consumer
    kc.close()
    print("Done\n")

    print("7 - Load the expected order event from json file")
    with open('../data/orderCreatedEvent.json', 'r') as f:
        expected_order = json.load(f)
    # Verify we have read the file
    self.assertIsNotNone(expected_order)
    # Assign orderID
    expected_order['payload']['orderID'] = ORDER_ID
    print("The expected order event is:")
    print(json.dumps(expected_order, indent=4, sort_keys=True))
    print("Done\n")

    print("8 - Verify order event")
    # Verify order event read from the topic is as expected
    self.assertEqual(sorted(expected_order.items()), sorted(order.items()))
    print("Done\n")
# Read Kafka configuration from the environment, with local-deployment defaults.
try:
    KAFKA_APIKEY = os.environ['KAFKA_APIKEY']
except KeyError:
    print("The KAFKA_APIKEY environment variable not set... assume local deployment")
    # Fix: KAFKA_APIKEY was never assigned on this path, causing a NameError
    # below when the env var is unset; default to empty for local deployment.
    KAFKA_APIKEY = ''
try:
    KAFKA_ENV = os.environ['KAFKA_ENV']
except KeyError:
    KAFKA_ENV = 'LOCAL'

TOPIC_NAME = 'orders'
OID = ''


def parseArguments():
    """Read the order ID from the command line into the module-level OID.

    Exits with status 1 when no order ID argument is supplied.
    """
    # Fix: without the global declaration the assignment created a local
    # variable and the module-level OID stayed '', so the consumer polled
    # with an empty key.
    global OID
    if len(sys.argv) != 2:
        print("Need to have at least one argument order ID")
        exit(1)
    OID = sys.argv[1]
    print("The arguments are: ", str(sys.argv))


if __name__ == '__main__':
    parseArguments()
    orderConsumer = KafkaConsumer(KAFKA_ENV, KAFKA_BROKERS, KAFKA_APIKEY, TOPIC_NAME)
    orderConsumer.prepareConsumer()
    orderConsumer.pollNextEvent(OID, 'orderID')
    orderConsumer.close()
def test1_createContainer(self):
    """Publish a container create event, read it back from the containers
    topic, and verify the container microservice's REST view matches the
    expected empty container.

    Fixes: JSON files opened with context managers (handles leaked if an
    assertion failed before close()); last-element access uses [-1].
    """
    print('-------------------------------')
    print('-- [TEST] : Create container --')
    print('-------------------------------\n')

    print("1 - Load the container event from json file")
    # Context manager guarantees the file is closed even if an assert fails
    with open('../data/containerCreateEvent.json', 'r') as f:
        new_container = json.load(f)
    # Verify we have read a container
    self.assertIsNotNone(new_container)
    # Provide the timestamp for the creation time of the container/event
    new_container['timestamp'] = int(time.time())
    # Verify the container has a valid timestamp
    self.assertGreater(new_container['timestamp'], 0)
    # Provide the container ID
    new_container['containerID'] = CONTAINER_ID
    new_container['payload']['containerID'] = CONTAINER_ID
    print("Container event to be sent:")
    print(json.dumps(new_container, indent=4, sort_keys=True))
    print("Done\n")

    print("2 - Post container event into the containers topic")
    # Create a KafkaProducer object to interact with Kafka/Event Streams
    kp = KafkaProducer(KAFKA_ENV, KAFKA_BROKERS, KAFKA_APIKEY)
    # Verify we have a KafkaProducer object
    self.assertIsNotNone(kp)
    kp.prepareProducer("ProduceContainerPython")
    # Verify the producer has been created
    self.assertIsNotNone(kp.producer)
    # Publish the create container event
    kp.publishEvent(CONTAINERS_TOPIC, new_container, "containerID")
    print("Done\n")

    # Give the event time to propagate before consuming
    print("Sleeping for 5 secs\n")
    time.sleep(5)

    print("3 - Read container event from the containers topic")
    # Create a KafkaConsumer object to interact with Kafka/Event Streams
    kc = KafkaConsumer(KAFKA_ENV, KAFKA_BROKERS, KAFKA_APIKEY, CONTAINERS_TOPIC)
    # Verify we have a KafkaConsumer object
    self.assertIsNotNone(kc)
    kc.prepareConsumer()
    # Verify the consumer has been created
    self.assertIsNotNone(kc.consumer)
    # Read next event in the topic by key
    read_container = kc.pollNextEventByKey(CONTAINER_ID)
    # A container event object is read
    self.assertIsNotNone(read_container)
    print("This is the container event read:")
    print(json.dumps(read_container, indent=4, sort_keys=True))
    # Close the Kafka/Event Streams consumer
    kc.close()
    print("Done\n")

    print("4 - Compare events")
    # Verify new container event sent and container event read from the
    # topic are the same
    self.assertEqual(sorted(new_container.items()), sorted(read_container.items()))
    print("Done\n")

    print("5 - Read container object from the container microservice's API endpoint")
    response = requests.get("http://" + CONTAINER_SPRING_MS + "/containers")
    # Verify we get a response
    self.assertIsNotNone(response)
    # Load containers from the response
    json_data = json.loads(response.text)
    # Verify we get at least one container back
    self.assertGreater(len(json_data['content']), 0)
    # Get latest container
    api_container = json_data['content'][-1]
    # Verify we have a container
    self.assertIsNotNone(api_container)
    print("This is the API container object")
    print(json.dumps(api_container, indent=4, sort_keys=True))
    print("Done\n")

    print("6 - Read expected empty container from json file")
    with open('../data/containerEmptyEvent.json', 'r') as f2:
        expected_container = json.load(f2)
    # Verify we have a container
    self.assertIsNotNone(expected_container)
    # For simplicity, we will not work out timestamps
    expected_container['createdAt'] = api_container['createdAt']
    expected_container['updatedAt'] = api_container['updatedAt']
    # Assign the containerID
    expected_container['id'] = CONTAINER_ID
    print("This is the expected container object:")
    print(json.dumps(expected_container, indent=4, sort_keys=True))
    print("Done\n")

    print("7 - Compare Containers")
    # Verify the container object returned by the API endpoint is the
    # expected container object
    self.assertEqual(sorted(expected_container.items()), sorted(api_container.items()))
    print("Done\n")
def test4_containerAllocated(self):
    """Verify the ContainerAssignedToOrder event, the ContainerAllocated event,
    and the container microservice's REST view of the loaded container.

    Fixes: JSON files opened with context managers (handles leaked if an
    assertion failed before close()); "oder topic" typo corrected; last-element
    access uses [-1].
    """
    print('------------------------------------')
    print('--- [TEST] : Container Allocated ---')
    print('------------------------------------\n')

    print("1 - Load the expected container assigned to order event on the containers topic from its json files")
    # Context manager guarantees the file is closed even if an assert fails
    with open('../data/containerAssignedToOrderEvent.json', 'r') as f_container:
        expected_container = json.load(f_container)
    # Verify we have read the file
    self.assertIsNotNone(expected_container)
    # Prepare expected container assigned to order event with the
    # containerID and orderID
    expected_container['containerID'] = CONTAINER_ID
    expected_container['payload']['orderID'] = ORDER_ID
    expected_container['payload']['containerID'] = CONTAINER_ID
    print("The expected container assigned to order event is:")
    print(json.dumps(expected_container, indent=4, sort_keys=True))
    print("Done\n")

    print("2 - Read container assigned to order event from the containers topic")
    # Create a KafkaConsumer object to interact with Kafka/Event Streams
    kc = KafkaConsumer(KAFKA_ENV, KAFKA_BROKERS, KAFKA_APIKEY, CONTAINERS_TOPIC)
    # Verify we have a KafkaConsumer object
    self.assertIsNotNone(kc)
    kc.prepareConsumer()
    # Verify the consumer has been created
    self.assertIsNotNone(kc.consumer)
    # Read next event in the topic by key
    container_event = kc.pollNextEventByKey(CONTAINER_ID)
    # Remove timestamp as it is not important for integration tests
    # and would be hard to calculate
    container_event['timestamp'] = ""
    print("This is the container assigned to order event read from the containers topic:")
    print(json.dumps(container_event, indent=4, sort_keys=True))
    # Close the Kafka/Event Streams consumer
    kc.close()
    print("Done\n")

    print("3 - Verify container assigned to order event")
    # Verify container assigned to order event read from the topic is as expected
    self.assertEqual(sorted(expected_container.items()),
                     sorted(container_event.items()))
    print("Done\n")

    print("4 - Load the expected container allocated event on the order topic from its json files")
    with open('../data/orderContainerAllocatedEvent.json', 'r') as f_order:
        expected_container_allocated = json.load(f_order)
    # Verify we have read the file
    self.assertIsNotNone(expected_container_allocated)
    # Prepare expected container allocated event with the appropriate
    # orderID and containerID
    expected_container_allocated['orderID'] = ORDER_ID
    expected_container_allocated['payload']['orderID'] = ORDER_ID
    expected_container_allocated['payload']['containerID'] = CONTAINER_ID
    print("The expected container allocated event is:")
    print(json.dumps(expected_container_allocated, indent=4, sort_keys=True))
    print("Done\n")

    print("5 - Read container allocated event from the order topic")
    # Create a KafkaConsumer object to interact with Kafka/Event Streams
    kc = KafkaConsumer(KAFKA_ENV, KAFKA_BROKERS, KAFKA_APIKEY, ORDERS_TOPIC)
    # Verify we have a KafkaConsumer object
    self.assertIsNotNone(kc)
    kc.prepareConsumer()
    # Verify the consumer has been created
    self.assertIsNotNone(kc.consumer)
    # Read next event in the topic by key
    container_allocated = kc.pollNextEventByKey(ORDER_ID)
    # Remove timestamp as it is not important for integration tests
    # and would be hard to calculate
    container_allocated['timestamp'] = ""
    print("This is the event read from the order topic:")
    print(json.dumps(container_allocated, indent=4, sort_keys=True))
    # Close the Kafka/Event Streams consumer
    kc.close()
    print("Done\n")

    print("6 - Verify container allocated event")
    # Verify container allocated event read from the topic is as expected
    self.assertEqual(sorted(expected_container_allocated.items()),
                     sorted(container_allocated.items()))
    print("Done\n")

    print("7 - Read container object from the container microservice's API endpoint")
    response = requests.get("http://" + CONTAINER_SPRING_MS + "/containers")
    # Verify we get a response
    self.assertIsNotNone(response)
    # Get the containers from the response
    json_data = json.loads(response.text)
    # Verify we get at least one container back
    self.assertGreater(len(json_data['content']), 0)
    # Get the latest container
    api_container = json_data['content'][-1]
    # For simplicity, we will not work out timestamps
    api_container['createdAt'] = ""
    api_container['updatedAt'] = ""
    print("This is the API container object")
    print(json.dumps(api_container, indent=4, sort_keys=True))
    print("Done\n")

    print("8 - Read expected loaded container from json file")
    with open('../data/containerLoadedEvent.json', 'r') as f:
        expected_loaded_container = json.load(f)
    # Verify we have read a container object
    self.assertIsNotNone(expected_loaded_container)
    # Fill in the container ID
    expected_loaded_container['id'] = CONTAINER_ID
    print("This is the expected container object:")
    print(json.dumps(expected_loaded_container, indent=4, sort_keys=True))
    print("Done\n")

    print("9 - Compare Containers")
    # Verify the container object returned by the API endpoint is the
    # expected container object
    self.assertEqual(sorted(expected_loaded_container.items()),
                     sorted(api_container.items()))
    print("Done\n")
def test4_containerOrderSpoilt(self): print('---------------------------') print('-- [TEST] : Order Spoilt --') print('---------------------------\n') print("1 - Load the expected order spoilt event its json file") # Open file to read f = open('../data/orderSpoiltEvent.json', 'r') # Load the expected order spoilt event expected_order_spoilt_event = json.load(f) # Verify we have read the files self.assertIsNotNone(expected_order_spoilt_event) # Prepare expected container assigned to order event with the containerID and orderID expected_order_spoilt_event['orderID'] = ORDER_ID expected_order_spoilt_event['payload']['orderID'] = ORDER_ID expected_order_spoilt_event['payload']['containerID'] = CONTAINER_ID print("The expected container assigned to order event is:") print(json.dumps(expected_order_spoilt_event, indent=4, sort_keys=True)) # Close the file f.close() print("Done\n") print("2 - Read the order spoilt event from the orders topic") # Create a KafkaConsumer object to interact with Kafka/Event Streams kc = KafkaConsumer(ORDERS_TOPIC) # Verify we have a KafkaConsumer object self.assertIsNotNone(kc) kc.prepareConsumer() # Verify the consumer has been created self.assertIsNotNone(kc.consumer) # Read next event in the topic by key order_spoilt_event = kc.pollNextEventByKey(ORDER_ID) # Remove timestamp as it is not important for integration tests and would be hard to calculate order_spoilt_event['timestamp'] = "" print("This is the order spoilt event read from the orders topic:") print(json.dumps(order_spoilt_event, indent=4, sort_keys=True)) # Close the Kafka/Event Streams consumer kc.close() print("Done\n") print("3 - Verify the order spoilt event") # Verify order spoilt event read from the topic is as expected self.assertEqual(sorted(expected_order_spoilt_event.items()), sorted(order_spoilt_event.items())) print("Done\n") print("4 - Load the expected resulting order for Order Command") # Open file to read f_order_command = 
open('../data/orderSpoiltRESTOrderCommand.json', 'r') # Load the expected order command expected_order_command = json.load(f_order_command) # Verify we have read the file self.assertIsNotNone(expected_order_command) # Prepare expected container allocated event with orderID and containerID expected_order_command['orderID'] = ORDER_ID expected_order_command['containerID'] = CONTAINER_ID print("The expected resulting order is:") print(json.dumps(expected_order_command, indent=4, sort_keys=True)) # Close the file f_order_command.close() print("Done\n") print( "5 - Read order from the order command microservice's API endpoint" ) response = requests.get("http://" + ORDER_CMD_MS + "/orders/" + ORDER_ID) # Verify we get an http 200 response self.assertEqual(200, response.status_code) # Load the order from the API's response order_command = json.loads(response.text) print("This is the order from the order command microservice's API") print(json.dumps(order_command, indent=4, sort_keys=True)) print("Done\n") print("6 - Verify order") # Verify order from the order command API's endpoint is as expected self.assertEqual(sorted(expected_order_command.items()), sorted(order_command.items())) print("Done\n") print("7 - Load the expected resulting order for Order Query") # Open file to read f_order_query = open('../data/orderSpoiltRESTOrderQuery.json', 'r') # Load the expected order object for the order query microservice expected_order_query = json.load(f_order_query) # Verify we have read the file self.assertIsNotNone(expected_order_query) # Prepare expected container allocated event with orderID and containerID expected_order_query['orderID'] = ORDER_ID expected_order_query['containerID'] = CONTAINER_ID print("The expected resulting order is:") print(json.dumps(expected_order_query, indent=4, sort_keys=True)) # Close the file f_order_query.close() print("Done\n") print( "8 - Read order from the order query microservice's API endpoint") response = requests.get("http://" + 
ORDER_QUERY_MS + "/orders/" + ORDER_ID) # Verify we get an http 200 response self.assertEqual(200, response.status_code) # Load the order from the order query API's response order_query = json.loads(response.text) print("This is the order from the order command microservice's API") print(json.dumps(order_query, indent=4, sort_keys=True)) print("Done\n") print("9 - Verify order") # Verify order from the order query microservice API's is as expected self.assertEqual(sorted(expected_order_query.items()), sorted(order_query.items())) print("Done\n")
# NOTE(review): these two statements are the tail of an environment-variable
# check (most likely an `except KeyError:` handler for SCRAM_PASSWORD) whose
# opening lines are outside this view — confirm against the full file.
print("[ERROR] - The SCRAM_PASSWORD environment variable needs to be set")
exit(1)

####################### FUNCTIONS #######################

# Parse arguments to get the Kafka topic
def parseArguments():
    """Read the Kafka topic name from the command line into the global TOPIC_NAME.

    Expects exactly one argument (the topic to consume from); otherwise prints
    a usage error and exits with status 1.
    """
    global TOPIC_NAME
    print("The arguments for this script are: ", str(sys.argv))
    if len(sys.argv) == 2:
        TOPIC_NAME = sys.argv[1]
    else:
        print(
            "[ERROR] - The ConsumePlainMessage.py script expects one argument: The Kafka topic to consume messages from"
        )
        exit(1)

####################### MAIN #######################
if __name__ == '__main__':
    # Parse arguments to get the topic to read from
    parseArguments()
    # Create a Kafka Consumer using the SCRAM credentials read from the environment
    kafka_consumer = KafkaConsumer(KAFKA_BROKERS, SCRAM_USERNAME, SCRAM_PASSWORD, TOPIC_NAME)
    # Prepare the consumer
    kafka_consumer.prepareConsumer()
    # Poll for next message
    kafka_consumer.pollNextEvent()
    # Close the consumer
    kafka_consumer.close()
# NOTE(review): this handler's matching `try:` block (reading KAFKA_APIKEY from
# the environment) lies outside this view.
except KeyError:
    print("[ERROR] - The KAFKA_APIKEY environment variable needs to be set")
    exit(1)

####################### FUNCTIONS #######################

# Parse arguments to get the Kafka topic
def parseArguments():
    """Read the Kafka topic name from the command line into the global TOPIC_NAME.

    Expects exactly one argument (the topic to consume from); otherwise prints
    a usage error and exits with status 1.
    """
    global TOPIC_NAME
    print("The arguments for this script are: ", str(sys.argv))
    if len(sys.argv) == 2:
        TOPIC_NAME = sys.argv[1]
    else:
        print(
            "[ERROR] - The ConsumePlainMessage.py script expects one argument: The Kafka topic to consume messages from"
        )
        exit(1)

####################### MAIN #######################
if __name__ == '__main__':
    # Parse arguments to get the topic to read from
    parseArguments()
    # Create a Kafka Consumer authenticated with the API key read from the environment
    kafka_consumer = KafkaConsumer(KAFKA_BROKERS, KAFKA_APIKEY, TOPIC_NAME)
    # Prepare the consumer
    kafka_consumer.prepareConsumer()
    # Poll for next message
    kafka_consumer.pollNextEvent()
    # Close the consumer
    kafka_consumer.close()
# Resolve the microservice endpoints from the environment, falling back to the
# in-cluster defaults when the variables are not set.
try:
    ORDER_CMD_MS = os.environ['ORDER_CMD_MS']
except KeyError:
    ORDER_CMD_MS = "ordercmd:9080"
try:
    ORDER_QUERY_MS = os.environ['ORDER_QUERY_MS']
except KeyError:
    # BUG FIX: was a bare `except:`, which would also swallow SystemExit and
    # KeyboardInterrupt; narrowed to KeyError to match the sibling lookup above.
    ORDER_QUERY_MS = "orderquery:9080"

# listen to orders topic, verify orderCreated event was published
from kafka.KcConsumer import KafkaConsumer

TOPIC_NAME = 'orders'
orderConsumer = KafkaConsumer(KAFKA_ENV, KAFKA_BROKERS, KAFKA_APIKEY, TOPIC_NAME, False)
orderConsumer.prepareConsumer()

def pollNextOrder(orderid):
    # Block on the shared consumer until the next event keyed by 'orderID'
    # matching this order arrives.
    return orderConsumer.pollNextEvent(orderid, 'orderID')

class TestEventSourcingHappyPath(unittest.TestCase):
    def test_createOrder(self):
        # NOTE(review): this method continues beyond the visible chunk; only the
        # visible statements are reproduced here.
        print("1- load the order request from json")
        # with-statement closes the file even if json.load raises
        with open('../data/FreshProductOrder.json', 'r') as f:
            order = json.load(f)
        print("2- create order by doing a POST on /api/orders of the orders command service")
        res = requests.post("http://" + ORDER_CMD_MS + "/orders", json=order)
        orderID = json.loads(res.text)['orderID']