import hashlib
import json

from ndingest.ndqueue.cleanupqueue import CleanupQueue

# `settings` and `nd_proj` are assumed to come from the package's shared test
# configuration (an ndingest settings object and an ingest-project instance);
# their exact import paths are not shown here.


class Test_Cleanup_Queue():

    def setup_class(self):
        """Setup class parameters"""
        if 'SQS_ENDPOINT' in dir(settings):
            self.endpoint_url = settings.SQS_ENDPOINT
        else:
            self.endpoint_url = None
        CleanupQueue.createQueue(nd_proj, endpoint_url=self.endpoint_url)
        self.cleanup_queue = CleanupQueue(nd_proj, endpoint_url=self.endpoint_url)

    def teardown_class(self):
        """Teardown parameters"""
        CleanupQueue.deleteQueue(nd_proj, endpoint_url=self.endpoint_url)

    def test_Message(self):
        """Testing the cleanup queue"""
        supercuboid_key = 'kasthuri11&image&0&0'
        self.cleanup_queue.sendMessage(supercuboid_key)
        for message_id, receipt_handle, message_body in self.cleanup_queue.receiveMessage():
            assert(supercuboid_key == message_body)
            response = self.cleanup_queue.deleteMessage(message_id, receipt_handle)
            assert('Successful' in response)

    def test_sendBatchMessages(self):
        fake_data0 = {'foo': 'bar'}
        fake_data1 = {'john': 'doe'}
        jsonized0 = json.dumps(fake_data0)
        jsonized1 = json.dumps(fake_data1)
        md5_0 = hashlib.md5(jsonized0.encode('utf-8')).hexdigest()
        md5_1 = hashlib.md5(jsonized1.encode('utf-8')).hexdigest()
        try:
            response = self.cleanup_queue.sendBatchMessages([fake_data0, fake_data1], 0)
            assert('Successful' in response)
            success_ids = []
            for msg_result in response['Successful']:
                id = msg_result['Id']
                success_ids.append(id)
                if id == '0':
                    assert(md5_0 == msg_result['MD5OfMessageBody'])
                elif id == '1':
                    assert(md5_1 == msg_result['MD5OfMessageBody'])
            assert('0' in success_ids)
            assert('1' in success_ids)
        finally:
            for message_id, receipt_handle, _ in self.cleanup_queue.receiveMessage():
                self.cleanup_queue.deleteMessage(message_id, receipt_handle)
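# The SQS_ENDPOINT branch in setup_class above lets this class run against a
# local SQS emulator instead of AWS. A hypothetical settings-module override
# could be as small as this (the attribute name comes from the test; the URL
# is a placeholder for a local emulator such as LocalStack):
#
#     SQS_ENDPOINT = 'http://localhost:4566'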
def test_sendBatchMessages(sqs):
    fake_data0 = {"foo": "bar"}
    fake_data1 = {"john": "doe"}
    jsonized0 = json.dumps(fake_data0)
    jsonized1 = json.dumps(fake_data1)
    md5_0 = hashlib.md5(jsonized0.encode("utf-8")).hexdigest()
    md5_1 = hashlib.md5(jsonized1.encode("utf-8")).hexdigest()

    proj = generate_proj()

    from ndingest.ndqueue.cleanupqueue import CleanupQueue
    CleanupQueue.createQueue(proj)
    cleanup_queue = CleanupQueue(proj)

    try:
        response = cleanup_queue.sendBatchMessages([fake_data0, fake_data1], 0)
        assert "Successful" in response
        success_ids = []
        for msg_result in response["Successful"]:
            id = msg_result["Id"]
            success_ids.append(id)
            if id == "0":
                assert md5_0 == msg_result["MD5OfMessageBody"]
            elif id == "1":
                assert md5_1 == msg_result["MD5OfMessageBody"]
        assert "0" in success_ids
        assert "1" in success_ids
    finally:
        for message_id, receipt_handle, _ in cleanup_queue.receiveMessage():
            cleanup_queue.deleteMessage(message_id, receipt_handle)
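# The 'Successful', 'Id', and 'MD5OfMessageBody' keys asserted above are the
# standard fields of SQS's batch-send response. For reference, a raw boto3
# equivalent of what sendBatchMessages presumably does is sketched below; the
# queue URL, region, and DelaySeconds handling are placeholders/assumptions,
# and the ndingest wrapper may add its own batching or serialization on top.
import json

import boto3

sqs_resource = boto3.resource('sqs', region_name='us-east-1')
queue = sqs_resource.Queue('https://queue.amazonaws.com/123456789012/example-cleanup-queue')

entries = [
    {'Id': str(i), 'MessageBody': json.dumps(body), 'DelaySeconds': 0}
    for i, body in enumerate([{'foo': 'bar'}, {'john': 'doe'}])
]
response = queue.send_message_batch(Entries=entries)

# Each successful entry echoes its Id and the MD5 of the stored body, which is
# exactly what the test compares against its own hashlib digests.
for msg_result in response['Successful']:
    print(msg_result['Id'], msg_result['MD5OfMessageBody'])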
def test_message(sqs):
    """Testing the cleanup queue"""
    proj = generate_proj()

    from ndingest.ndqueue.cleanupqueue import CleanupQueue
    CleanupQueue.createQueue(proj)
    cleanup_queue = CleanupQueue(proj)

    supercuboid_key = "kasthuri11&image&0&0"
    cleanup_queue.sendMessage(supercuboid_key)

    for message_id, receipt_handle, message_body in cleanup_queue.receiveMessage():
        assert supercuboid_key == message_body
        response = cleanup_queue.deleteMessage(message_id, receipt_handle)
        assert "Successful" in response
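# The two function-style tests above rely on an `sqs` fixture and a
# `generate_proj` helper that are not shown here. The sketch below is one
# plausible conftest.py for them, assuming moto is used to mock SQS; the moto
# import (mock_sqs, moto < 5) and the IngestProj module path and constructor
# arguments are assumptions, not the project's actual code.
import pytest
from moto import mock_sqs  # moto >= 5 exposes mock_aws instead


@pytest.fixture
def sqs():
    """Run each test against an in-memory mocked SQS service."""
    with mock_sqs():
        yield


def generate_proj():
    """Build a throwaway ingest-project object used for queue naming."""
    # Assumed module path and constructor values; substitute the project's
    # own IngestProj implementation and real identifiers here.
    from ndingest.ndingestproj.ingestproj import IngestProj
    ProjClass = IngestProj.load()
    return ProjClass('kasthuri11', 'image', '0')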
class Test_UploadLambda:

    def setup_class(self):
        """Setup class parameters"""
        # create the tile index table. skip if it exists
        try:
            TileIndexDB.createTable(endpoint_url=settings.DYNAMO_ENDPOINT)
            CuboidIndexDB.createTable(endpoint_url=settings.DYNAMO_ENDPOINT)
        except Exception as e:
            pass
        self.tileindex_db = TileIndexDB(nd_proj.project_name, endpoint_url=settings.DYNAMO_ENDPOINT)
        self.tile_bucket = TileBucket(nd_proj.project_name, endpoint_url=settings.S3_ENDPOINT)
        [self.x_tile, self.y_tile, self.z_tile] = [0, 0, 0]
        supercuboid_key = 'testing'
        message_id = '123456'
        receipt_handle = 'testing123456'
        message = serializer.encodeDeleteMessage(supercuboid_key, message_id, receipt_handle)
        # insert message in the cleanup queue
        CleanupQueue.createQueue(nd_proj, endpoint_url=settings.SQS_ENDPOINT)
        self.cleanup_queue = CleanupQueue(nd_proj, endpoint_url=settings.SQS_ENDPOINT)
        self.cleanup_queue.sendMessage(message)
        # receive message and upload object
        for z_index in range(self.z_tile, settings.SUPER_CUBOID_SIZE[2], 1):
            tile_handle = cStringIO.StringIO()
            self.tile_bucket.putObject(tile_handle, nd_proj.channel_name, nd_proj.resolution,
                                       self.x_tile, self.y_tile, z_index, message_id, receipt_handle)

    def teardown_class(self):
        """Teardown class parameters"""
        TileIndexDB.deleteTable(endpoint_url=settings.DYNAMO_ENDPOINT)
        CuboidIndexDB.deleteTable(endpoint_url=settings.DYNAMO_ENDPOINT)
        CleanupQueue.deleteQueue(nd_proj, endpoint_url=settings.SQS_ENDPOINT)

    def test_Uploadevent(self):
        """Testing the event"""
        # creating an emulambda function
        func = emulambda.import_lambda('cleanuplambda.lambda_handler')
        # creating an emulambda event
        event = emulambda.parse_event(
            open('../ndlambda/functions/cleanup/cleanup_event.json').read())
        # calling the emulambda function to invoke a lambda
        emulambda.invoke_lambda(func, event, None, 0, None)
        # test if there are any tiles leftover in tile bucket
        for z_index in range(self.z_tile, settings.SUPER_CUBOID_SIZE[2], 1):
            tile = self.tile_bucket.getObject(nd_proj.channel_name, nd_proj.resolution,
                                              self.x_tile, self.y_tile, z_index)
            assert (tile is None)
        # check if there are any entries left in the tileindex table
        supercuboid_key = self.tileindex_db.generatePrimaryKey(
            nd_proj.channel_name, nd_proj.resolution, self.x_tile, self.y_tile, self.z_tile)
        item = self.tileindex_db.getItem(supercuboid_key)
        assert (item is None)
        # testing if the message was deleted from the cleanup queue or not
        for message in self.cleanup_queue.receiveMessage():
            # KL TODO write the message id into the JSON event file directly
            print(message)
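# emulambda drives the handler above, but the same check can be sketched by
# importing the handler and calling it directly, since a Lambda handler is a
# plain function of (event, context). The module name and event-file path are
# taken from the test; passing None as the context is an assumption that the
# handler never touches it.
import json

from cleanuplambda import lambda_handler

with open('../ndlambda/functions/cleanup/cleanup_event.json') as f:
    event = json.load(f)

lambda_handler(event, None)  # context unused in this sketch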