def test_sendBatchMessages(sqs):
    """Send two messages in one batch and verify both succeed with correct MD5s.

    Args:
        sqs: pytest fixture providing a (mock or real) SQS endpoint.
    """
    fake_data0 = {"foo": "bar"}
    fake_data1 = {"john": "doe"}
    jsonized0 = json.dumps(fake_data0)
    jsonized1 = json.dumps(fake_data1)
    # SQS reports an MD5 of each message body; precompute the expected values.
    md5_0 = hashlib.md5(jsonized0.encode("utf-8")).hexdigest()
    md5_1 = hashlib.md5(jsonized1.encode("utf-8")).hexdigest()
    proj = generate_proj()
    from ndingest.ndqueue.uploadqueue import UploadQueue

    UploadQueue.createQueue(proj)
    upload_queue = UploadQueue(proj)
    try:
        response = upload_queue.sendBatchMessages([jsonized0, jsonized1], 0)
        assert "Successful" in response
        success_ids = []
        for msg_result in response["Successful"]:
            # Renamed from `id` to avoid shadowing the builtin.
            msg_id = msg_result["Id"]
            success_ids.append(msg_id)
            if msg_id == "0":
                assert md5_0 == msg_result["MD5OfMessageBody"]
            elif msg_id == "1":
                assert md5_1 == msg_result["MD5OfMessageBody"]
        # Batch entry ids are assigned positionally as strings ("0", "1").
        assert "0" in success_ids
        assert "1" in success_ids
    finally:
        # Drain the queue so later tests start from a clean state.
        for message_id, receipt_handle, _ in upload_queue.receiveMessage():
            upload_queue.deleteMessage(message_id, receipt_handle)
def test_generate_upload_tasks(self):
    """Generate upload tasks for an ingest job and verify the queued messages.

    Sets up an ingest job, generates its upload tasks, then pulls the
    resulting messages off the upload queue and checks each carries the
    job id. Queues are deleted in the finally clause regardless of outcome.
    """
    # Construct the manager before the try block: if IngestManager() itself
    # raised inside the try, the finally clause would hit a NameError on
    # `ingest_mgmr` and mask the real failure. The useless `except: raise`
    # from the original has been dropped — it changed nothing.
    ingest_mgmr = IngestManager()
    try:
        ingest_job = ingest_mgmr.setup_ingest(self.user.id, self.example_config_data)
        ingest_mgmr.generate_upload_tasks(ingest_job.id)
        assert ingest_job.collection == 'my_col_1'
        assert ingest_job.experiment == 'my_exp_1'
        assert ingest_job.channel == 'my_ch_1'

        # Pull the messages off the queue.
        proj_class = BossIngestProj.load()
        nd_proj = proj_class(
            ingest_job.collection,
            ingest_job.experiment,
            ingest_job.channel,
            ingest_job.resolution,
            ingest_job.id,
        )
        queue = UploadQueue(nd_proj, endpoint_url=None)

        # Receive messages from the queue.
        tmp = queue.receiveMessage(number_of_messages=10)
        for message_id, receipt_handle, message_body in tmp:
            assert message_body['job_id'] == ingest_job.id
            # Delete each message from the queue as it is verified.
            response = queue.deleteMessage(message_id, receipt_handle)
            assert 'Successful' in response
        ingest_mgmr.remove_ingest_credentials(ingest_job.id)
    finally:
        ingest_mgmr.delete_upload_queue()
        ingest_mgmr.delete_ingest_queue()
def test_message(sqs):
    """Test put, get and delete Message.

    Sends one upload message per z tile (z = 0 and 1), then receives,
    verifies, and deletes each message from the queue.
    """
    x_tile = 0
    y_tile = 0
    proj = generate_proj()
    from ndingest.ndqueue.uploadqueue import UploadQueue

    UploadQueue.createQueue(proj)
    upload_queue = UploadQueue(proj)
    for z_tile in range(2):
        # Encode the message for this tile.
        message = serializer.encodeUploadMessage(
            proj.project_name,
            proj.channel_name,
            proj.resolution,
            x_tile,
            y_tile,
            z_tile,
        )
        # Send the message to the queue.
        upload_queue.sendMessage(message)

    # Receive messages from the queue (ask for more than were sent to
    # exercise the batching path).
    for message_id, receipt_handle, message_body in upload_queue.receiveMessage(
        number_of_messages=3
    ):
        # Only z = 0 and 1 were sent above; the original also accepted 2,
        # which could never legitimately appear.
        assert message_body["z_tile"] in [0, 1]
        # Delete the message from the queue.
        response = upload_queue.deleteMessage(message_id, receipt_handle)
        # Check that the message was successfully deleted.
        assert "Successful" in response
class Test_UploadLambda:
    # Integration test for the upload lambda: setup stages one tile message
    # on the upload queue plus a matching S3 object, then the test invokes
    # the lambda via emulambda and checks its side effects.

    def setup_class(self):
        """Setup class parameters"""
        # create the tile index table. skip if it exists
        try:
            TileIndexDB.createTable(endpoint_url=settings.DYNAMO_ENDPOINT)
        except Exception as e:
            # NOTE(review): intent is "table already exists" — but any other
            # createTable error is silently swallowed here too.
            pass
        self.tileindex_db = TileIndexDB(
            nd_proj.project_name, endpoint_url=settings.DYNAMO_ENDPOINT
        )
        # create the ingest queue
        IngestQueue.createQueue(nd_proj, endpoint_url=settings.SQS_ENDPOINT)
        # create the upload queue
        UploadQueue.createQueue(nd_proj, endpoint_url=settings.SQS_ENDPOINT)
        self.upload_queue = UploadQueue(
            nd_proj, endpoint_url=settings.SQS_ENDPOINT
        )
        tile_bucket = TileBucket(
            nd_proj.project_name, endpoint_url=settings.S3_ENDPOINT
        )
        # Single tile at the origin.
        [self.x_tile, self.y_tile, self.z_tile] = [0, 0, 0]
        message = serializer.encodeUploadMessage(
            nd_proj.project_name,
            nd_proj.channel_name,
            nd_proj.resolution,
            self.x_tile,
            self.y_tile,
            self.z_tile,
        )
        # insert message in the upload queue
        self.upload_queue.sendMessage(message)
        # receive message and upload object
        for (
            message_id,
            receipt_handle,
            message_body,
        ) in self.upload_queue.receiveMessage():
            # Empty in-memory payload; cStringIO is Python-2-only.
            tile_handle = cStringIO.StringIO()
            # The message id / receipt handle are stored with the object so
            # the lambda can delete the queue message after processing.
            tile_bucket.putObject(
                tile_handle,
                nd_proj.channel_name,
                nd_proj.resolution,
                self.x_tile,
                self.y_tile,
                self.z_tile,
                message_id,
                receipt_handle,
            )

    def teardown_class(self):
        """Teardown class parameters"""
        # NOTE(review): the tile bucket created in setup_class is not
        # removed here — confirm whether that is intentional.
        TileIndexDB.deleteTable(endpoint_url=settings.DYNAMO_ENDPOINT)
        IngestQueue.deleteQueue(nd_proj, endpoint_url=settings.SQS_ENDPOINT)
        UploadQueue.deleteQueue(nd_proj, endpoint_url=settings.SQS_ENDPOINT)

    def test_Uploadevent(self):
        """Testing the event"""
        # creating an emulambda function
        func = emulambda.import_lambda("uploadlambda.lambda_handler")
        # creating an emulambda event
        event = emulambda.parse_event(
            open("../ndlambda/functions/upload/upload_event.json").read()
        )
        # calling the emulambda function to invoke a lambda
        emulambda.invoke_lambda(func, event, None, 0, None)
        # testing if the index was updated in tileindexdb
        supercuboid_key = self.tileindex_db.generatePrimaryKey(
            nd_proj.channel_name,
            nd_proj.resolution,
            self.x_tile,
            self.y_tile,
            self.z_tile,
        )
        item = self.tileindex_db.getItem(supercuboid_key)
        assert item["zindex_list"] == set([0])
        # testing if the message was deleted from the upload queue or not
        # (any message still present means the lambda failed to delete it)
        for message in self.upload_queue.receiveMessage():
            assert False