def teardown_class(self):
    """Teardown class parameters"""
    # clean up the tile bucket
    # note: the original iterated over the tuple (z_tile, SUPER_CUBOID_SIZE[2], 1);
    # range() is what the matching setup code uses and is clearly intended
    for z_index in range(self.z_tile, settings.SUPER_CUBOID_SIZE[2], 1):
        tile_key = self.tile_bucket.encodeObjectKey(
            nd_proj.channel_name,
            nd_proj.resolution,
            self.x_tile,
            self.y_tile,
            z_index,
        )
        self.tile_bucket.deleteObject(tile_key)

    # delete the supercuboid (the key is derived from the Morton index,
    # matching setup_class; the original computed morton_index but then
    # passed self.tiles by mistake)
    morton_index = XYZMorton(self.tiles)
    supercuboid_key = self.cuboid_bucket.generateSupercuboidKey(
        nd_proj.channel_name, nd_proj.resolution, morton_index)
    self.cuboid_bucket.deleteObject(supercuboid_key)

    # delete created entities
    TileIndexDB.deleteTable(endpoint_url="http://localhost:8000")
    CuboidIndexDB.deleteTable(endpoint_url="http://localhost:8000")
    IngestQueue.deleteQueue(nd_proj, endpoint_url="http://localhost:4568")
    CleanupQueue.deleteQueue(nd_proj, endpoint_url="http://localhost:4568")
    TileBucket.deleteBucket(endpoint_url="http://localhost:4567")
    try:
        CuboidBucket.deleteBucket(endpoint_url="http://localhost:4567")
    except Exception:
        # the bucket may already be gone; nothing left to clean up
        pass
def createIngestJob(self, user_id, config_data):
    """Create an ingest job based on the posted config data"""
    config_data = json.loads(config_data)
    # validate schema
    if self.validateConfig(config_data):
        try:
            # create the upload queue
            UploadQueue.createQueue(
                self.nd_proj, endpoint_url=ndingest_settings.SQS_ENDPOINT)
            self.upload_queue = UploadQueue(
                self.nd_proj, endpoint_url=ndingest_settings.SQS_ENDPOINT)
            self.ingest_job.upload_queue = self.upload_queue.url
            # create the ingest queue
            IngestQueue.createQueue(
                self.nd_proj, endpoint_url=ndingest_settings.SQS_ENDPOINT)
            self.ingest_job.ingest_queue = IngestQueue(
                self.nd_proj, endpoint_url=ndingest_settings.SQS_ENDPOINT).url
            # create the cleanup queue
            CleanupQueue.createQueue(
                self.nd_proj, endpoint_url=ndingest_settings.SQS_ENDPOINT)
            self.ingest_job.cleanup_queue = CleanupQueue(
                self.nd_proj, endpoint_url=ndingest_settings.SQS_ENDPOINT).url
            self.generateUploadTasks()
            self.ingest_job.user_id = user_id
            self.ingest_job.save()
            return NDIngestJob.serialize(self.ingest_job._job)
        except Exception as e:
            print(e)
            raise NDWSError(e)
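
# Usage sketch for createIngestJob (hedged: `manager` stands in for whatever
# object carries this method, and the config dict contents are placeholders;
# the real schema is whatever validateConfig accepts). createIngestJob expects
# a JSON *string*, since it calls json.loads on its argument.
def post_ingest_job(manager, user_id, config_dict):
    """Serialize a config dict and submit it as an ingest job."""
    return manager.createIngestJob(user_id, json.dumps(config_dict))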
def setup_class(self):
    """Setup class parameters"""
    # create the tile index table. skip if it exists
    try:
        TileIndexDB.createTable(endpoint_url="http://localhost:8000")
        CuboidIndexDB.createTable(endpoint_url="http://localhost:8000")
    except Exception:
        pass
    self.tileindex_db = TileIndexDB(nd_proj.project_name,
                                    endpoint_url="http://localhost:8000")

    # create the tile bucket
    TileBucket.createBucket(endpoint_url="http://localhost:4567")
    self.tile_bucket = TileBucket(nd_proj.project_name,
                                  endpoint_url="http://localhost:4567")
    self.tiles = [self.x_tile, self.y_tile, self.z_tile] = [0, 0, 0]

    message_id = "testing"
    receipt_handle = "123456"
    # insert SUPER_CUBOID_SIZE tiles in the bucket
    # (range(), not the tuple the original iterated over)
    for z_index in range(self.z_tile, settings.SUPER_CUBOID_SIZE[2], 1):
        tile_handle = cStringIO.StringIO()
        self.tile_bucket.putObject(
            tile_handle,
            nd_proj.channel_name,
            nd_proj.resolution,
            self.x_tile,
            self.y_tile,
            z_index,
            message_id,
            receipt_handle,
        )

    # create the cuboid bucket
    CuboidBucket.createBucket(endpoint_url="http://localhost:4567")
    self.cuboid_bucket = CuboidBucket(nd_proj.project_name,
                                      endpoint_url="http://localhost:4567")

    # create the ingest queue
    IngestQueue.createQueue(nd_proj, endpoint_url="http://localhost:4568")
    self.ingest_queue = IngestQueue(nd_proj, endpoint_url="http://localhost:4568")

    # send message to the ingest queue
    morton_index = XYZMorton(self.tiles)
    supercuboid_key = self.cuboid_bucket.generateSupercuboidKey(
        nd_proj.channel_name, nd_proj.resolution, morton_index)
    response = self.ingest_queue.sendMessage(supercuboid_key)

    # create the cleanup queue
    CleanupQueue.createQueue(nd_proj, endpoint_url="http://localhost:4568")
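
# Key-derivation sketch for the supercuboid path used above (a restatement of
# calls already present in this file, not new API): XYZMorton interleaves the
# [x, y, z] tile coordinates into a single Morton index, which
# generateSupercuboidKey embeds in the object key.
def supercuboid_key_for(cuboid_bucket, channel_name, resolution, xyz):
    """Return the bucket key for the supercuboid covering tile coords xyz."""
    return cuboid_bucket.generateSupercuboidKey(channel_name, resolution,
                                                XYZMorton(xyz))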
def test_sendBatchMessages(sqs):
    fake_data0 = {"foo": "bar"}
    fake_data1 = {"john": "doe"}

    jsonized0 = json.dumps(fake_data0)
    jsonized1 = json.dumps(fake_data1)
    md5_0 = hashlib.md5(jsonized0.encode("utf-8")).hexdigest()
    md5_1 = hashlib.md5(jsonized1.encode("utf-8")).hexdigest()

    proj = generate_proj()

    from ndingest.ndqueue.cleanupqueue import CleanupQueue
    CleanupQueue.createQueue(proj)
    cleanup_queue = CleanupQueue(proj)

    try:
        response = cleanup_queue.sendBatchMessages([fake_data0, fake_data1], 0)
        assert "Successful" in response
        success_ids = []
        for msg_result in response["Successful"]:
            id = msg_result["Id"]
            success_ids.append(id)
            if id == "0":
                assert md5_0 == msg_result["MD5OfMessageBody"]
            elif id == "1":
                assert md5_1 == msg_result["MD5OfMessageBody"]
        assert "0" in success_ids
        assert "1" in success_ids
    finally:
        for message_id, receipt_handle, _ in cleanup_queue.receiveMessage():
            cleanup_queue.deleteMessage(message_id, receipt_handle)
def deleteIngestJob(self, job_id):
    """Delete an ingest job based on job id"""
    try:
        ingest_job = NDIngestJob.fromId(job_id)
        nd_proj = NDIngestProj(ingest_job.project, ingest_job.channel,
                               ingest_job.resolution)
        # delete the upload queue
        UploadQueue.deleteQueue(
            nd_proj, endpoint_url=ndingest_settings.SQS_ENDPOINT)
        # delete the ingest queue
        IngestQueue.deleteQueue(
            nd_proj, endpoint_url=ndingest_settings.SQS_ENDPOINT)
        # delete the cleanup queue
        CleanupQueue.deleteQueue(
            nd_proj, endpoint_url=ndingest_settings.SQS_ENDPOINT)
        ingest_job.status = INGEST_STATUS_DELETED
        ingest_job.save()
    except Exception as e:
        print(e)
        raise
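
# Usage sketch (hedged: `manager` again stands in for the object carrying
# deleteIngestJob; batching and the continue-on-error policy are illustrative
# choices, not behavior taken from the original code).
def purge_ingest_jobs(manager, job_ids):
    """Delete several ingest jobs, continuing past individual failures."""
    for job_id in job_ids:
        try:
            manager.deleteIngestJob(job_id)
        except Exception as e:
            print(e)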
class Test_Cleanup_Queue:

    def setup_class(self):
        """Setup class parameters"""
        if 'SQS_ENDPOINT' in dir(settings):
            self.endpoint_url = settings.SQS_ENDPOINT
        else:
            self.endpoint_url = None
        CleanupQueue.createQueue(nd_proj, endpoint_url=self.endpoint_url)
        self.cleanup_queue = CleanupQueue(nd_proj, endpoint_url=self.endpoint_url)

    def teardown_class(self):
        """Teardown parameters"""
        CleanupQueue.deleteQueue(nd_proj, endpoint_url=self.endpoint_url)

    def test_Message(self):
        """Testing the upload queue"""
        supercuboid_key = 'kasthuri11&image&0&0'
        self.cleanup_queue.sendMessage(supercuboid_key)
        for message_id, receipt_handle, message_body in self.cleanup_queue.receiveMessage():
            assert(supercuboid_key == message_body)
            response = self.cleanup_queue.deleteMessage(message_id, receipt_handle)
            assert('Successful' in response)

    def test_sendBatchMessages(self):
        fake_data0 = {'foo': 'bar'}
        fake_data1 = {'john': 'doe'}
        jsonized0 = json.dumps(fake_data0)
        jsonized1 = json.dumps(fake_data1)
        md5_0 = hashlib.md5(jsonized0.encode('utf-8')).hexdigest()
        md5_1 = hashlib.md5(jsonized1.encode('utf-8')).hexdigest()
        try:
            response = self.cleanup_queue.sendBatchMessages([fake_data0, fake_data1], 0)
            assert('Successful' in response)
            success_ids = []
            for msg_result in response['Successful']:
                id = msg_result['Id']
                success_ids.append(id)
                if id == '0':
                    assert(md5_0 == msg_result['MD5OfMessageBody'])
                elif id == '1':
                    assert(md5_1 == msg_result['MD5OfMessageBody'])
            assert('0' in success_ids)
            assert('1' in success_ids)
        finally:
            for message_id, receipt_handle, _ in self.cleanup_queue.receiveMessage():
                self.cleanup_queue.deleteMessage(message_id, receipt_handle)
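
# Side note on the endpoint fallback in setup_class above: getattr expresses
# the same "use settings.SQS_ENDPOINT if defined, else None" logic directly.
# Standard-library only; a behavior-equivalent sketch, not a required change.
def resolve_sqs_endpoint(settings_module):
    """Return settings_module.SQS_ENDPOINT if defined, else None."""
    return getattr(settings_module, 'SQS_ENDPOINT', None)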
def test_message(sqs):
    """Testing the upload queue"""
    proj = generate_proj()

    from ndingest.ndqueue.cleanupqueue import CleanupQueue
    CleanupQueue.createQueue(proj)
    cleanup_queue = CleanupQueue(proj)

    supercuboid_key = "kasthuri11&image&0&0"
    cleanup_queue.sendMessage(supercuboid_key)
    for message_id, receipt_handle, message_body in cleanup_queue.receiveMessage():
        assert supercuboid_key == message_body
        response = cleanup_queue.deleteMessage(message_id, receipt_handle)
        assert "Successful" in response
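
# Minimal end-to-end sketch consolidating the CleanupQueue calls exercised by
# the tests above (createQueue, sendMessage, receiveMessage, deleteMessage,
# deleteQueue). Assumes `proj` comes from generate_proj() and the default
# endpoint resolution; pass endpoint_url as in the class-based tests if a
# local emulator is used.
def cleanup_queue_roundtrip(proj, body="kasthuri11&image&0&0"):
    """Send one message through a fresh cleanup queue, then tear it down."""
    CleanupQueue.createQueue(proj)
    queue = CleanupQueue(proj)
    queue.sendMessage(body)
    for message_id, receipt_handle, message_body in queue.receiveMessage():
        queue.deleteMessage(message_id, receipt_handle)
    CleanupQueue.deleteQueue(proj)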
class Test_UploadLambda:

    def setup_class(self):
        """Setup class parameters"""
        # create the tile index table. skip if it exists
        try:
            TileIndexDB.createTable(endpoint_url=settings.DYNAMO_ENDPOINT)
            CuboidIndexDB.createTable(endpoint_url=settings.DYNAMO_ENDPOINT)
        except Exception:
            pass
        self.tileindex_db = TileIndexDB(nd_proj.project_name,
                                        endpoint_url=settings.DYNAMO_ENDPOINT)
        self.tile_bucket = TileBucket(nd_proj.project_name,
                                      endpoint_url=settings.S3_ENDPOINT)
        [self.x_tile, self.y_tile, self.z_tile] = [0, 0, 0]

        supercuboid_key = 'testing'
        message_id = '123456'
        receipt_handle = 'testing123456'
        message = serializer.encodeDeleteMessage(supercuboid_key, message_id,
                                                 receipt_handle)
        # insert message in the cleanup queue
        CleanupQueue.createQueue(nd_proj, endpoint_url=settings.SQS_ENDPOINT)
        self.cleanup_queue = CleanupQueue(nd_proj, endpoint_url=settings.SQS_ENDPOINT)
        self.cleanup_queue.sendMessage(message)

        # receive message and upload object
        for z_index in range(self.z_tile, settings.SUPER_CUBOID_SIZE[2], 1):
            tile_handle = cStringIO.StringIO()
            self.tile_bucket.putObject(tile_handle, nd_proj.channel_name,
                                       nd_proj.resolution, self.x_tile,
                                       self.y_tile, z_index, message_id,
                                       receipt_handle)

    def teardown_class(self):
        """Teardown class parameters"""
        TileIndexDB.deleteTable(endpoint_url=settings.DYNAMO_ENDPOINT)
        CuboidIndexDB.deleteTable(endpoint_url=settings.DYNAMO_ENDPOINT)
        CleanupQueue.deleteQueue(nd_proj, endpoint_url=settings.SQS_ENDPOINT)

    def test_Uploadevent(self):
        """Testing the event"""
        # create an emulambda function
        func = emulambda.import_lambda('cleanuplambda.lambda_handler')
        # create an emulambda event
        event = emulambda.parse_event(
            open('../ndlambda/functions/cleanup/cleanup_event.json').read())
        # call the emulambda function to invoke a lambda
        emulambda.invoke_lambda(func, event, None, 0, None)

        # test that no tiles are left over in the tile bucket
        for z_index in range(self.z_tile, settings.SUPER_CUBOID_SIZE[2], 1):
            tile = self.tile_bucket.getObject(nd_proj.channel_name,
                                              nd_proj.resolution, self.x_tile,
                                              self.y_tile, z_index)
            assert (tile is None)

        # check that no entries are left in the tileindex table
        supercuboid_key = self.tileindex_db.generatePrimaryKey(
            nd_proj.channel_name, nd_proj.resolution,
            self.x_tile, self.y_tile, self.z_tile)
        item = self.tileindex_db.getItem(supercuboid_key)
        assert (item is None)

        # test whether the message was deleted from the cleanup queue
        for message in self.cleanup_queue.receiveMessage():
            # KL TODO write the message id into the JSON event file directly
            print(message)