def test_generate_upload_tasks(self):
    """Test generating upload tasks for an ingest job"""
    try:
        ingest_mgmr = IngestManager()
        ingest_job = ingest_mgmr.setup_ingest(self.user.id, self.example_config_data)
        ingest_mgmr.generate_upload_tasks(ingest_job.id)
        assert (ingest_job.collection == 'my_col_1')
        assert (ingest_job.experiment == 'my_exp_1')
        assert (ingest_job.channel == 'my_ch_1')

        # Pull the messages off the queue
        proj_class = BossIngestProj.load()
        nd_proj = proj_class(ingest_job.collection, ingest_job.experiment, ingest_job.channel,
                             ingest_job.resolution, ingest_job.id)
        queue = UploadQueue(nd_proj, endpoint_url=None)

        # receive messages from the queue
        tmp = queue.receiveMessage(number_of_messages=10)
        for message_id, receipt_handle, message_body in tmp:
            assert (message_body['job_id'] == ingest_job.id)

            # delete message from the queue
            response = queue.deleteMessage(message_id, receipt_handle)
            assert ('Successful' in response)

        ingest_mgmr.remove_ingest_credentials(ingest_job.id)
    except:
        raise
    finally:
        ingest_mgmr.delete_upload_queue()
        ingest_mgmr.delete_ingest_queue()

def delete_upload_queue(self):
    """
    Delete the current upload queue

    Returns:
        None
    """
    UploadQueue.deleteQueue(self.nd_proj, endpoint_url=None)

def test_create_queue_with_default_name(sqs):
    from ndingest.ndqueue.uploadqueue import UploadQueue

    proj = generate_proj()

    # Create upload queue.
    UploadQueue.createQueue(proj)
    upload_queue = UploadQueue(proj)

    # Create dead letter queue with default name.
    exp_max_receives = 4
    dl_queue = upload_queue.addDeadLetterQueue(exp_max_receives)
    exp_name = upload_queue.queue_name + "-dlq"
    exp_arn = dl_queue.attributes["QueueArn"]

    try:
        policy = json.loads(upload_queue.queue.attributes["RedrivePolicy"])
        assert exp_max_receives == policy["maxReceiveCount"]
        assert exp_arn == policy["deadLetterTargetArn"]

        # Confirm dead letter queue named correctly by looking at the end
        # of its ARN.
        assert dl_queue.attributes["QueueArn"].endswith(exp_name)
    finally:
        dl_queue.delete()

def createIngestJob(self, user_id, config_data):
    """Create an ingest job based on the posted config data"""
    config_data = json.loads(config_data)
    # validate schema
    if self.validateConfig(config_data):
        try:
            # create the upload queue
            UploadQueue.createQueue(
                self.nd_proj, endpoint_url=ndingest_settings.SQS_ENDPOINT)
            self.upload_queue = UploadQueue(
                self.nd_proj, endpoint_url=ndingest_settings.SQS_ENDPOINT)
            self.ingest_job.upload_queue = self.upload_queue.url

            # create the ingest queue
            IngestQueue.createQueue(
                self.nd_proj, endpoint_url=ndingest_settings.SQS_ENDPOINT)
            self.ingest_job.ingest_queue = IngestQueue(
                self.nd_proj, endpoint_url=ndingest_settings.SQS_ENDPOINT).url

            # create the cleanup queue
            CleanupQueue.createQueue(
                self.nd_proj, endpoint_url=ndingest_settings.SQS_ENDPOINT)
            self.ingest_job.cleanup_queue = CleanupQueue(
                self.nd_proj, endpoint_url=ndingest_settings.SQS_ENDPOINT).url

            self.generateUploadTasks()
            self.ingest_job.user_id = user_id
            self.ingest_job.save()
            return NDIngestJob.serialize(self.ingest_job._job)
        except Exception as e:
            print(e)
            raise NDWSError(e)

def test_delete_dead_letter_queue(sqs):
    from ndingest.ndqueue.uploadqueue import NDQueue
    from ndingest.ndqueue.uploadqueue import UploadQueue

    proj = generate_proj()

    # Create existing queue for dead letter queue.
    queue_name = "deadletter_test_{}".format(randint(1000, 9999))
    existing_queue = sqs.create_queue(
        QueueName=queue_name,
        Attributes={"DelaySeconds": "0", "MaximumMessageSize": "262144"},
    )

    # Create upload queue.
    arn = existing_queue.attributes["QueueArn"]
    UploadQueue.createQueue(proj)
    upload_queue = UploadQueue(proj)

    # Attach the dead letter queue to it.
    dl_queue = upload_queue.addDeadLetterQueue(2, arn)

    # Invoke the delete method.
    NDQueue.deleteDeadLetterQueue(sqs, upload_queue.queue)

    # Confirm deletion.
    with pytest.raises(botocore.exceptions.ClientError):
        sqs.get_queue_by_name(QueueName=queue_name)

def test_delete_dead_letter_queue(self):
    # Create existing queue for dead letter queue.
    sqs = boto3.resource(
        'sqs',
        region_name=settings.REGION_NAME,
        endpoint_url=self.endpoint_url,
        aws_access_key_id=settings.AWS_ACCESS_KEY_ID,
        aws_secret_access_key=settings.AWS_SECRET_ACCESS_KEY)
    queue_name = 'deadletter_test_{}'.format(randint(100, 999))
    existing_queue = sqs.create_queue(
        QueueName=queue_name,
        Attributes={'DelaySeconds': '0', 'MaximumMessageSize': '262144'})

    # Create upload queue.
    arn = existing_queue.attributes['QueueArn']
    UploadQueue.createQueue(self.nd_proj, endpoint_url=self.endpoint_url)
    self.upload_queue = UploadQueue(self.nd_proj, endpoint_url=self.endpoint_url)

    # Attach the dead letter queue to it.
    dl_queue = self.upload_queue.addDeadLetterQueue(2, arn)

    # Invoke the delete method.
    NDQueue.deleteDeadLetterQueue(sqs, self.upload_queue.queue)

    # Confirm deletion.
    with self.assertRaises(botocore.exceptions.ClientError):
        sqs.get_queue_by_name(QueueName=queue_name)

def populateQueue(self, tile_size=1024, time_interval=1):
    """Populate the message queue"""
    # setup the queue
    queue_name = UploadQueue.createQueue(
        [self.proj.getProjectName(), self.channel, str(self.resolution)])
    upload_queue = UploadQueue(
        [self.proj.getProjectName(), self.channel, str(self.resolution)])

    # load the image sizes
    [[ximage_size, yimage_size, zimage_size], (start_time, end_time)] = \
        self.proj.datasetcfg.imageSize(self.resolution)
    # load the image offsets
    [x_offset, y_offset, z_offset] = self.proj.datasetcfg.getOffset()[self.resolution]

    # calculate the number of tiles (integer division so range() receives ints)
    # TODO KL account for xoffset and yoffset here
    num_xtiles = ximage_size // tile_size
    num_ytiles = yimage_size // tile_size

    # iterate over time
    for time in range(start_time, end_time + 1, time_interval):
        # iterate over the x and y range
        for ytile in range(0, num_ytiles, 1):
            for xtile in range(0, num_xtiles, 1):
                # iterate over zrange
                for ztile in range(z_offset, zimage_size, 1):
                    time_range = None if end_time - start_time == 0 else [time, time_interval]

                    # generate a message for each one
                    print("inserting message: x{} y{} z{}".format(xtile, ytile, ztile))
                    message = UploadMessage.encode(
                        self.proj.getProjectName(), self.channel, self.resolution,
                        xtile, ytile, ztile, time_range)
                    response = upload_queue.sendMessage(message)
                    print(response)

    return queue_name

def delete_upload_queue(self):
    """
    Delete the current upload queue

    Returns:
        None
    """
    UploadQueue.deleteQueue(self.nd_proj, endpoint_url=None)

def create_upload_queue(self):
    """
    Create an upload queue for an ingest job using the ndingest library

    Returns:
        UploadQueue : Returns an upload queue object
    """
    UploadQueue.createQueue(self.nd_proj, endpoint_url=None)
    queue = UploadQueue(self.nd_proj, endpoint_url=None)
    return queue

def create_upload_queue(self):
    """
    Create an upload queue for an ingest job using the ndingest library

    Returns:
        UploadQueue : Returns an upload queue object
    """
    UploadQueue.createQueue(self.nd_proj, endpoint_url=None)
    queue = UploadQueue(self.nd_proj, endpoint_url=None)
    return queue

def send_upload_task_message(self, msg):
    """
    Upload one message to the upload queue

    Args:
        msg: Message to send to the upload queue

    Returns:
        None
    """
    queue = UploadQueue(self.nd_proj, endpoint_url=None)
    queue.sendMessage(msg)

def setUpClass(cls):
    # Suppress warnings about Boto3's unclosed sockets.
    warnings.simplefilter('ignore')

    # Use ndingest in test mode.
    os.environ['NDINGEST_TEST'] = '1'

    cls.job_id = 125
    cls.nd_proj = BossIngestProj('testCol', 'kasthuri11', 'image', 0, cls.job_id)
    UploadQueue.createQueue(cls.nd_proj)
    cls.upload_queue = UploadQueue(cls.nd_proj)

def send_upload_message_batch(self, list_msg):
    """
    Upload a batch of up to 10 messages to the upload queue. An error is
    raised if more than 10 messages are in the batch.

    Args:
        list_msg: The list containing the messages to upload

    Returns:
        The response returned by the queue's sendBatchMessages call
    """
    queue = UploadQueue(self.nd_proj, endpoint_url=None)
    status = queue.sendBatchMessages(list_msg)
    return status

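# A minimal usage sketch (not from the original source): splitting a larger
# message list into chunks of at most 10 before calling send_upload_message_batch,
# since the docstring above notes that bigger batches raise an error. The names
# `ingest_mgr` and `messages` are hypothetical.
def send_in_batches(ingest_mgr, messages, batch_size=10):
    """Send messages to the upload queue in chunks of at most `batch_size`."""
    responses = []
    for start in range(0, len(messages), batch_size):
        # Each slice holds at most `batch_size` messages.
        responses.append(ingest_mgr.send_upload_message_batch(messages[start:start + batch_size]))
    return responses
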
def send_upload_task_message(self, msg):
    """
    Upload one message to the upload queue
    (Note: currently not used; replaced by send_upload_message_batch)

    Args:
        msg: Message to send to the upload queue

    Returns:
        None
    """
    queue = UploadQueue(self.nd_proj, endpoint_url=None)
    queue.sendMessage(msg)

def test_sendBatchMessages(sqs):
    fake_data0 = {"foo": "bar"}
    fake_data1 = {"john": "doe"}

    jsonized0 = json.dumps(fake_data0)
    jsonized1 = json.dumps(fake_data1)

    md5_0 = hashlib.md5(jsonized0.encode("utf-8")).hexdigest()
    md5_1 = hashlib.md5(jsonized1.encode("utf-8")).hexdigest()

    proj = generate_proj()

    from ndingest.ndqueue.uploadqueue import UploadQueue

    UploadQueue.createQueue(proj)
    upload_queue = UploadQueue(proj)

    try:
        response = upload_queue.sendBatchMessages([jsonized0, jsonized1], 0)
        assert "Successful" in response

        success_ids = []
        for msg_result in response["Successful"]:
            id = msg_result["Id"]
            success_ids.append(id)
            if id == "0":
                assert md5_0 == msg_result["MD5OfMessageBody"]
            elif id == "1":
                assert md5_1 == msg_result["MD5OfMessageBody"]

        assert "0" in success_ids
        assert "1" in success_ids
    finally:
        for message_id, receipt_handle, _ in upload_queue.receiveMessage():
            upload_queue.deleteMessage(message_id, receipt_handle)

def test_not_creator_admin(self):
    """Method to test only creators or admins can interact with ingest jobs"""
    config_data = self.setup_helper.get_ingest_config_data_dict()
    config_data = json.loads(json.dumps(config_data))

    # Post the data
    url = '/' + version + '/ingest/'
    response = self.client.post(url, data=config_data, format='json')
    assert (response.status_code == 201)

    # Check if the queues exist
    ingest_job = response.json()
    proj_class = BossIngestProj.load()
    nd_proj = proj_class(ingest_job['collection'], ingest_job['experiment'],
                         ingest_job['channel'], 0, ingest_job['id'])
    self.nd_proj = nd_proj
    upload_queue = UploadQueue(nd_proj, endpoint_url=None)
    assert (upload_queue is not None)
    ingest_queue = IngestQueue(nd_proj, endpoint_url=None)
    assert (ingest_queue is not None)

    # Log in as the admin and create a job
    self.client.force_login(self.superuser)

    # Test joining the job
    url = '/' + version + '/ingest/{}/'.format(ingest_job['id'])
    response = self.client.get(url)
    assert (response.status_code == 200)
    assert (response.json()['ingest_job']['id'] == ingest_job['id'])
    assert ("credentials" in response.json())

    # Delete the job
    url = '/' + version + '/ingest/{}/'.format(ingest_job['id'])
    response = self.client.delete(url)
    assert (response.status_code == 204)

def test_setup_ingest(self):
    """Method to test the setup_ingest method"""
    try:
        ingest_mgmr = IngestManager()
        ingest_job = ingest_mgmr.setup_ingest(self.user.id, self.example_config_data)
        assert (ingest_job is not None)

        # Check if the queues exist
        proj_class = BossIngestProj.load()
        nd_proj = proj_class(ingest_job.collection, ingest_job.experiment, ingest_job.channel,
                             ingest_job.resolution, ingest_job.id)
        ingest_mgmr.nd_proj = nd_proj
        upload_queue = UploadQueue(nd_proj, endpoint_url=None)
        assert (upload_queue is not None)
        ingest_queue = IngestQueue(nd_proj, endpoint_url=None)
        assert (ingest_queue is not None)

        ingest_mgmr.remove_ingest_credentials(ingest_job.id)
    except:
        raise
    finally:
        ingest_mgmr.delete_upload_queue()
        ingest_mgmr.delete_ingest_queue()

def test_post_new_ingest_job(self):
    """ Test view to create a new ingest job """
    config_data = self.setup_helper.get_ingest_config_data_dict()
    config_data = json.loads(json.dumps(config_data))

    # Post the data
    url = '/' + version + '/ingest/'
    response = self.client.post(url, data=config_data, format='json')
    assert (response.status_code == 201)

    # Check if the queues exist
    ingest_job = response.json()
    proj_class = BossIngestProj.load()
    nd_proj = proj_class(ingest_job['collection'], ingest_job['experiment'],
                         ingest_job['channel_layer'], 0, ingest_job['id'])
    self.nd_proj = nd_proj
    upload_queue = UploadQueue(nd_proj, endpoint_url=None)
    assert (upload_queue is not None)
    ingest_queue = IngestQueue(nd_proj, endpoint_url=None)
    assert (ingest_queue is not None)

    # Delete the job
    url = '/' + version + '/ingest/{}/'.format(ingest_job['id'])
    response = self.client.delete(url)
    assert (response.status_code == 204)

def setup_class(self):
    """Setup class parameters"""
    # create the tile index table. skip if it exists
    try:
        TileIndexDB.createTable(endpoint_url=settings.DYNAMO_ENDPOINT)
    except Exception as e:
        pass
    self.tileindex_db = TileIndexDB(nd_proj.project_name, endpoint_url=settings.DYNAMO_ENDPOINT)

    # create the ingest queue
    IngestQueue.createQueue(nd_proj, endpoint_url=settings.SQS_ENDPOINT)

    # create the upload queue
    UploadQueue.createQueue(nd_proj, endpoint_url=settings.SQS_ENDPOINT)
    self.upload_queue = UploadQueue(nd_proj, endpoint_url=settings.SQS_ENDPOINT)

    tile_bucket = TileBucket(nd_proj.project_name, endpoint_url=settings.S3_ENDPOINT)
    [self.x_tile, self.y_tile, self.z_tile] = [0, 0, 0]
    message = serializer.encodeUploadMessage(
        nd_proj.project_name,
        nd_proj.channel_name,
        nd_proj.resolution,
        self.x_tile,
        self.y_tile,
        self.z_tile,
    )

    # insert message in the upload queue
    self.upload_queue.sendMessage(message)

    # receive message and upload object
    for message_id, receipt_handle, message_body in self.upload_queue.receiveMessage():
        tile_handle = cStringIO.StringIO()
        tile_bucket.putObject(
            tile_handle,
            nd_proj.channel_name,
            nd_proj.resolution,
            self.x_tile,
            self.y_tile,
            self.z_tile,
            message_id,
            receipt_handle,
        )

def test_post_new_volumetric_ingest_job(self):
    """ Test view to create a new volumetric ingest job """
    config_data = self.setup_helper.get_ingest_config_data_dict_volumetric()
    config_data = json.loads(json.dumps(config_data))

    # Post the data
    url = '/' + version + '/ingest/'
    response = self.client.post(url, data=config_data, format='json')
    self.assertEqual(201, response.status_code)

    # Check if the queues exist
    ingest_job = response.json()
    proj_class = BossIngestProj.load()
    nd_proj = proj_class(ingest_job['collection'], ingest_job['experiment'],
                         ingest_job['channel'], 0, ingest_job['id'])
    self.nd_proj = nd_proj
    upload_queue = UploadQueue(nd_proj, endpoint_url=None)
    self.assertIsNotNone(upload_queue)

    # There shouldn't be an ingest queue for a volumetric ingest
    with self.assertRaises(ClientError):
        IngestQueue(nd_proj, endpoint_url=None)

    # Test joining the job
    url = '/' + version + '/ingest/{}/'.format(ingest_job['id'])
    response = self.client.get(url)
    self.assertEqual(response.json()['ingest_job']['id'], ingest_job['id'])
    self.assertIn("credentials", response.json())

    # Delete the job
    url = '/' + version + '/ingest/{}/'.format(ingest_job['id'])
    response = self.client.delete(url)
    self.assertEqual(204, response.status_code)

    # Verify queues are removed
    with self.assertRaises(ClientError):
        UploadQueue(nd_proj, endpoint_url=None)

    # Verify the job is deleted
    url = '/' + version + '/ingest/{}/status'.format(ingest_job['id'])
    response = self.client.get(url)
    self.assertEqual(response.status_code, 404)

def enqueue_msgs(fp):
    """Parse given messages and send to SQS queue.

    Args:
        fp (file-like-object): File-like-object containing a header and messages.
    """
    read_header = False
    msgs = []
    upload_queue = None
    lineNum = 0

    for line in fp:
        lineNum += 1
        if not read_header:
            header = json.loads(line)
            if 'upload_queue_url' not in header:
                raise KeyError('Expected upload_queue_url in header')
            if 'ingest_queue_url' not in header:
                raise KeyError('Expected ingest_queue_url in header')
            if 'job_id' not in header:
                raise KeyError('Expected job_id in header')
            read_header = True
            continue

        try:
            msgs.append(parse_line(header, line))
        except:
            print('Error parsing line {}: {}'.format(lineNum, line))

        if len(msgs) == 1 and upload_queue is None:
            # Instantiate the upload queue object.
            asDict = json.loads(msgs[0])
            boss_ingest_proj = BossIngestProj.fromTileKey(asDict['tile_key'])
            boss_ingest_proj.job_id = header['job_id']
            upload_queue = UploadQueue(boss_ingest_proj)

        if len(msgs) >= MAX_BATCH_MSGS:
            # Enqueue messages.
            upload_queue.sendBatchMessages(msgs)
            msgs = []

    if len(msgs) > 0:
        # Enqueue any remaining messages.
        upload_queue.sendBatchMessages(msgs)

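# A hypothetical illustration (not from the original source) of the input that
# enqueue_msgs expects: a JSON header line with upload_queue_url, ingest_queue_url
# and job_id, followed by one message per line in whatever format parse_line()
# understands. The URLs, job id, and placeholder message line below are made up.
import io
import json

example_header = json.dumps({
    "upload_queue_url": "https://sqs.example.com/upload-queue",
    "ingest_queue_url": "https://sqs.example.com/ingest-queue",
    "job_id": 123,
})
example_input = io.StringIO(example_header + "\n<message line understood by parse_line()>\n")
# enqueue_msgs(example_input)  # would read the header, then batch and send the messages
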
def test_complete_ingest_job(self):
    """ Test view to create and complete a new ingest job """
    config_data = self.setup_helper.get_ingest_config_data_dict()
    config_data = json.loads(json.dumps(config_data))

    # Post the data
    url = '/' + version + '/ingest/'
    response = self.client.post(url, data=config_data, format='json')
    self.assertEqual(response.status_code, 201)

    # Check if the queues exist
    ingest_job = response.json()
    proj_class = BossIngestProj.load()
    nd_proj = proj_class(ingest_job['collection'], ingest_job['experiment'],
                         ingest_job['channel'], 0, ingest_job['id'])
    self.nd_proj = nd_proj
    upload_queue = UploadQueue(nd_proj, endpoint_url=None)
    self.assertIsNotNone(upload_queue)
    ingest_queue = IngestQueue(nd_proj, endpoint_url=None)
    self.assertIsNotNone(ingest_queue)

    # Test joining the job
    url = '/' + version + '/ingest/{}/'.format(ingest_job['id'])
    response = self.client.get(url)
    self.assertEqual(response.json()['ingest_job']['id'], ingest_job['id'])
    self.assertIn("credentials", response.json())

    # Complete the job
    url = '/' + version + '/ingest/{}/complete'.format(ingest_job['id'])
    response = self.client.post(url)
    # Can't complete until it is done
    self.assertEqual(400, response.status_code)

    # Wait for job to complete
    print('trying to join job')
    for cnt in range(0, 30):
        # Try joining to kick the status
        url = '/' + version + '/ingest/{}/'.format(ingest_job['id'])
        self.client.get(url)
        url = '/' + version + '/ingest/{}/status'.format(ingest_job['id'])
        response = self.client.get(url)
        if response.json()["status"] == IngestJob.UPLOADING:
            break
        time.sleep(10)

    print('completing')
    # Complete the job
    url = '/' + version + '/ingest/{}/complete'.format(ingest_job['id'])
    response = self.client.post(url)
    self.assertEqual(204, response.status_code)

def deleteIngestJob(self, job_id):
    """Delete an ingest job based on job id"""
    try:
        ingest_job = NDIngestJob.fromId(job_id)
        nd_proj = NDIngestProj(ingest_job.project, ingest_job.channel, ingest_job.resolution)

        # delete the upload queue
        UploadQueue.deleteQueue(
            nd_proj, endpoint_url=ndingest_settings.SQS_ENDPOINT)

        # delete the ingest queue
        IngestQueue.deleteQueue(
            nd_proj, endpoint_url=ndingest_settings.SQS_ENDPOINT)

        # delete the cleanup queue
        CleanupQueue.deleteQueue(
            nd_proj, endpoint_url=ndingest_settings.SQS_ENDPOINT)

        ingest_job.status = INGEST_STATUS_DELETED
        ingest_job.save()
    except Exception as e:
        print(e)
        raise

def boss_util_fixtures(tile_bucket, sqs):
    job_id = 123
    nd_proj = BossIngestProj("testCol", "kasthuri11", "image", 0, job_id)

    from ndingest.ndqueue.uploadqueue import UploadQueue
    UploadQueue.createQueue(nd_proj)
    upload_queue = UploadQueue(nd_proj)

    from ndingest.ndqueue.tileindexqueue import TileIndexQueue
    TileIndexQueue.createQueue(nd_proj)
    tile_index_queue = TileIndexQueue(nd_proj)

    def get_test_data():
        return (nd_proj, upload_queue, tile_index_queue, tile_bucket)

    yield get_test_data

    UploadQueue.deleteQueue(nd_proj)
    TileIndexQueue.deleteQueue(nd_proj)

def test_add_existing_queue_as_dead_letter_queue(self):
    # Create existing queue for dead letter queue.
    sqs = boto3.resource(
        'sqs',
        region_name=settings.REGION_NAME,
        endpoint_url=self.endpoint_url,
        aws_access_key_id=settings.AWS_ACCESS_KEY_ID,
        aws_secret_access_key=settings.AWS_SECRET_ACCESS_KEY)
    queue_name = 'deadletter_test_{}'.format(randint(100, 999))
    existing_queue = sqs.create_queue(
        QueueName=queue_name,
        Attributes={'DelaySeconds': '0', 'MaximumMessageSize': '262144'})
    exp_arn = existing_queue.attributes['QueueArn']

    try:
        # Create upload queue.
        UploadQueue.createQueue(self.nd_proj, endpoint_url=self.endpoint_url)
        self.upload_queue = UploadQueue(self.nd_proj, endpoint_url=self.endpoint_url)

        # Attach the dead letter queue to it.
        exp_max_receives = 5
        dl_queue = self.upload_queue.addDeadLetterQueue(exp_max_receives, exp_arn)

        # Confirm policy settings.
        policy = json.loads(self.upload_queue.queue.attributes['RedrivePolicy'])
        self.assertEqual(exp_max_receives, policy['maxReceiveCount'])
        self.assertEqual(exp_arn, policy['deadLetterTargetArn'])

        # Confirm dead letter queue is the one created at the beginning
        # of test.
        self.assertEqual(existing_queue.url, dl_queue.url)
    finally:
        existing_queue.delete()

def get_ingest_job_upload_queue(self, ingest_job):
    """
    Return the upload queue for an ingest job

    Args:
        ingest_job: Ingest job model

    Returns:
        Ndingest.uploadqueue
    """
    proj_class = BossIngestProj.load()
    self.nd_proj = proj_class(ingest_job.collection, ingest_job.experiment,
                              ingest_job.channel, ingest_job.resolution, ingest_job.id)
    queue = UploadQueue(self.nd_proj, endpoint_url=None)
    return queue

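# A minimal usage sketch (not from the original source): draining the queue
# returned by get_ingest_job_upload_queue, mirroring the receiveMessage /
# deleteMessage pattern used in the tests above. `ingest_mgr` and `job` are
# hypothetical names.
def drain_upload_queue(ingest_mgr, job):
    """Receive and delete any messages currently on the job's upload queue."""
    queue = ingest_mgr.get_ingest_job_upload_queue(job)
    for message_id, receipt_handle, message_body in queue.receiveMessage(number_of_messages=10):
        # message_body is the decoded message dict; each received message is removed.
        queue.deleteMessage(message_id, receipt_handle)
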
def test_create_queue_with_default_name(self):
    # Create upload queue.
    UploadQueue.createQueue(self.nd_proj, endpoint_url=self.endpoint_url)
    self.upload_queue = UploadQueue(self.nd_proj, endpoint_url=self.endpoint_url)

    # Create dead letter queue with default name.
    exp_max_receives = 4
    dl_queue = self.upload_queue.addDeadLetterQueue(exp_max_receives)
    exp_name = self.upload_queue.queue_name + '_dead_letter'
    exp_arn = dl_queue.attributes['QueueArn']

    try:
        policy = json.loads(self.upload_queue.queue.attributes['RedrivePolicy'])
        self.assertEqual(exp_max_receives, policy['maxReceiveCount'])
        self.assertEqual(exp_arn, policy['deadLetterTargetArn'])

        # Confirm dead letter queue named correctly by looking at the end
        # of its ARN.
        self.assertTrue(dl_queue.attributes['QueueArn'].endswith(exp_name))
    finally:
        dl_queue.delete()

def test_add_existing_queue_as_dead_letter_queue(sqs):
    from ndingest.ndqueue.uploadqueue import UploadQueue

    proj = generate_proj()

    # Create existing queue for dead letter queue.
    queue_name = "deadletter_test_{}".format(randint(1000, 9999))
    existing_queue = sqs.create_queue(
        QueueName=queue_name,
        Attributes={"DelaySeconds": "0", "MaximumMessageSize": "262144"},
    )
    exp_arn = existing_queue.attributes["QueueArn"]

    try:
        # Create upload queue.
        UploadQueue.createQueue(proj)
        upload_queue = UploadQueue(proj)

        # Attach the dead letter queue to it.
        exp_max_receives = 5
        dl_queue = upload_queue.addDeadLetterQueue(exp_max_receives, exp_arn)

        # Confirm policy settings.
        policy = json.loads(upload_queue.queue.attributes["RedrivePolicy"])
        assert exp_max_receives == policy["maxReceiveCount"]
        assert exp_arn == policy["deadLetterTargetArn"]

        # Confirm dead letter queue is the one created at the beginning
        # of test.
        assert existing_queue.url == dl_queue.url
    finally:
        existing_queue.delete()

def test_message(sqs):
    """Test put, get and delete Message"""
    x_tile = 0
    y_tile = 0

    proj = generate_proj()

    from ndingest.ndqueue.uploadqueue import UploadQueue
    UploadQueue.createQueue(proj)
    upload_queue = UploadQueue(proj)

    for z_tile in range(0, 2, 1):
        # encode the message
        message = serializer.encodeUploadMessage(
            proj.project_name,
            proj.channel_name,
            proj.resolution,
            x_tile,
            y_tile,
            z_tile,
        )
        # send message to the queue
        upload_queue.sendMessage(message)

    # receive message from the queue
    for message_id, receipt_handle, message_body in upload_queue.receiveMessage(
        number_of_messages=3
    ):
        # check if we get the tile_info back correctly
        assert message_body["z_tile"] in [0, 1, 2]

        # delete message from the queue
        response = upload_queue.deleteMessage(message_id, receipt_handle)

        # check if the message was successfully deleted
        assert "Successful" in response

def test_createPolicy(sqs, iam):
    """Test policy creation"""
    proj = generate_proj()

    from ndingest.ndqueue.uploadqueue import UploadQueue
    UploadQueue.createQueue(proj)
    upload_queue = UploadQueue(proj)

    statements = [
        {
            "Sid": "ReceiveAccessStatement",
            "Effect": "Allow",
            "Action": ["sqs:ReceiveMessage"],
        }
    ]

    expName = upload_queue.generateQueueName(proj)
    expDesc = "Test policy creation"
    actual = upload_queue.createPolicy(statements, description=expDesc)

    try:
        assert expName == actual.policy_name
        assert expDesc == actual.description
        assert settings.IAM_POLICY_PATH == actual.path

        # Confirm resource set correctly to the upload queue.
        statements = actual.default_version.document["Statement"]
        arn = upload_queue.queue.attributes["QueueArn"]
        for stmt in statements:
            assert stmt["Resource"] == arn
    finally:
        actual.delete()

def tearDownClass(cls):
    UploadQueue.deleteQueue(cls.nd_proj)