Example #1
    def createIngestJob(self, user_id, config_data):
        """Create an ingest job based on the posted config data"""

        config_data = json.loads(config_data)
        # validate schema
        if self.validateConfig(config_data):
            try:
                # create the upload queue
                UploadQueue.createQueue(
                    self.nd_proj, endpoint_url=ndingest_settings.SQS_ENDPOINT)
                self.upload_queue = UploadQueue(
                    self.nd_proj, endpoint_url=ndingest_settings.SQS_ENDPOINT)
                self.ingest_job.upload_queue = self.upload_queue.url
                # create the ingest queue
                IngestQueue.createQueue(
                    self.nd_proj, endpoint_url=ndingest_settings.SQS_ENDPOINT)
                self.ingest_job.ingest_queue = IngestQueue(
                    self.nd_proj,
                    endpoint_url=ndingest_settings.SQS_ENDPOINT).url
                # create the cleanup queue
                CleanupQueue.createQueue(
                    self.nd_proj, endpoint_url=ndingest_settings.SQS_ENDPOINT)
                self.ingest_job.cleanup_queue = CleanupQueue(
                    self.nd_proj,
                    endpoint_url=ndingest_settings.SQS_ENDPOINT).url
                self.generateUploadTasks()
                self.ingest_job.user_id = user_id
                self.ingest_job.save()
                return NDIngestJob.serialize(self.ingest_job._job)

            except Exception as e:
                print(e)
                raise NDWSError(e)
Example #2
def test_create_queue_with_default_name(sqs):
    from ndingest.ndqueue.uploadqueue import UploadQueue

    proj = generate_proj()

    # Create upload queue.
    UploadQueue.createQueue(proj)
    upload_queue = UploadQueue(proj)

    # Create dead letter queue with default name.
    exp_max_receives = 4
    dl_queue = upload_queue.addDeadLetterQueue(exp_max_receives)

    exp_name = upload_queue.queue_name + "-dlq"
    exp_arn = dl_queue.attributes["QueueArn"]

    try:
        policy = json.loads(upload_queue.queue.attributes["RedrivePolicy"])
        assert exp_max_receives == policy["maxReceiveCount"]
        assert exp_arn == policy["deadLetterTargetArn"]
        # Confirm dead letter queue named correctly by looking at the end
        # of its ARN.
        assert dl_queue.attributes["QueueArn"].endswith(exp_name)
    finally:
        dl_queue.delete()
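For reference, addDeadLetterQueue appears to wrap SQS's redrive-policy mechanism. A minimal sketch of the equivalent plain boto3 calls (the queue names, region, and receive count below are placeholders, not ndingest's defaults):

import json
import boto3

sqs = boto3.resource("sqs", region_name="us-east-1")

# Source queue and its dead letter queue (names are illustrative).
source = sqs.create_queue(QueueName="upload-queue-example")
dlq = sqs.create_queue(QueueName="upload-queue-example-dlq")

# Messages received more than maxReceiveCount times are moved to the DLQ.
redrive_policy = {
    "maxReceiveCount": 4,
    "deadLetterTargetArn": dlq.attributes["QueueArn"],
}
source.set_attributes(Attributes={"RedrivePolicy": json.dumps(redrive_policy)})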
Example #3
def test_delete_dead_letter_queue(sqs):
    from ndingest.ndqueue.uploadqueue import NDQueue
    from ndingest.ndqueue.uploadqueue import UploadQueue

    proj = generate_proj()

    # Create existing queue for dead letter queue.
    queue_name = "deadletter_test_{}".format(randint(1000, 9999))
    existing_queue = sqs.create_queue(
        QueueName=queue_name,
        Attributes={
            "DelaySeconds": "0",
            "MaximumMessageSize": "262144"
        },
    )

    # Create upload queue.
    arn = existing_queue.attributes["QueueArn"]
    UploadQueue.createQueue(proj)
    upload_queue = UploadQueue(proj)

    # Attach the dead letter queue to it.
    dl_queue = upload_queue.addDeadLetterQueue(2, arn)

    # Invoke the delete method.
    NDQueue.deleteDeadLetterQueue(sqs, upload_queue.queue)

    # Confirm deletion.
    with pytest.raises(botocore.exceptions.ClientError):
        sqs.get_queue_by_name(QueueName=queue_name)
Example #4
def test_sendBatchMessages(sqs):
    fake_data0 = {"foo": "bar"}
    fake_data1 = {"john": "doe"}
    jsonized0 = json.dumps(fake_data0)
    jsonized1 = json.dumps(fake_data1)
    md5_0 = hashlib.md5(jsonized0.encode("utf-8")).hexdigest()
    md5_1 = hashlib.md5(jsonized1.encode("utf-8")).hexdigest()

    proj = generate_proj()

    from ndingest.ndqueue.uploadqueue import UploadQueue

    UploadQueue.createQueue(proj)
    upload_queue = UploadQueue(proj)

    try:
        response = upload_queue.sendBatchMessages([jsonized0, jsonized1], 0)
        assert "Successful" in response
        success_ids = []
        for msg_result in response["Successful"]:
            id = msg_result["Id"]
            success_ids.append(id)
            if id == "0":
                assert md5_0 == msg_result["MD5OfMessageBody"]
            elif id == "1":
                assert md5_1 == msg_result["MD5OfMessageBody"]

        assert "0" in success_ids
        assert "1" in success_ids
    finally:
        for message_id, receipt_handle, _ in upload_queue.receiveMessage():
            upload_queue.deleteMessage(message_id, receipt_handle)
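sendBatchMessages presumably maps onto SQS's batch send API. A rough plain boto3 equivalent (the queue name, entry IDs, and delay are illustrative):

import json
import boto3

sqs = boto3.resource("sqs", region_name="us-east-1")
queue = sqs.create_queue(QueueName="batch-send-example")  # placeholder queue

entries = [
    # Each entry needs an Id that is unique within the batch; SQS echoes it back.
    {"Id": "0", "MessageBody": json.dumps({"foo": "bar"}), "DelaySeconds": 0},
    {"Id": "1", "MessageBody": json.dumps({"john": "doe"}), "DelaySeconds": 0},
]
response = queue.send_messages(Entries=entries)

# Successful entries report the MD5 of each message body, as asserted above.
for result in response["Successful"]:
    print(result["Id"], result["MD5OfMessageBody"])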
Example #5
    def test_delete_dead_letter_queue(self):
        # Create existing queue for dead letter queue.
        sqs = boto3.resource(
            'sqs',
            region_name=settings.REGION_NAME,
            endpoint_url=self.endpoint_url,
            aws_access_key_id=settings.AWS_ACCESS_KEY_ID,
            aws_secret_access_key=settings.AWS_SECRET_ACCESS_KEY)
        queue_name = 'deadletter_test_{}'.format(randint(100, 999))
        existing_queue = sqs.create_queue(QueueName=queue_name,
                                          Attributes={
                                              'DelaySeconds': '0',
                                              'MaximumMessageSize': '262144'
                                          })

        # Create upload queue.
        arn = existing_queue.attributes['QueueArn']
        UploadQueue.createQueue(self.nd_proj, endpoint_url=self.endpoint_url)
        self.upload_queue = UploadQueue(self.nd_proj,
                                        endpoint_url=self.endpoint_url)

        # Attach the dead letter queue to it.
        dl_queue = self.upload_queue.addDeadLetterQueue(2, arn)

        # Invoke the delete method.
        NDQueue.deleteDeadLetterQueue(sqs, self.upload_queue.queue)

        # Confirm deletion.
        with self.assertRaises(botocore.exceptions.ClientError):
            sqs.get_queue_by_name(QueueName=queue_name)
Example #6
    def create_upload_queue(self):
        """

        Returns:

        """
        UploadQueue.createQueue(self.nd_proj, endpoint_url=None)
        queue = UploadQueue(self.nd_proj, endpoint_url=None)
        return queue
Example #7
    def create_upload_queue(self):
        """
        Create an upload queue for an ingest job using the ndingest library
        Returns:
            UploadQueue : Returns an upload queue object

        """
        UploadQueue.createQueue(self.nd_proj, endpoint_url=None)
        queue = UploadQueue(self.nd_proj, endpoint_url=None)
        return queue
Example #10
    def setUpClass(cls):
        # Suppress warnings about Boto3's unclosed sockets.
        warnings.simplefilter('ignore')

        # Use ndingest in test mode.
        os.environ['NDINGEST_TEST'] = '1'

        cls.job_id = 125
        cls.nd_proj = BossIngestProj('testCol', 'kasthuri11', 'image', 0,
                                     cls.job_id)
        UploadQueue.createQueue(cls.nd_proj)
        cls.upload_queue = UploadQueue(cls.nd_proj)
Example #11
  def populateQueue(self, tile_size=1024, time_interval=1):
    """Populate the message queue"""
    
    # setup the queue
    queue_name = UploadQueue.createQueue([self.proj.getProjectName(), self.channel, str(self.resolution)])
    upload_queue = UploadQueue([self.proj.getProjectName(), self.channel, str(self.resolution)])
    
    # load the image sizes
    [[ximage_size, yimage_size, zimage_size],(start_time, end_time)] = self.proj.datasetcfg.imageSize(self.resolution)
    # load the image offsets
    [x_offset, y_offset, z_offset] = self.proj.datasetcfg.getOffset()[self.resolution]

    # calculate the number of tiles
    # TODO KL account for xoffset and yoffset here
    num_xtiles = ximage_size // tile_size   # integer division so range() below gets ints
    num_ytiles = yimage_size // tile_size
    
    # iterate over time
    for time in range(start_time, end_time+1, time_interval):
    
      # iterate over the x and y range
      for ytile in range(0, num_ytiles, 1):
        for xtile in range(0, num_xtiles, 1):
        
          # iterate over zrange
          for ztile in range(z_offset, zimage_size, 1):
            
            time_range = None if end_time - start_time == 0 else [time, time_interval]
            # generate a message for each one
            print("inserting message:x{}y{}z{}".format(xtile, ytile, ztile))
            message = UploadMessage.encode(self.proj.getProjectName(), self.channel, self.resolution, xtile, ytile, ztile, time_range)
            response = upload_queue.sendMessage(message)
            print(response)

    return queue_name
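If the image extent is not an exact multiple of tile_size, the floor division used above drops the partial edge tiles; a ceiling-division variant (with illustrative extents) covers them:

import math

ximage_size, yimage_size = 5000, 3000   # illustrative image extents
tile_size = 1024

# Ceiling division so a partial tile at the edge still gets an upload message.
num_xtiles = math.ceil(ximage_size / tile_size)   # 5
num_ytiles = math.ceil(yimage_size / tile_size)   # 3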
Example #12
 def setup_class(self):
     """Setup class parameters"""
     # create the tile index table. skip if it exists
     try:
         TileIndexDB.createTable(endpoint_url=settings.DYNAMO_ENDPOINT)
     except Exception as e:
         pass
     self.tileindex_db = TileIndexDB(nd_proj.project_name,
                                     endpoint_url=settings.DYNAMO_ENDPOINT)
     # create the ingest queue
     IngestQueue.createQueue(nd_proj, endpoint_url=settings.SQS_ENDPOINT)
     # create the upload queue
     UploadQueue.createQueue(nd_proj, endpoint_url=settings.SQS_ENDPOINT)
     self.upload_queue = UploadQueue(nd_proj,
                                     endpoint_url=settings.SQS_ENDPOINT)
     tile_bucket = TileBucket(nd_proj.project_name,
                              endpoint_url=settings.S3_ENDPOINT)
     [self.x_tile, self.y_tile, self.z_tile] = [0, 0, 0]
     message = serializer.encodeUploadMessage(
         nd_proj.project_name,
         nd_proj.channel_name,
         nd_proj.resolution,
         self.x_tile,
         self.y_tile,
         self.z_tile,
     )
     # insert message in the upload queue
     self.upload_queue.sendMessage(message)
     # receive message and upload object
     for (
             message_id,
             receipt_handle,
             message_body,
     ) in self.upload_queue.receiveMessage():
         tile_handle = cStringIO.StringIO()
         tile_bucket.putObject(
             tile_handle,
             nd_proj.channel_name,
             nd_proj.resolution,
             self.x_tile,
             self.y_tile,
             self.z_tile,
             message_id,
             receipt_handle,
         )
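putObject above stores the tile alongside the queue message identifiers, presumably so a downstream consumer can delete the message once the tile is ingested. A rough plain boto3 sketch of that idea (the bucket name, key layout, and metadata fields are assumptions, not ndingest's actual scheme):

import boto3

s3 = boto3.resource("s3")
tile_key = "image/0/0_0_0"   # hypothetical channel/resolution/x_y_z layout

s3.Object("tile-bucket-example", tile_key).put(
    Body=b"",   # tile bytes would go here
    Metadata={
        "message_id": "placeholder-message-id",
        "receipt_handle": "placeholder-receipt-handle",
    },
)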
Example #13
def boss_util_fixtures(tile_bucket, sqs):
    job_id = 123
    nd_proj = BossIngestProj("testCol", "kasthuri11", "image", 0, job_id)

    from ndingest.ndqueue.uploadqueue import UploadQueue

    UploadQueue.createQueue(nd_proj)
    upload_queue = UploadQueue(nd_proj)

    from ndingest.ndqueue.tileindexqueue import TileIndexQueue

    TileIndexQueue.createQueue(nd_proj)
    tile_index_queue = TileIndexQueue(nd_proj)

    def get_test_data():
        return (nd_proj, upload_queue, tile_index_queue, tile_bucket)

    yield get_test_data

    UploadQueue.deleteQueue(nd_proj)
    TileIndexQueue.deleteQueue(nd_proj)
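A test can consume this generator-style fixture by calling the returned function to unpack the shared objects; a minimal sketch (the test name and assertions are illustrative):

def test_queues_exist(boss_util_fixtures):
    # Unpack the objects prepared by the fixture.
    nd_proj, upload_queue, tile_index_queue, tile_bucket = boss_util_fixtures()

    # Both queues were created for the BossIngestProj of job 123.
    assert upload_queue.queue is not None
    assert tile_index_queue.queue is not None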
Example #14
    def test_add_existing_queue_as_dead_letter_queue(self):
        # Create existing queue for dead letter queue.
        sqs = boto3.resource(
            'sqs',
            region_name=settings.REGION_NAME,
            endpoint_url=self.endpoint_url,
            aws_access_key_id=settings.AWS_ACCESS_KEY_ID,
            aws_secret_access_key=settings.AWS_SECRET_ACCESS_KEY)
        queue_name = 'deadletter_test_{}'.format(randint(100, 999))
        existing_queue = sqs.create_queue(QueueName=queue_name,
                                          Attributes={
                                              'DelaySeconds': '0',
                                              'MaximumMessageSize': '262144'
                                          })

        exp_arn = existing_queue.attributes['QueueArn']

        try:
            # Create upload queue.
            UploadQueue.createQueue(self.nd_proj,
                                    endpoint_url=self.endpoint_url)
            self.upload_queue = UploadQueue(self.nd_proj,
                                            endpoint_url=self.endpoint_url)

            # Attach the dead letter queue to it.
            exp_max_receives = 5
            dl_queue = self.upload_queue.addDeadLetterQueue(
                exp_max_receives, exp_arn)

            # Confirm policy settings.
            policy = json.loads(
                self.upload_queue.queue.attributes['RedrivePolicy'])
            self.assertEqual(exp_max_receives, policy['maxReceiveCount'])
            self.assertEqual(exp_arn, policy['deadLetterTargetArn'])

            # Confirm dead letter queue is the one created at the beginning
            # of test.
            self.assertEqual(existing_queue.url, dl_queue.url)
        finally:
            existing_queue.delete()
Example #15
    def test_create_queue_with_default_name(self):
        # Create upload queue.
        UploadQueue.createQueue(self.nd_proj, endpoint_url=self.endpoint_url)
        self.upload_queue = UploadQueue(self.nd_proj,
                                        endpoint_url=self.endpoint_url)

        # Create dead letter queue with default name.
        exp_max_receives = 4
        dl_queue = self.upload_queue.addDeadLetterQueue(exp_max_receives)

        exp_name = self.upload_queue.queue_name + '_dead_letter'
        exp_arn = dl_queue.attributes['QueueArn']

        try:
            policy = json.loads(
                self.upload_queue.queue.attributes['RedrivePolicy'])
            self.assertEqual(exp_max_receives, policy['maxReceiveCount'])
            self.assertEqual(exp_arn, policy['deadLetterTargetArn'])
            # Confirm dead letter queue named correctly by looking at the end
            # of its ARN.
            self.assertTrue(dl_queue.attributes['QueueArn'].endswith(exp_name))
        finally:
            dl_queue.delete()
Example #16
def test_message(sqs):
    """Test put, get and delete Message"""

    x_tile = 0
    y_tile = 0

    proj = generate_proj()

    from ndingest.ndqueue.uploadqueue import UploadQueue

    UploadQueue.createQueue(proj)
    upload_queue = UploadQueue(proj)

    for z_tile in range(0, 2, 1):
        # encode the message
        message = serializer.encodeUploadMessage(
            proj.project_name,
            proj.channel_name,
            proj.resolution,
            x_tile,
            y_tile,
            z_tile,
        )
        # send message to the queue
        upload_queue.sendMessage(message)

    # receive message from the queue
    for message_id, receipt_handle, message_body in upload_queue.receiveMessage(
        number_of_messages=3
    ):
        # check if we get the tile_info back correctly
        assert message_body["z_tile"] in [0, 1, 2]
        # delete message from the queue
        response = upload_queue.deleteMessage(message_id, receipt_handle)
        # check if the message was successfully deleted
        assert "Successful" in response
Example #17
def test_add_existing_queue_as_dead_letter_queue(sqs):
    from ndingest.ndqueue.uploadqueue import UploadQueue

    proj = generate_proj()

    # Create existing queue for dead letter queue.
    queue_name = "deadletter_test_{}".format(randint(1000, 9999))
    existing_queue = sqs.create_queue(
        QueueName=queue_name,
        Attributes={
            "DelaySeconds": "0",
            "MaximumMessageSize": "262144"
        },
    )

    exp_arn = existing_queue.attributes["QueueArn"]

    try:
        # Create upload queue.
        UploadQueue.createQueue(proj)
        upload_queue = UploadQueue(proj)

        # Attach the dead letter queue to it.
        exp_max_receives = 5
        dl_queue = upload_queue.addDeadLetterQueue(exp_max_receives, exp_arn)

        # Confirm policy settings.
        policy = json.loads(upload_queue.queue.attributes["RedrivePolicy"])
        assert exp_max_receives == policy["maxReceiveCount"]
        assert exp_arn == policy["deadLetterTargetArn"]

        # Confirm dead letter queue is the one created at the beginning
        # of test.
        assert existing_queue.url == dl_queue.url
    finally:
        existing_queue.delete()
Example #18
def test_createPolicy(sqs, iam):
    """Test policy creation"""

    proj = generate_proj()

    from ndingest.ndqueue.uploadqueue import UploadQueue

    UploadQueue.createQueue(proj)
    upload_queue = UploadQueue(proj)

    statements = [
        {
            "Sid": "ReceiveAccessStatement",
            "Effect": "Allow",
            "Action": ["sqs:ReceiveMessage"],
        }
    ]

    expName = upload_queue.generateQueueName(proj)
    expDesc = "Test policy creation"

    actual = upload_queue.createPolicy(statements, description=expDesc)

    try:
        assert expName == actual.policy_name
        assert expDesc == actual.description
        assert settings.IAM_POLICY_PATH == actual.path

        # Confirm resource set correctly to the upload queue.
        statements = actual.default_version.document["Statement"]
        arn = upload_queue.queue.attributes["QueueArn"]
        for stmt in statements:
            assert stmt["Resource"] == arn

    finally:
        actual.delete()
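createPolicy presumably builds an IAM policy whose Resource is the queue's ARN. A minimal sketch of the direct boto3 call it would correspond to (the policy name, path, and ARN are placeholders):

import json
import boto3

iam = boto3.resource("iam")

policy_document = {
    "Version": "2012-10-17",
    "Statement": [
        {
            "Sid": "ReceiveAccessStatement",
            "Effect": "Allow",
            "Action": ["sqs:ReceiveMessage"],
            "Resource": "arn:aws:sqs:us-east-1:123456789012:upload-queue-example",
        }
    ],
}

policy = iam.create_policy(
    PolicyName="upload-queue-example",
    Path="/example/",
    PolicyDocument=json.dumps(policy_document),
    Description="Test policy creation",
)
print(policy.policy_name, policy.default_version.document["Statement"])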