Exemplo n.º 1
0
 def setup_class(self):
     """Set up shared fixtures: index tables, tile bucket and cleanup queue.

     Seeds the cleanup queue with one encoded delete message and uploads a
     column of placeholder tiles (z = z_tile .. SUPER_CUBOID_SIZE[2] - 1)
     into the tile bucket.
     """
     # create the tile index table. skip if it exists
     try:
         TileIndexDB.createTable(endpoint_url=settings.DYNAMO_ENDPOINT)
         CuboidIndexDB.createTable(endpoint_url=settings.DYNAMO_ENDPOINT)
     except Exception as e:
         # best-effort: the tables may already exist from an earlier run
         pass
     self.tileindex_db = TileIndexDB(nd_proj.project_name,
                                     endpoint_url=settings.DYNAMO_ENDPOINT)
     self.tile_bucket = TileBucket(nd_proj.project_name,
                                   endpoint_url=settings.S3_ENDPOINT)
     # single test tile at the origin
     [self.x_tile, self.y_tile, self.z_tile] = [0, 0, 0]
     supercuboid_key = 'testing'
     message_id = '123456'
     receipt_handle = 'testing123456'
     message = serializer.encodeDeleteMessage(supercuboid_key, message_id,
                                              receipt_handle)
     # insert message in the upload queue
     CleanupQueue.createQueue(nd_proj, endpoint_url=settings.SQS_ENDPOINT)
     self.cleanup_queue = CleanupQueue(nd_proj,
                                       endpoint_url=settings.SQS_ENDPOINT)
     self.cleanup_queue.sendMessage(message)
     # receive message and upload object
     for z_index in range(self.z_tile, settings.SUPER_CUBOID_SIZE[2], 1):
         # empty payload — only the object's existence matters to the tests
         tile_handle = cStringIO.StringIO()
         self.tile_bucket.putObject(tile_handle, nd_proj.channel_name,
                                    nd_proj.resolution, self.x_tile,
                                    self.y_tile, z_index, message_id,
                                    receipt_handle)
Exemplo n.º 2
0
def test_sendBatchMessages(sqs):
    """sendBatchMessages should enqueue both messages with correct body MD5s.

    Sends two JSON payloads in one batch, then checks that the SQS batch
    response reports both entries as successful and that each reported
    ``MD5OfMessageBody`` matches a locally computed hash.  The queue is
    drained in ``finally`` so later tests start from an empty queue.
    """
    fake_data0 = {"foo": "bar"}
    fake_data1 = {"john": "doe"}
    jsonized0 = json.dumps(fake_data0)
    jsonized1 = json.dumps(fake_data1)
    md5_0 = hashlib.md5(jsonized0.encode("utf-8")).hexdigest()
    md5_1 = hashlib.md5(jsonized1.encode("utf-8")).hexdigest()

    proj = generate_proj()

    from ndingest.ndqueue.cleanupqueue import CleanupQueue

    CleanupQueue.createQueue(proj)
    cleanup_queue = CleanupQueue(proj)

    try:
        # second argument is presumably a delivery delay of 0 seconds —
        # confirm against sendBatchMessages' signature
        response = cleanup_queue.sendBatchMessages([fake_data0, fake_data1], 0)
        assert "Successful" in response
        success_ids = []
        for msg_result in response["Successful"]:
            # `msg_id` instead of `id` to avoid shadowing the builtin
            msg_id = msg_result["Id"]
            success_ids.append(msg_id)
            if msg_id == "0":
                assert md5_0 == msg_result["MD5OfMessageBody"]
            elif msg_id == "1":
                assert md5_1 == msg_result["MD5OfMessageBody"]
        assert "0" in success_ids
        assert "1" in success_ids
    finally:
        # drain the queue regardless of assertion outcome
        for message_id, receipt_handle, _ in cleanup_queue.receiveMessage():
            cleanup_queue.deleteMessage(message_id, receipt_handle)
Exemplo n.º 3
0
    def createIngestJob(self, user_id, config_data):
        """Create an ingest job based on the posted config data.

        Parses ``config_data`` (a JSON string), validates it, creates the
        upload, ingest and cleanup SQS queues, stores their URLs on the
        ingest job, generates the upload tasks and persists the job.

        Args:
            user_id: identifier recorded on the ingest job.
            config_data: JSON-encoded ingest configuration.

        Returns:
            The serialized ingest job on success; ``None`` when
            ``validateConfig`` rejects the configuration (original
            fall-through behavior, preserved).

        Raises:
            NDWSError: if queue creation, task generation or saving fails.
        """
        config_data = json.loads(config_data)
        # validate schema
        if self.validateConfig(config_data):
            try:
                # create the upload queue
                UploadQueue.createQueue(
                    self.nd_proj, endpoint_url=ndingest_settings.SQS_ENDPOINT)
                self.upload_queue = UploadQueue(
                    self.nd_proj, endpoint_url=ndingest_settings.SQS_ENDPOINT)
                self.ingest_job.upload_queue = self.upload_queue.url
                # create the ingest queue
                IngestQueue.createQueue(
                    self.nd_proj, endpoint_url=ndingest_settings.SQS_ENDPOINT)
                self.ingest_job.ingest_queue = IngestQueue(
                    self.nd_proj,
                    endpoint_url=ndingest_settings.SQS_ENDPOINT).url
                # create the cleanup queue
                CleanupQueue.createQueue(
                    self.nd_proj, endpoint_url=ndingest_settings.SQS_ENDPOINT)
                self.ingest_job.cleanup_queue = CleanupQueue(
                    self.nd_proj,
                    endpoint_url=ndingest_settings.SQS_ENDPOINT).url
                self.generateUploadTasks()
                self.ingest_job.user_id = user_id
                self.ingest_job.save()
                return NDIngestJob.serialize(self.ingest_job._job)

            except Exception as e:
                print(e)
                # chain the original exception so the root cause survives
                raise NDWSError(e) from e
Exemplo n.º 4
0
 def setup_class(self):
   """Create the cleanup queue used by the tests.

   Uses ``settings.SQS_ENDPOINT`` when the setting is defined, otherwise
   falls back to ``None`` (the library's default endpoint).
   """
   # getattr with a default replaces the clunky `in dir(settings)` check
   self.endpoint_url = getattr(settings, 'SQS_ENDPOINT', None)
   CleanupQueue.createQueue(nd_proj, endpoint_url=self.endpoint_url)
   self.cleanup_queue = CleanupQueue(nd_proj, endpoint_url=self.endpoint_url)
Exemplo n.º 5
0
    def setup_class(self):
        """Set up index tables, buckets and queues, and seed test data.

        Uploads one column of placeholder tiles
        (z = z_tile .. SUPER_CUBOID_SIZE[2] - 1), creates the cuboid
        bucket, and sends a supercuboid key to the ingest queue.
        """
        # create the tile index table. skip if it exists
        try:
            TileIndexDB.createTable(endpoint_url="http://localhost:8000")
            CuboidIndexDB.createTable(endpoint_url="http://localhost:8000")
        except Exception as e:
            # best-effort: tables may already exist from an earlier run
            pass
        self.tileindex_db = TileIndexDB(nd_proj.project_name,
                                        endpoint_url="http://localhost:8000")

        # create the tile bucket
        TileBucket.createBucket(endpoint_url="http://localhost:4567")
        self.tile_bucket = TileBucket(nd_proj.project_name,
                                      endpoint_url="http://localhost:4567")
        self.tiles = [self.x_tile, self.y_tile, self.z_tile] = [0, 0, 0]

        message_id = "testing"
        receipt_handle = "123456"
        # insert SUPER_CUBOID_SIZE tiles in the bucket
        # BUG FIX: the original iterated the 3-tuple
        # (self.z_tile, SUPER_CUBOID_SIZE[2], 1) itself instead of a range,
        # so only three arbitrary z-indices were uploaded.
        for z_index in range(self.z_tile, settings.SUPER_CUBOID_SIZE[2], 1):
            tile_handle = cStringIO.StringIO()
            self.tile_bucket.putObject(
                tile_handle,
                nd_proj.channel_name,
                nd_proj.resolution,
                self.x_tile,
                self.y_tile,
                z_index,
                message_id,
                receipt_handle,
            )

        # creating the cuboid bucket
        CuboidBucket.createBucket(endpoint_url="http://localhost:4567")
        self.cuboid_bucket = CuboidBucket(nd_proj.project_name,
                                          endpoint_url="http://localhost:4567")

        # create the ingest queue
        IngestQueue.createQueue(nd_proj, endpoint_url="http://localhost:4568")
        self.ingest_queue = IngestQueue(nd_proj,
                                        endpoint_url="http://localhost:4568")

        # send message to the ingest queue
        morton_index = XYZMorton(self.tiles)
        supercuboid_key = self.cuboid_bucket.generateSupercuboidKey(
            nd_proj.channel_name, nd_proj.resolution, morton_index)
        response = self.ingest_queue.sendMessage(supercuboid_key)

        # create the cleanup queue
        CleanupQueue.createQueue(nd_proj, endpoint_url="http://localhost:4568")
Exemplo n.º 6
0
def test_message(sqs):
    """Round-trip one message through the cleanup queue and delete it."""
    proj = generate_proj()

    from ndingest.ndqueue.cleanupqueue import CleanupQueue

    CleanupQueue.createQueue(proj)
    queue = CleanupQueue(proj)

    key = "kasthuri11&image&0&0"
    queue.sendMessage(key)

    for msg_id, handle, body in queue.receiveMessage():
        # the body must come back exactly as it was sent
        assert key == body
        result = queue.deleteMessage(msg_id, handle)
        assert "Successful" in result