Example #1
    def delete_tiles(self, ingest_job):
        """
        Delete all remaining tiles from the tile index database and tile bucket
        Args:
            ingest_job: Ingest job model

        Returns:
            None
        Raises:
            BossError : For exceptions that happen while deleting the tiles and index

        """
        try:
            # Get all the chunks for a job
            tiledb = BossTileIndexDB(ingest_job.collection + '&' +
                                     ingest_job.experiment)
            tilebucket = TileBucket(ingest_job.collection + '&' +
                                    ingest_job.experiment)
            chunks = list(tiledb.getTaskItems(ingest_job.id))

            for chunk in chunks:
                chunk_key = chunk['chunk_key']
                # delete each tile in the chunk, then drop the chunk's index entry
                for key in chunk['tile_uploaded_map']:
                    tilebucket.deleteObject(key)
                tiledb.deleteCuboid(chunk_key, ingest_job.id)

        except Exception as e:
            raise BossError(
                "Exception while deleteing tiles for the ingest job {}. {}".
                format(ingest_job.id, e), ErrorCodes.BOSS_SYSTEM_ERROR)
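
A minimal usage sketch for the method above. The owning class and the job lookup are placeholders invented for illustration; only delete_tiles, BossError, and the job attributes the method reads (collection, experiment, id) come from the snippet itself.

# Hypothetical usage sketch; IngestManager and get_ingest_job are placeholders,
# not APIs shown in the snippet above.
manager = IngestManager()
job = get_ingest_job(1234)  # must expose .collection, .experiment, and .id
try:
    manager.delete_tiles(job)
except BossError as err:
    # delete_tiles wraps any failure in BossError with ErrorCodes.BOSS_SYSTEM_ERROR
    print("Tile cleanup failed: {}".format(err))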
Example #2
    def delete_tiles(self, ingest_job):
        """
        Delete all remaining tiles from the tile index database and tile bucket

        5/24/2018 - This code depends on a GSI for the tile index.  The GSI was
        removed because its key didn't shard well.  Cleanup will now be handled
        by TTL policies applied to the tile bucket and the tile index.  This
        method will be removed once that code is merged.

        Args:
            ingest_job: Ingest job model

        Returns:
            None
        Raises:
            BossError : For exceptions that happen while deleting the tiles and index

        """
        try:
            # Get all the chunks for a job
            tiledb = BossTileIndexDB(ingest_job.collection + '&' + ingest_job.experiment)
            tilebucket = TileBucket(ingest_job.collection + '&' + ingest_job.experiment)
            chunks = list(tiledb.getTaskItems(ingest_job.id))

            for chunk in chunks:
                # delete each tile in the chunk
                for key in chunk['tile_uploaded_map']:
                    tilebucket.deleteObject(key)
                tiledb.deleteCuboid(chunk['chunk_key'], ingest_job.id)

        except Exception as e:
            raise BossError("Exception while deleteing tiles for the ingest job {}. {}".format(ingest_job.id, e),
                            ErrorCodes.BOSS_SYSTEM_ERROR)
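
The docstring above says cleanup is moving to TTL policies on the tile bucket and the tile index. A minimal boto3 sketch of that approach, assuming an S3 lifecycle expiration rule plus a DynamoDB TTL attribute; the bucket, table, and attribute names here are placeholders, not the project's real resources.

import boto3

# Sketch only: resource names and the 7-day window are illustrative.
s3 = boto3.client('s3')
s3.put_bucket_lifecycle_configuration(
    Bucket='tile-bucket',
    LifecycleConfiguration={
        'Rules': [{
            'ID': 'expire-stale-tiles',
            'Filter': {'Prefix': ''},
            'Status': 'Enabled',
            'Expiration': {'Days': 7},  # drop tiles a week after upload
        }]
    })

dynamodb = boto3.client('dynamodb')
dynamodb.update_time_to_live(
    TableName='tile-index',
    TimeToLiveSpecification={
        'Enabled': True,
        'AttributeName': 'expires',  # epoch-seconds timestamp stored on each item
    })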
Example #3
class Test_IngestLambda:
    def setup_class(self):
        """Setup class parameters"""

        # create the tile index table. skip if it exists
        try:
            TileIndexDB.createTable(endpoint_url="http://localhost:8000")
            CuboidIndexDB.createTable(endpoint_url="http://localhost:8000")
        except Exception:
            # tables may already exist
            pass
        self.tileindex_db = TileIndexDB(nd_proj.project_name,
                                        endpoint_url="http://localhost:8000")

        # create the tile bucket
        TileBucket.createBucket(endpoint_url="http://localhost:4567")
        self.tile_bucket = TileBucket(nd_proj.project_name,
                                      endpoint_url="http://localhost:4567")
        self.tiles = [self.x_tile, self.y_tile, self.z_tile] = [0, 0, 0]

        message_id = "testing"
        receipt_handle = "123456"
        # insert SUPER_CUBOID_SIZE tiles in the bucket
        for z_index in range(self.z_tile, settings.SUPER_CUBOID_SIZE[2], 1):
            tile_handle = cStringIO.StringIO()
            self.tile_bucket.putObject(
                tile_handle,
                nd_proj.channel_name,
                nd_proj.resolution,
                self.x_tile,
                self.y_tile,
                z_index,
                message_id,
                receipt_handle,
            )

        # creating the cuboid bucket
        CuboidBucket.createBucket(endpoint_url="http://localhost:4567")
        self.cuboid_bucket = CuboidBucket(nd_proj.project_name,
                                          endpoint_url="http://localhost:4567")

        # create the ingest queue
        IngestQueue.createQueue(nd_proj, endpoint_url="http://localhost:4568")
        self.ingest_queue = IngestQueue(nd_proj,
                                        endpoint_url="http://localhost:4568")

        # send message to the ingest queue
        morton_index = XYZMorton(self.tiles)
        supercuboid_key = self.cuboid_bucket.generateSupercuboidKey(
            nd_proj.channel_name, nd_proj.resolution, morton_index)
        response = self.ingest_queue.sendMessage(supercuboid_key)

        # create the cleanup queue
        CleanupQueue.createQueue(nd_proj, endpoint_url="http://localhost:4568")

    def teardown_class(self):
        """Teardown class parameters"""

        # cleanup tilebucket
        for z_index in range(self.z_tile, settings.SUPER_CUBOID_SIZE[2], 1):
            tile_key = self.tile_bucket.encodeObjectKey(
                nd_proj.channel_name,
                nd_proj.resolution,
                self.x_tile,
                self.y_tile,
                z_index,
            )
            self.tile_bucket.deleteObject(tile_key)

        morton_index = XYZMorton(self.tiles)
        supercuboid_key = self.cuboid_bucket.generateSupercuboidKey(
            nd_proj.channel_name, nd_proj.resolution, morton_index)
        self.cuboid_bucket.deleteObject(supercuboid_key)
        # delete created entities
        TileIndexDB.deleteTable(endpoint_url="http://localhost:8000")
        CuboidIndexDB.deleteTable(endpoint_url="http://localhost:8000")
        IngestQueue.deleteQueue(nd_proj, endpoint_url="http://localhost:4568")
        CleanupQueue.deleteQueue(nd_proj, endpoint_url="http://localhost:4568")
        TileBucket.deleteBucket(endpoint_url="http://localhost:4567")
        try:
            CuboidBucket.deleteBucket(endpoint_url="http://localhost:4567")
        except Exception:
            # bucket may already be deleted
            pass

    def test_Uploadevent(self):
        """Testing the event"""
        # creating an emulambda function
        func = emulambda.import_lambda("ingestlambda.lambda_handler")
        # creating an emulambda event
        with open("../ndlambda/functions/ingest/ingest_event.json") as event_file:
            event = emulambda.parse_event(event_file.read())
        # calling the emulambda function to invoke a lambda
        emulambda.invoke_lambda(func, event, None, 0, None)

        # testing if the supercuboid was inserted in the bucket
        morton_index = XYZMorton(self.tiles)
        cuboid = self.cuboid_bucket.getObject(nd_proj.channel_name,
                                              nd_proj.resolution, morton_index)

        # testing if the message was removed from the ingest queue
        for message in self.ingest_queue.receiveMessage():
            # KL TODO write the message id into the JSON event file directly
            print(message)
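
        # Sketch (not in the original test): an explicit emptiness check for the
        # ingest queue, assuming receiveMessage() yields nothing once the lambda
        # has deleted its message.
        leftover = list(self.ingest_queue.receiveMessage())
        assert not leftover, "ingest queue still holds {} message(s)".format(len(leftover))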
Example #4
                        resource.data['channel']['name'])
                    sns_client.publish(
                        TopicArn=topic_arn,
                        Subject='Object services misuse',
                        Message=msg)

    # Delete message since it was processed successfully
    ingest_queue.deleteMessage(msg_id, msg_rx_handle)

    # Delete Tiles
    for tile in tile_key_list:
        for try_cnt in range(0, 4):
            try:
                time.sleep(try_cnt)
                print("Deleting tile: {}".format(tile))
                tile_bucket.deleteObject(tile)
                break
            except Exception as err:
                print("Failed to delete tile {}: {}".format(tile, err))

    # Delete Entry in tile table
    for try_cnt in range(0, 4):
        try:
            time.sleep(try_cnt)
            tile_index_db.deleteCuboid(chunk_key, int(msg_data["ingest_job"]))
            break
        except Exception as err:
            print("Failed to delete tile index entry for chunk {}: {}".format(chunk_key, err))

    # Increment run counter
    run_cnt += 1
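
Both retry loops above repeat the same linear back-off pattern. A small helper in that spirit, a sketch only and not part of the original handler; like the original loops, it swallows the final failure after the last attempt.

import time

def retry(operation, attempts=4, label=""):
    """Run operation up to `attempts` times, sleeping 0, 1, 2, ... seconds between tries."""
    for try_cnt in range(attempts):
        try:
            time.sleep(try_cnt)
            return operation()
        except Exception as err:
            print("{} failed (attempt {}): {}".format(label, try_cnt + 1, err))

# Usage mirroring the original loops (tile_bucket, tile_index_db, chunk_key,
# and msg_data come from the handler above):
# retry(lambda: tile_bucket.deleteObject(tile), label="delete tile")
# retry(lambda: tile_index_db.deleteCuboid(chunk_key, int(msg_data["ingest_job"])),
#       label="delete tile index entry")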