Code Example #1
class Test_Cuboid_Bucket:
    def setup_class(self):
        """Setup Parameters"""
        if "S3_ENDPOINT" in dir(settings):
            self.endpoint_url = settings.S3_ENDPOINT
        else:
            self.endpoint_url = None
        CuboidBucket.createBucket(endpoint_url=self.endpoint_url)
        self.cuboid_bucket = CuboidBucket(
            nd_proj.project_name, endpoint_url=self.endpoint_url
        )

    def teardown_class(self):
        """Teardown Parameters"""

        # Make sure the bucket is empty before deleting it.
        for obj in self.cuboid_bucket.getAllObjects():
            self.cuboid_bucket.deleteObject(obj.key)

        CuboidBucket.deleteBucket(endpoint_url=self.endpoint_url)

    @pytest.mark.skipif(
        settings.PROJECT_NAME == "Boss", reason="putObject() not supported by the Boss"
    )
    def test_put_object(self):
        """Testing put object"""
        cube_data = blosc.pack_array(np.zeros(settings.SUPER_CUBOID_SIZE))
        for morton_index in range(0, 10, 1):
            self.cuboid_bucket.putObject(
                nd_proj.channel_name, nd_proj.resolution, morton_index, cube_data
            )

        for morton_index in range(0, 10, 1):
            supercuboid_key = self.cuboid_bucket.generateSupercuboidKey(
                nd_proj.channel_name, nd_proj.resolution, morton_index
            )
            self.cuboid_bucket.deleteObject(supercuboid_key)

    def test_put_object_by_key(self):
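        """Testing put object by key"""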
        hashm = hashlib.md5()
        hashm.update(b"test_cuboidbucket_data")
        cube_data = blosc.pack_array(np.zeros(settings.SUPER_CUBOID_SIZE))

        for morton_index in range(0, 10, 1):
            key = "{}&{}".format(hashm.hexdigest(), morton_index)
            self.cuboid_bucket.putObjectByKey(key, cube_data)

        for morton_index in range(0, 10, 1):
            key = "{}&{}".format(hashm.hexdigest(), morton_index)
            self.cuboid_bucket.deleteObject(key)
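A standalone round trip against the same CuboidBucket API is sketched below. This is a minimal illustration, not part of the original test: the import path, the endpoint, the "demo" project and channel names, and the supercuboid shape are all assumptions; the test above takes these from settings and nd_proj instead.

# Minimal sketch of the CuboidBucket round trip exercised by the test above.
# ASSUMPTIONS: the import path, endpoint, "demo_*" names and the (64, 64, 64)
# shape are illustrative only; the real tests read them from settings/nd_proj.
import blosc
import numpy as np

from ndingest.ndbucket.cuboidbucket import CuboidBucket  # assumed module path

ENDPOINT = "http://localhost:4567"  # local S3 emulator, as used in Code Example #2
SUPER_CUBOID_SIZE = (64, 64, 64)    # assumed supercuboid shape

CuboidBucket.createBucket(endpoint_url=ENDPOINT)
bucket = CuboidBucket("demo_project", endpoint_url=ENDPOINT)

# Store one packed, empty supercuboid: (channel, resolution, morton index, data).
cube_data = blosc.pack_array(np.zeros(SUPER_CUBOID_SIZE))
bucket.putObject("demo_channel", 0, 0, cube_data)

# Rebuild the key, delete the object, then empty and remove the bucket.
key = bucket.generateSupercuboidKey("demo_channel", 0, 0)
bucket.deleteObject(key)
for obj in bucket.getAllObjects():
    bucket.deleteObject(obj.key)
CuboidBucket.deleteBucket(endpoint_url=ENDPOINT)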
Code Example #2
class Test_IngestLambda:
    def setup_class(self):
        """Setup class parameters"""

        # Create the tile and cuboid index tables; skip if they already exist.
        try:
            TileIndexDB.createTable(endpoint_url="http://localhost:8000")
            CuboidIndexDB.createTable(endpoint_url="http://localhost:8000")
        except Exception:
            pass
        self.tileindex_db = TileIndexDB(nd_proj.project_name,
                                        endpoint_url="http://localhost:8000")

        # create the tile bucket
        TileBucket.createBucket(endpoint_url="http://localhost:4567")
        self.tile_bucket = TileBucket(nd_proj.project_name,
                                      endpoint_url="http://localhost:4567")
        self.tiles = [self.x_tile, self.y_tile, self.z_tile] = [0, 0, 0]

        message_id = "testing"
        receipt_handle = "123456"
        # insert SUPER_CUBOID_SIZE tiles in the bucket
        for z_index in range(self.z_tile, settings.SUPER_CUBOID_SIZE[2], 1):
            tile_handle = cStringIO.StringIO()
            self.tile_bucket.putObject(
                tile_handle,
                nd_proj.channel_name,
                nd_proj.resolution,
                self.x_tile,
                self.y_tile,
                z_index,
                message_id,
                receipt_handle,
            )

        # creating the cuboid bucket
        CuboidBucket.createBucket(endpoint_url="http://localhost:4567")
        self.cuboid_bucket = CuboidBucket(nd_proj.project_name,
                                          endpoint_url="http://localhost:4567")

        # create the ingest queue
        IngestQueue.createQueue(nd_proj, endpoint_url="http://localhost:4568")
        self.ingest_queue = IngestQueue(nd_proj,
                                        endpoint_url="http://localhost:4568")

        # send message to the ingest queue
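        # XYZMorton interleaves the (x, y, z) tile indices into a single Morton (Z-order) index.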
        morton_index = XYZMorton(self.tiles)
        supercuboid_key = self.cuboid_bucket.generateSupercuboidKey(
            nd_proj.channel_name, nd_proj.resolution, morton_index)
        response = self.ingest_queue.sendMessage(supercuboid_key)

        # create the cleanup queue
        CleanupQueue.createQueue(nd_proj, endpoint_url="http://localhost:4568")

    def teardown_class(self):
        """Teardown class parameters"""

        # Clean up the tile bucket.
        for z_index in range(self.z_tile, settings.SUPER_CUBOID_SIZE[2], 1):
            tile_key = self.tile_bucket.encodeObjectKey(
                nd_proj.channel_name,
                nd_proj.resolution,
                self.x_tile,
                self.y_tile,
                z_index,
            )
            self.tile_bucket.deleteObject(tile_key)

        # Delete the supercuboid written during the test.
        morton_index = XYZMorton(self.tiles)
        supercuboid_key = self.cuboid_bucket.generateSupercuboidKey(
            nd_proj.channel_name, nd_proj.resolution, morton_index)
        self.cuboid_bucket.deleteObject(supercuboid_key)
        # delete created entities
        TileIndexDB.deleteTable(endpoint_url="http://localhost:8000")
        CuboidIndexDB.deleteTable(endpoint_url="http://localhost:8000")
        IngestQueue.deleteQueue(nd_proj, endpoint_url="http://localhost:4568")
        CleanupQueue.deleteQueue(nd_proj, endpoint_url="http://localhost:4568")
        TileBucket.deleteBucket(endpoint_url="http://localhost:4567")
        try:
            CuboidBucket.deleteBucket(endpoint_url="http://localhost:4567")
        except Exception:
            pass

    def test_Uploadevent(self):
        """Testing the event"""
        # creating an emulambda function
        func = emulambda.import_lambda("ingestlambda.lambda_handler")
        # creating an emulambda event
        with open("../ndlambda/functions/ingest/ingest_event.json") as event_file:
            event = emulambda.parse_event(event_file.read())
        # calling the emulambda function to invoke a lambda
        emulambda.invoke_lambda(func, event, None, 0, None)

        # testing if the supercuboid was inserted in the bucket
        morton_index = XYZMorton(self.tiles)
        cuboid = self.cuboid_bucket.getObject(nd_proj.channel_name,
                                              nd_proj.resolution, morton_index)

        # testing if the message was removed from the ingest queue
        for message in self.ingest_queue.receiveMessage():
            # KL TODO write the message id into the JSON event file directly
            print(message)
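The post-lambda checks at the end of this test can also be wrapped in a small helper; a sketch under stated assumptions follows. The module paths and endpoints are assumptions, and nd_proj stands for the same ndingest project object used throughout the tests (its construction is not shown in the snippet).

# Sketch of the post-lambda checks from the test above as a reusable helper.
# ASSUMPTIONS: module paths and endpoints are illustrative; nd_proj is the same
# project object the tests use, and its construction is not shown here.
from ndingest.ndbucket.cuboidbucket import CuboidBucket  # assumed module path
from ndingest.ndqueue.ingestqueue import IngestQueue     # assumed module path

S3_ENDPOINT = "http://localhost:4567"   # local S3 emulator used by the tests
SQS_ENDPOINT = "http://localhost:4568"  # local SQS emulator used by the tests


def check_ingest_result(nd_proj, morton_index):
    """Fetch the ingested supercuboid and list any messages left on the ingest queue."""
    cuboid_bucket = CuboidBucket(nd_proj.project_name, endpoint_url=S3_ENDPOINT)
    ingest_queue = IngestQueue(nd_proj, endpoint_url=SQS_ENDPOINT)

    # The supercuboid the lambda is expected to have written.
    cuboid = cuboid_bucket.getObject(
        nd_proj.channel_name, nd_proj.resolution, morton_index)

    # Messages still visible here were not consumed by the lambda.
    leftovers = list(ingest_queue.receiveMessage())
    return cuboid, leftovers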