import os

import vigra

from lazyflow.graph import Operator, InputSlot, OutputSlot
# NOTE: this import path is an assumption -- RESTfulBlockwiseFileset has lived under
# different lazyflow subpackages across versions (e.g. lazyflow.utility.io vs.
# lazyflow.utility.io_util); adjust it to match the installed version.
from lazyflow.utility.io_util.RESTfulBlockwiseFileset import RESTfulBlockwiseFileset

class OpRESTfulBlockwiseFilesetReader(Operator):
    """
    Adapter that provides an operator interface to the RESTfulBlockwiseFileset class, for reading ONLY.
    """

    name = "OpRESTfulBlockwiseFilesetReader"

    DescriptionFilePath = InputSlot(stype="filestring")
    Output = OutputSlot()

    class MissingDatasetError(Exception):
        pass

    def __init__(self, *args, **kwargs):
        super(OpRESTfulBlockwiseFilesetReader, self).__init__(*args, **kwargs)
        self._blockwiseFileset = None

    def setupOutputs(self):
        if not os.path.exists(self.DescriptionFilePath.value):
            raise OpRESTfulBlockwiseFilesetReader.MissingDatasetError(
                "Dataset description not found: {}".format(
                    self.DescriptionFilePath.value))

        # Load up the class that does the real work
        self._blockwiseFileset = RESTfulBlockwiseFileset(
            self.DescriptionFilePath.value)

        # Check for errors in the description file
        localDescription = self._blockwiseFileset.compositeDescription.local_description
        axes = localDescription.axes
        assert all(a in "txyzc" for a in axes), \
            "Unknown axis type.  Known axes: txyzc  Your axes: {}".format(axes)

        self.Output.meta.shape = tuple(localDescription.view_shape)
        self.Output.meta.dtype = localDescription.dtype
        self.Output.meta.axistags = vigra.defaultAxistags(
            str(localDescription.axes))
        drange = localDescription.drange
        if drange is not None:
            self.Output.meta.drange = drange

    def execute(self, slot, subindex, roi, result):
        assert slot == self.Output, "Unknown output slot"
        self._blockwiseFileset.readData((roi.start, roi.stop), result)
        return result

    def propagateDirty(self, slot, subindex, roi):
        assert slot == self.DescriptionFilePath, "Unknown input slot."
        self.Output.setDirty(slice(None))

    def cleanUp(self):
        if self._blockwiseFileset is not None:
            self._blockwiseFileset.close()
        super(OpRESTfulBlockwiseFilesetReader, self).cleanUp()
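
# A minimal usage sketch (an illustration, not part of the original module).  It
# assumes the usual lazyflow conventions: operators are constructed with a Graph,
# input slots are fed via setValue(), and output slots are read by slicing followed
# by wait().  The description-file path is hypothetical.
if __name__ == "__main__":
    from lazyflow.graph import Graph

    op = OpRESTfulBlockwiseFilesetReader(graph=Graph())
    op.DescriptionFilePath.setValue("/path/to/description.json")  # hypothetical path
    subvolume = op.Output[0:20, 0:20, 0:20].wait()  # numpy array covering the requested ROI
    print(subvolume.shape)
    op.cleanUp()
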
    def test_1_SingleDownload(self):
        volume = RESTfulBlockwiseFileset(self.descriptionFilePath)

        slicing = numpy.s_[0:20, 0:20, 0:20]
        roi = sliceToRoi(slicing, volume.description.shape)
        data = volume.readData(roi)
        assert data.shape == (20, 20, 20)

        assert volume.getBlockStatus([0, 0, 0]) == BlockwiseFileset.BLOCK_AVAILABLE
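
    # For reference (a hedged illustration, not from the original test): lazyflow's
    # sliceToRoi converts a numpy-style slicing into a (start, stop) pair, so the
    # slicing above corresponds roughly to
    #     sliceToRoi(numpy.s_[0:20, 0:20, 0:20], (40, 40, 40)) -> ([0, 0, 0], [20, 20, 20])
    # and readData() then fetches exactly one 20x20x20 block.
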
    def test_4_OffsetDownload(self):
        volume = RESTfulBlockwiseFileset(self.descriptionFilePath)

        slicing = numpy.s_[20:40, 20:40, 20:40]
        roi = sliceToRoi(slicing, volume.description.shape)
        data = volume.readData(roi)
        assert data.shape == (20, 20, 20)
        assert volume.getBlockStatus([20, 20, 20]) == BlockwiseFileset.BLOCK_AVAILABLE

        offsetVolume = RESTfulBlockwiseFileset(self.descriptionFilePath_offset)
        offsetSlicing = numpy.s_[20:40, 0:20, 20:40]  # Note middle slice is offset (see view_origin in setup_class)
        offsetRoi = sliceToRoi(offsetSlicing, offsetVolume.description.shape)
        offsetData = offsetVolume.readData(offsetRoi)
        assert offsetData.shape == (20, 20, 20)
        assert offsetVolume.getBlockStatus([20, 0, 20]) == BlockwiseFileset.BLOCK_AVAILABLE

        # Data should be the same
        assert (offsetData == data).all()
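
    # A hedged note on the offset bookkeeping above (not part of the original test):
    # with view_origin = [0, 20, 0], local coordinates in the offset view are shifted
    # by 20 along the middle axis, so its block at [20, 0, 20] covers the same remote
    # data as the non-offset view's block at [20, 20, 20]
    # (numpy.array([20, 0, 20]) + numpy.array([0, 20, 0]) == numpy.array([20, 20, 20])),
    # which is why the two reads are expected to be identical.
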
    def setup_class(cls):
        # The openconnectome site appears to be down at the moment.
        # This test fails when that happens...
        raise nose.SkipTest

        if platform.system() == "Windows":
            # On windows, there are errors, and we make no attempt to solve them (at the moment).
            raise nose.SkipTest

        try:
            BlockwiseFileset._prepare_system()
        except ValueError:
            # If the system isn't configured to allow lots of open files, we can't run this test.
            raise nose.SkipTest

        cls.tempDir = tempfile.mkdtemp()
        logger.debug("Working in {}".format(cls.tempDir))

        # Create the two sub-descriptions
        Bock11VolumeDescription = """
        {
            "_schema_name" : "RESTful-volume-description",
            "_schema_version" : 1.0,

            "name" : "Bock11-level0",
            "format" : "hdf5",
            "axes" : "zyx",
            "##NOTE":"The first z-slice of the bock dataset is 2917, so the origin_offset must be at least 2917",
            "origin_offset" : [2917, 50000, 50000],
            "bounds" : [4156, 135424, 119808],
            "dtype" : "numpy.uint8",
            "url_format" : "http://openconnecto.me/ocp/ca/bock11/hdf5/0/{x_start},{x_stop}/{y_start},{y_stop}/{z_start},{z_stop}/",
            "hdf5_dataset" : "CUTOUT"
        }
        """

        blockwiseFilesetDescription = """
        {
            "_schema_name" : "blockwise-fileset-description",
            "_schema_version" : 1.0,

            "name" : "bock11-blocks",
            "format" : "hdf5",
            "axes" : "zyx",
            "shape" : [40,40,40],
            "dtype" : "numpy.uint8",
            "block_shape" : [20, 20, 20],
            "block_file_name_format" : "block-{roiString}.h5/CUTOUT",
            "dataset_root_dir" : "blocks"
        }
        """

        # Combine them into the composite description (see RESTfulBlockwiseFileset.DescriptionFields)
        compositeDescription = """
        {{
            "_schema_name" : "RESTful-blockwise-fileset-description",
            "_schema_version" : 1.0,

            "remote_description" : {remote_description},
            "local_description" : {local_description}
        }}
        """.format(
            remote_description=Bock11VolumeDescription, local_description=blockwiseFilesetDescription
        )

        # Create the description file
        cls.descriptionFilePath = os.path.join(cls.tempDir, "description.json")
        with open(cls.descriptionFilePath, "w") as f:
            f.write(compositeDescription)

        # Create a new fileset that views the same data and stores it the
        #  same way locally, but this time we'll use an offset 'view'
        # Start with a copy of the non-offset description
        offsetDescription = RESTfulBlockwiseFileset.readDescription(cls.descriptionFilePath)
        offsetDescription.local_description.view_origin = numpy.array([0, 20, 0])
        offsetDescription.local_description.dataset_root_dir = "offset_blocks"
        cls.descriptionFilePath_offset = os.path.join(cls.tempDir, "description_offset.json")
        RESTfulBlockwiseFileset.writeDescription(cls.descriptionFilePath_offset, offsetDescription)
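
    # A plausible companion teardown (a sketch, not part of the original snippet):
    # the temporary directory created above would normally be cleaned up after the
    # tests run, e.g. via shutil.rmtree.
    def teardown_class(cls):
        import shutil

        shutil.rmtree(cls.tempDir)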