# Example #1
    def testBasic(self):
        """
        Download a small subvolume of the Bock11 dataset into a temporary
        hdf5 file and verify the downloaded shape.

        Requires access to the Internet...
        """
        testConfig0 = """
        {
            "_schema_name" : "RESTful-volume-description",
            "_schema_version" : 1.0,
        
            "name" : "Bock11-level0",
            "format" : "hdf5",
            "axes" : "zyx",
            "##NOTE":"The first z-slice of the bock dataset is 2917, so the origin_offset must be at least 2917",
            "origin_offset" : [2917, 50000, 50000],
            "bounds" : [4156, 135424, 119808],
            "dtype" : "numpy.uint8",
            "url_format" : "http://openconnecto.me/emca/bock11/hdf5/0/{x_start},{x_stop}/{y_start},{y_stop}/{z_start},{z_stop}/",
            "hdf5_dataset" : "cube"
        }
        """
        
        # NOTE(review): testConfig4 (the downsampled level-4 volume) is not
        # currently exercised by this test; kept for manual experimentation.
        testConfig4 = """
        {
            "_schema_name" : "RESTful-volume-description",
            "_schema_version" : 1.0,
        
            "name" : "Bock11-level4",
            "format" : "hdf5",
            "axes" : "zyx",
            "##NOTE":"The first z-slice of the bock dataset is 2917, so the origin_offset must be at least 2917",
            "origin_offset" : [2917, 50000, 50000],
            "bounds" : [4156, 8704, 7680],
            "dtype" : "numpy.uint8",
            "url_format" : "http://openconnecto.me/emca/bock11/hdf5/4/{x_start},{x_stop}/{y_start},{y_stop}/{z_start},{z_stop}/",
            "hdf5_dataset" : "cube"
        }
        """
        
        # Create the description file.
        tempDir = tempfile.mkdtemp()
        try:
            descriptionFilePath = os.path.join(tempDir, 'desc.json')
            with open(descriptionFilePath, 'w') as descFile:
                descFile.write( testConfig0 )

            # Create the volume object
            volume = RESTfulVolume( descriptionFilePath )

            #slicing = numpy.s_[0:100, 4000:4200, 4000:4200]
            slicing = numpy.s_[0:25, 50000:50050, 50000:50075]
            roi = sliceToRoi( slicing, volume.description.shape )
            outputFile = os.path.join(tempDir, 'volume.h5')
            datasetPath = outputFile + '/cube'
            logger.debug("Downloading subvolume to: {}".format( datasetPath ))
            volume.downloadSubVolume(roi, datasetPath)

            with h5py.File(outputFile, 'r') as hdf5File:
                data = hdf5File['cube']
                # Shape of the requested roi: 25 z-slices, 50 rows, 75 columns.
                assert data.shape == ( 25, 50, 75 )
        finally:
            # Always remove the temp dir, even if the download or assertion
            # fails (previously this leaked the directory on any exception).
            shutil.rmtree(tempDir)
class OpRESTfulVolumeReader(Operator):
    """
    An operator to retrieve hdf5 volumes from a remote server that provides a RESTful interface.
    The operator requires a LOCAL json config file that describes the remote dataset and interface.
    """
    name = "OpRESTfulVolumeReader"

    DescriptionFilePath = InputSlot(stype='filestring')
    Output = OutputSlot()

    def __init__(self, *args, **kwargs):
        super(OpRESTfulVolumeReader, self).__init__(*args, **kwargs)
        # Axis key string from the description (e.g. 'zyx'), with 'c'
        # appended in setupOutputs() if the dataset has no channel axis.
        self._axes = None
        # RESTfulVolume built from the description file; performs downloads.
        self._volumeObject = None

    # NOTE(review): the original file defined setupOutputs twice with
    # identical bodies; the duplicate has been removed.
    def setupOutputs(self):
        """
        Read the description file and configure the Output slot's
        shape, dtype, and axistags accordingly.
        """
        # Create a RESTfulVolume object to read the description file and do the downloads.
        self._volumeObject = RESTfulVolume( self.DescriptionFilePath.value )

        self._axes = self._volumeObject.description.axes
        outputShape = tuple( self._volumeObject.description.shape )

        # If the dataset has no channel axis, add one.
        if 'c' not in self._axes:
            outputShape += (1,)
            self._axes += 'c'

        self.Output.meta.shape = outputShape
        self.Output.meta.dtype = self._volumeObject.description.dtype
        self.Output.meta.axistags = vigra.defaultAxistags(self._axes)

    def execute(self, slot, subindex, roi, result):
        """
        Download the requested roi into a temporary hdf5 file,
        then copy it into ``result``.
        """
        roi = copy.copy(roi)

        # If we are artificially adding a channel index, remove it from the roi for the download.
        if len(self.Output.meta.shape) > len(self._volumeObject.description.shape):
            roi.start.pop( self.Output.meta.axistags.index('c') )
            roi.stop.pop( self.Output.meta.axistags.index('c') )

        # Write the data from the url out to disk (in a temporary file)
        # NOTE(review): this temp directory is never removed afterwards,
        # which leaks one directory+file per execute() call — confirm
        # whether cleanup is handled elsewhere before changing.
        hdf5FilePath = os.path.join(tempfile.mkdtemp(), 'cube.h5')
        # presumably hdf5_dataset is normalized to start with '/' by
        # RESTfulVolume — verify; otherwise this concatenation is malformed.
        hdf5DatasetPath = hdf5FilePath + self._volumeObject.description.hdf5_dataset
        self._volumeObject.downloadSubVolume( (roi.start, roi.stop), hdf5DatasetPath )

        # Open the file we just created using h5py
        with h5py.File( hdf5FilePath, 'r' ) as hdf5File:
            dataset = hdf5File[self._volumeObject.description.hdf5_dataset]
            if len(result.shape) > len(dataset.shape):
                # We appended a channel axis to Output, but the dataset doesn't have that.
                result[...,0] = dataset[...]
            else:
                result[...] = dataset[...]
        return result

    def propagateDirty(self, slot, subindex, roi):
        """Any change to the description file invalidates the whole output."""
        assert slot == self.DescriptionFilePath, "Unknown input slot."
        self.Output.setDirty( slice(None) )
 def readDescription(cls, descriptionFilePath):
     """
     Parse the given description file via the DescriptionSchema, normalize
     its embedded remote volume description in place, and return it.
     """
     parsed = RESTfulBlockwiseFileset.DescriptionSchema.parseConfigFile(descriptionFilePath)
     RESTfulVolume.updateDescription(parsed.remote_description)
     return parsed