Code example #1
0
    def setupOutputs(self):
        """
        Configure the Output slot's metadata (shape, dtype, axistags)
        from the remote volume's description file.
        """
        # The RESTfulVolume parses the description file and performs the downloads.
        self._volumeObject = RESTfulVolume(self.DescriptionFilePath.value)

        description = self._volumeObject.description
        self._axes = description.axes
        outputShape = tuple(description.shape)

        # Guarantee a channel axis: append a singleton 'c' dimension if absent.
        if 'c' not in self._axes:
            self._axes += 'c'
            outputShape = outputShape + (1,)

        meta = self.Output.meta
        meta.shape = outputShape
        meta.dtype = description.dtype
        meta.axistags = vigra.defaultAxistags(self._axes)
Code example #2
0
    def __init__(self, compositeDescriptionPath):
        """
        Constructor.  Uses `readDescription` internally.

        :param compositeDescriptionPath: The path to a JSON file that describes both the remote
                                         volume and local storage structure.  The JSON file schema is specified by
                                         :py:data:`RESTfulBlockwiseFileset.DescriptionFields`.

        :raises RuntimeError: If the local (blockwise) and remote (RESTful)
                              sub-descriptions are inconsistent with each other.
        """
        # Parse the description file, which contains sub-configs for the blockwise description and RESTful description
        self.compositeDescription = RESTfulBlockwiseFileset.readDescription(
            compositeDescriptionPath)

        self.localDescription = self.compositeDescription.local_description
        self.remoteDescription = self.compositeDescription.remote_description

        # Initialize the blockwise (local) fileset in read-only mode,
        # reusing the already-parsed local description.
        super(RESTfulBlockwiseFileset,
              self).__init__(compositeDescriptionPath,
                             'r',
                             preparsedDescription=self.localDescription)
        self._remoteVolume = RESTfulVolume(
            preparsedDescription=self.remoteDescription)

        try:
            # The local block files must use the same internal hdf5 dataset
            # name as the remote volume provides.
            if not self.localDescription.block_file_name_format.endswith(
                    self.remoteDescription.hdf5_dataset):
                msg = "Your RESTful volume description file must specify an hdf5 internal dataset name that matches the one in your Blockwise Fileset description file!"
                # BUGFIX: a separating space was missing between the two sentences.
                msg += " RESTful volume dataset name is '{}', but blockwise fileset format is '{}'".format(
                    self.remoteDescription.hdf5_dataset,
                    self.localDescription.block_file_name_format)
                raise RuntimeError(msg)
            if self.localDescription.axes != self.remoteDescription.axes:
                raise RuntimeError(
                    "Your RESTful volume's axes must match the blockwise dataset axes. ('{}' does not match '{}')"
                    .format(self.remoteDescription.axes,
                            self.localDescription.axes))
            # The local volume must fit inside the remote volume.
            if (numpy.array(self.localDescription.shape) > numpy.array(
                    self.remoteDescription.shape)).any():
                raise RuntimeError(
                    "Your local blockwise volume shape must be smaller in all dimensions than the remote volume shape."
                )
        # BUGFIX: bare 'except:' also caught KeyboardInterrupt/SystemExit.
        # We only want to log-and-reraise real errors.
        except Exception:
            logger.error("Error loading dataset from {}".format(
                compositeDescriptionPath))
            raise
Code example #3
0
File: testRESTfulVolume.py  Project: tatung/lazyflow
    def testBasic(self):
        """
        Requires access to the Internet...
        """
        # The openconnectome site appears to be down at the moment.
        # This test fails when that happens...
        # NOTE: everything below this raise is intentionally unreachable;
        # remove the SkipTest to re-enable the test once the service is back.
        import nose
        raise nose.SkipTest

        # Full-resolution (level 0) Bock11 volume description.
        # origin_offset compensates for the dataset starting at z-slice 2917.
        testConfig0 = """
        {
            "_schema_name" : "RESTful-volume-description",
            "_schema_version" : 1.0,
        
            "name" : "Bock11-level0",
            "format" : "hdf5",
            "axes" : "zyx",
            "##NOTE":"The first z-slice of the bock dataset is 2917, so the origin_offset must be at least 2917",
            "origin_offset" : [2917, 50000, 50000],
            "bounds" : [4156, 135424, 119808],
            "dtype" : "numpy.uint8",
            "url_format" : "http://openconnecto.me/ocp/ca/bock11/hdf5/0/{x_start},{x_stop}/{y_start},{y_stop}/{z_start},{z_stop}/",
            "hdf5_dataset" : "CUTOUT"
        }
        """

        # Downsampled (level 4) description — defined but unused below;
        # presumably kept for manual experimentation.
        testConfig4 = """
        {
            "_schema_name" : "RESTful-volume-description",
            "_schema_version" : 1.0,
        
            "name" : "Bock11-level4",
            "format" : "hdf5",
            "axes" : "zyx",
            "##NOTE":"The first z-slice of the bock dataset is 2917, so the origin_offset must be at least 2917",
            "origin_offset" : [2917, 50000, 50000],
            "bounds" : [4156, 8704, 7680],
            "dtype" : "numpy.uint8",
            "url_format" : "http://openconnecto.me/ocp/ca/bock11/hdf5/4/{x_start},{x_stop}/{y_start},{y_stop}/{z_start},{z_stop}/",
            "hdf5_dataset" : "CUTOUT"
        }
        """

        # Create the description file.
        tempDir = tempfile.mkdtemp()
        descriptionFilePath = os.path.join(tempDir, 'desc.json')
        with open(descriptionFilePath, 'w') as descFile:
            descFile.write(testConfig0)

        # Create the volume object
        volume = RESTfulVolume(descriptionFilePath)

        # Request a small subvolume (coordinates are in the offset global space).
        #slicing = numpy.s_[0:100, 4000:4200, 4000:4200]
        slicing = numpy.s_[0:25, 50000:50050, 50000:50075]
        roi = sliceToRoi(slicing, volume.description.shape)
        outputFile = os.path.join(tempDir, 'volume.h5')
        # datasetPath is "<file path>/<internal hdf5 dataset name>".
        datasetPath = outputFile + '/cube'
        logger.debug("Downloading subvolume to: {}".format(datasetPath))
        volume.downloadSubVolume(roi, datasetPath)

        # Verify that the downloaded dataset has the requested shape (zyx).
        with h5py.File(outputFile, 'r') as hdf5File:
            data = hdf5File['cube']
            assert data.shape == (25, 50, 75)

        shutil.rmtree(tempDir)