    def test_2_ReadTranslated(self):
        # Start by reading some data
        graph = Graph()
        op = OpRESTfulBlockwiseFilesetReader(graph=graph)
        op.DescriptionFilePath.setValue( self.descriptionFilePath )
        
        logger.debug("test_2_Read(): Reading data")        
        slice1 = numpy.s_[ 20:30, 30:40, 40:50 ]
        readData = op.Output[ slice1 ].wait()
        assert readData.shape == (10, 10, 10)

        logger.debug("test_2_Read(): Creating translated description")        
        # Create a copy of the original description, but specify a translated (and smaller) view
        desc = BlockwiseFileset.readDescription(self.descriptionFilePath)
        desc.view_origin = [20, 30, 40]
        offsetConfigPath = self.descriptionFilePath + '_offset'
        BlockwiseFileset.writeDescription(offsetConfigPath, desc)

        # Read the same data as before using the translated view (offset our roi)
        opTranslated = OpRESTfulBlockwiseFilesetReader(graph=graph)
        opTranslated.DescriptionFilePath.setValue( offsetConfigPath )
        
        logger.debug("test_2_Read(): Reading translated data")        
        sliceTranslated = numpy.s_[ 0:10, 0:10, 0:10 ]
        translatedReadData = opTranslated.Output[ sliceTranslated ].wait()
        assert translatedReadData.shape == (10, 10, 10)
        assert (translatedReadData == readData).all(), "Data doesn't match!"
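# The translated read above is equivalent to shifting the view-local roi by the
# description's view_origin. A minimal standalone sketch (plain numpy, values taken
# from this test; illustrative only, not part of the original test file):
import numpy

view_origin = numpy.array([20, 30, 40])                  # desc.view_origin set above
view_start, view_stop = numpy.array([0, 0, 0]), numpy.array([10, 10, 10])

absolute_start = view_start + view_origin                 # -> [20, 30, 40]
absolute_stop = view_stop + view_origin                   # -> [30, 40, 50]
assert (absolute_start == [20, 30, 40]).all()
assert (absolute_stop == [30, 40, 50]).all()              # same region as slice1 = numpy.s_[20:30, 30:40, 40:50]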
class OpBlockwiseFilesetReader(Operator):
    """
    Adapter that provides an operator interface to the BlockwiseFileset class for reading ONLY.
    """
    name = "OpBlockwiseFilesetReader"

    DescriptionFilePath = InputSlot(stype='filestring')
    Output = OutputSlot()

    def __init__(self, *args, **kwargs):
        super(OpBlockwiseFilesetReader, self).__init__(*args, **kwargs)
        self._blockwiseFileset = None

    def setupOutputs(self):
        # Load up the class that does the real work
        self._blockwiseFileset = BlockwiseFileset( self.DescriptionFilePath.value )

        # Check for errors in the description file
        descriptionFields = self._blockwiseFileset.description
        axes = descriptionFields.axes
        assert all(a in 'txyzc' for a in axes), "Unknown axis type.  Known axes: txyzc  Your axes: {}".format(axes)

        self.Output.meta.shape = descriptionFields.view_shape
        self.Output.meta.dtype = descriptionFields.dtype
        self.Output.meta.axistags = vigra.defaultAxistags(descriptionFields.axes)

    def execute(self, slot, subindex, roi, result):
        assert slot == self.Output, "Unknown output slot"
        self._blockwiseFileset.readData( (roi.start, roi.stop), result )
        return result

    def propagateDirty(self, slot, subindex, roi):
        assert slot == self.DescriptionFilePath, "Unknown input slot."
        self.Output.setDirty( slice(None) )
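
# A hypothetical usage sketch for the operator above. The wiring mirrors the RESTful
# reader test earlier on this page; "my_dataset_description.json" is a placeholder
# path, and the 5D slicing assumes a txyzc dataset as in the tests below.
from lazyflow.graph import Graph

graph = Graph()
reader = OpBlockwiseFilesetReader(graph=graph)
reader.DescriptionFilePath.setValue("my_dataset_description.json")

# Request a subvolume; shape, dtype, and axistags come from the description file.
subvolume = reader.Output[0:1, 0:50, 0:50, 0:50, 0:1].wait()
print(subvolume.shape, subvolume.dtype)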
    def test_6_TestView(self):
        """
        Load some of the dataset again; this time with an offset view
        """
        # Create a copy of the original description, but specify a translated (and smaller) view
        desc = BlockwiseFileset.readDescription(self.configpath)
        desc.view_origin = [0, 300, 200, 100, 0]
        desc.view_shape = [1, 50, 50, 50, 1]
        offsetConfigPath = self.configpath + '_offset'
        BlockwiseFileset.writeDescription(offsetConfigPath, desc)

        # Open the fileset using the special description file
        bfs = BlockwiseFileset( offsetConfigPath, 'r' )
        assert (bfs.description.view_origin == desc.view_origin).all()
        assert (bfs.description.view_shape == desc.view_shape).all()

        # Read some data
        logger.debug( "Reading data..." )
        disk_slicing = numpy.s_[:, 300:350, 200:250, 100:150, :]
        view_slicing = numpy.s_[:, 0:50, 0:50, 0:50, :]
        roi = sliceToRoi( view_slicing, self.dataShape )
        roiShape = roi[1] - roi[0]
        read_data = numpy.zeros( tuple(roiShape), dtype=numpy.uint8 )

        bfs.readData( roi, read_data )

        # The data we read should match the correct part of the original dataset.
        logger.debug( "Checking data..." )
        assert self.data[disk_slicing].shape == read_data.shape
        assert (self.data[disk_slicing] == read_data).all(), "Data didn't match."
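
    # For reference, the offset description written above carries the same fields as the
    # base config (shown in setUp below) plus the view fields set in this test. Roughly
    # (exact serialization details are an assumption, not taken from the source):
    #
    # {
    #     "_schema_name" : "blockwise-fileset-description",
    #     "_schema_version" : 1.0,
    #     "name" : "synapse_small",
    #     "format" : "hdf5",
    #     "axes" : "txyzc",
    #     "shape" : [1,400,400,100,1],
    #     "dtype" : "numpy.uint8",
    #     "block_shape" : [1, 50, 50, 50, 100],
    #     "block_file_name_format" : "cube{roiString}.h5/volume/data",
    #     "view_origin" : [0, 300, 200, 100, 0],
    #     "view_shape" : [1, 50, 50, 50, 1]
    # }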
    def setUp(self):
        """
        Create a blockwise fileset to test with.
        """
        testConfig = \
        """
        {
            "_schema_name" : "blockwise-fileset-description",
            "_schema_version" : 1.0,

            "name" : "synapse_small",
            "format" : "hdf5",
            "axes" : "txyzc",
            "shape" : [1,400,400,100,1],
            "dtype" : "numpy.uint8",
            "block_shape" : [1, 50, 50, 50, 100],
            "block_file_name_format" : "cube{roiString}.h5/volume/data"
        }
        """
        self.tempDir = tempfile.mkdtemp()
        self.configpath = os.path.join(self.tempDir, "config.json")

        logger.debug( "Loading config file..." )
        with open(self.configpath, 'w') as f:
            f.write(testConfig)
        
        logger.debug( "Creating random test data..." )
        bfs = BlockwiseFileset( self.configpath, 'a' )
        dataShape = tuple(bfs.description.shape)
        self.dataShape = dataShape  # stored so view tests (e.g. test_6_TestView) can use it
        self.data = numpy.random.randint( 255, size=dataShape ).astype(numpy.uint8)
        
        logger.debug( "Writing test data..." )
        datasetRoi = ([0,0,0,0,0], dataShape)
        bfs.writeData( datasetRoi, self.data )
        block_starts = getIntersectingBlocks(bfs.description.block_shape, datasetRoi)
        for block_start in block_starts:
            bfs.setBlockStatus(block_start, BlockwiseFileset.BLOCK_AVAILABLE)
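
    # An assumed companion cleanup hook (not shown in the original snippet): setUp
    # creates a temporary directory, so a matching tearDown could remove it.
    def tearDown(self):
        """
        Remove the temporary blockwise fileset created in setUp.
        """
        import shutil
        shutil.rmtree(self.tempDir)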