# Nested helper from a larger test: `nh5` (nifty.hdf5), `os`, `name`, `rag` and
# `self.dataArray` are provided by the enclosing test method and module scope.
def singleFeatureTest(fu, typ, zDir):
    xname = 'feats_%s_%s_%i_xy.h5' % (name, typ, zDir)
    zname = 'feats_%s_%s_%i_z.h5' % (name, typ, zDir)
    xy_file = nh5.createFile(xname)
    z_file = nh5.createFile(zname)
    # one feature row per in-slice (xy) edge; a single dummy row if xy edges are not requested
    xy_shape = [
        rag.totalNumberOfInSliceEdges if typ in ('xy', 'both') else 1,
        9 if name == 'standard' else 9 * 12
    ]
    xy_chunks = [min(2500, xy_shape[0]), xy_shape[1]]
    # one feature row per between-slice (z) edge
    z_shape = [
        rag.totalNumberOfInBetweenSliceEdges if typ in ('z', 'both') else 1,
        9 if name == 'standard' else 9 * 12
    ]
    z_chunks = [min(2500, z_shape[0]), z_shape[1]]
    xy_array = nh5.hdf5Array('float32', xy_file, 'data', xy_shape, xy_chunks)
    z_array = nh5.hdf5Array('float32', z_file, 'data', z_shape, z_chunks)
    # run the feature functor, then read the accumulated features back into memory
    fu(rag, self.dataArray, xy_array, z_array, zDirection=zDir)
    xfeats = xy_array.readSubarray([0, 0], xy_shape)
    zfeats = z_array.readSubarray([0, 0], z_shape)
    nh5.closeFile(xy_file)
    nh5.closeFile(z_file)
    os.remove(xname)
    os.remove(zname)
    return xname, zname, xfeats, zfeats
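For orientation, here is a minimal, self-contained sketch of the out-of-core round trip the helper above builds on, using the same nh5 (nifty.hdf5) calls; the file name and shapes are purely illustrative:

import numpy
import nifty.hdf5 as nh5

f = nh5.createFile('example_feats.h5')                          # illustrative path
arr = nh5.hdf5Array('float32', f, 'data', [100, 9], [50, 9])    # shape, chunk shape
arr.writeSubarray([0, 0], numpy.zeros((100, 9), dtype='float32'))
feats = arr.readSubarray([0, 0], [100, 9])                      # begin / end (end equals shape when begin is the origin)
nh5.closeFile(f)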
Example #3
    def test_create_zipped_array(self):
        import nifty.hdf5 as nhdf5
        fpath = os.path.join(self.tempFolder, '_nifty_test_array_.h5')

        shape = [101, 102, 103]
        chunks = [11, 12, 13]
        hidT = nhdf5.createFile(fpath)
        array = nhdf5.Hdf5ArrayUInt64(
            groupHandle=hidT,
            datasetName="data",
            shape=shape,
            chunkShape=chunks,
            compression=9
        )

        ashape = array.shape
        self.assertEqual(array.ndim, 3)
        self.assertEqual(shape, ashape)

        chunkShape = array.chunkShape
        self.assertEqual(chunkShape, chunks)

        ends = [10, 11, 12]

        toWrite = numpy.arange(ends[0] * ends[1] * ends[2]).reshape(ends)
        array[0:ends[0], 0:ends[1], 0:ends[2]] = toWrite
        subarray = array[0:ends[0], 0:ends[1], 0:ends[2]]

        self.assertTrue(numpy.array_equal(toWrite, subarray))
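The same chunked, compressed layout can be written with plain h5py, which makes the meaning of the parameters explicit. This is only a sketch, under the assumption that compression=9 above corresponds to gzip/deflate level 9:

import numpy
import h5py

with h5py.File('_nifty_test_array_.h5', 'w') as f:
    dset = f.create_dataset('data', shape=(101, 102, 103), dtype='uint64',
                            chunks=(11, 12, 13), compression='gzip',
                            compression_opts=9)
    # write and read back a small corner block, as in the test above
    block = numpy.arange(10 * 11 * 12).reshape(10, 11, 12)
    dset[0:10, 0:11, 0:12] = block
    assert numpy.array_equal(dset[0:10, 0:11, 0:12], block)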
Example #5
 def test_stacked_rag_hdf5_serialize_deserialize(self):
     import nifty.hdf5 as nhdf5
     hidT = nhdf5.createFile(self.path)
     chunkShape = [1, 2, 1]
     array = nhdf5.Hdf5ArrayUInt32(hidT, "data", self.bigShape, chunkShape)
     array[0:self.bigShape[0], 0:self.bigShape[1], 0:self.bigShape[2]] = self.bigLabels
     self.serialization_test(array, nrag.gridRagStacked2DHdf5)
     nhdf5.closeFile(hidT)
Example #6
 def test_grid_rag_hdf5_stacked2d(self):
     import nifty.hdf5 as nhdf5
     hidT = nhdf5.createFile(self.path)
     chunkShape = [1, 2, 1]
     array = nhdf5.Hdf5ArrayUInt32(hidT, "data", self.shape, chunkShape)
     array[0:self.shape[0], 0:self.shape[1], 0:self.shape[2]] = self.labels
     self.small_array_test(array, nrag.gridRagStacked2DHdf5)
     nhdf5.closeFile(hidT)
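small_array_test itself is not part of this excerpt. Assuming the stacked factory accepts the same numberOfLabels and numberOfThreads keywords as the gridRagHdf5 calls in the later examples, the construction it performs presumably looks roughly like this sketch:

# hedged sketch only: keyword names are assumed, not taken from the snippet above
rag = nrag.gridRagStacked2DHdf5(array,
                                numberOfLabels=int(self.labels.max() + 1),
                                numberOfThreads=-1)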
Example #9
    def test_hdf5_rag2d_large(self):
        import nifty.hdf5 as nhdf5

        shape = [5, 6]
        blockShape = chunkShape = shape
        # FIXME  these shapes cause incorrect edges !!!
        # chunkShape = [3, 2]
        # blockShape = [2, 3]

        hidT = nhdf5.createFile(self.path)
        array = nhdf5.Hdf5ArrayUInt32(hidT, "data", shape, chunkShape)

        self.assertEqual(array.shape[0], shape[0])
        self.assertEqual(array.shape[1], shape[1])

        labels = numpy.array([[0, 0, 0, 0, 1, 1],
                              [0, 2, 2, 0, 1, 3],
                              [0, 3, 3, 3, 3, 3],
                              [0, 3, 4, 5, 5, 5],
                              [0, 0, 4, 6, 6, 6]],
                             dtype='uint32')

        self.assertEqual(labels.shape[0], shape[0])
        self.assertEqual(labels.shape[1], shape[1])

        array[0:shape[0], 0:shape[1]] = labels
        rag = nrag.gridRagHdf5(array,
                               numberOfLabels=int(labels.max() + 1),
                               blockShape=blockShape,
                               numberOfThreads=1)

        shouldEdges = [(0, 1),
                       (0, 2),
                       (0, 3),
                       (0, 4),
                       (1, 3),
                       (2, 3),
                       (3, 4),
                       (3, 5),
                       (4, 5),
                       (4, 6),
                       (5, 6)]

        shouldNotEdges = [(0, 6),
                          (0, 5),
                          (1, 6),
                          (1, 5)]

        self.generic_rag_test(rag=rag,
                              numberOfNodes=labels.max() + 1,
                              shouldEdges=shouldEdges,
                              shouldNotEdges=shouldNotEdges)
        nhdf5.closeFile(hidT)
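generic_rag_test is not shown in these excerpts. A minimal way to verify the expected adjacencies, assuming nifty's graph API where findEdge(u, v) returns the edge id or -1 when the two nodes are not connected, could look like this:

def check_edges(rag, should_edges, should_not_edges):
    # hypothetical helper, not the actual generic_rag_test implementation
    for u, v in should_edges:
        assert rag.findEdge(u, v) >= 0, "missing edge (%i, %i)" % (u, v)
    for u, v in should_not_edges:
        assert rag.findEdge(u, v) == -1, "unexpected edge (%i, %i)" % (u, v)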
Example #10
    def test_hdf5_rag_3d(self):
        import nifty.hdf5 as nhdf5

        shape = [3, 2, 2]
        chunkShape = [1, 2, 1]
        blockShape = [1, 2, 3]

        hidT = nhdf5.createFile(self.path)
        array = nhdf5.Hdf5ArrayUInt32(hidT, "data", shape, chunkShape)

        self.assertEqual(array.shape[0], shape[0])
        self.assertEqual(array.shape[1], shape[1])
        self.assertEqual(array.shape[2], shape[2])

        labels = [[[0, 1],
                   [0, 0]],
                  [[1, 1],
                   [2, 2]],
                  [[3, 3],
                   [3, 3]]]
        labels = numpy.array(labels, dtype='uint32')

        self.assertEqual(labels.shape[0], shape[0])
        self.assertEqual(labels.shape[1], shape[1])
        self.assertEqual(labels.shape[2], shape[2])

        array[0:shape[0], 0:shape[1], 0:shape[2]] = labels
        rag = nrag.gridRagHdf5(array,
                               numberOfLabels=labels.max() + 1,
                               blockShape=blockShape,
                               numberOfThreads=-1)

        shouldEdges = [(0, 1),
                       (0, 2),
                       (1, 2),
                       (1, 3),
                       (2, 3)]

        shouldNotEdges = [(0, 3)]

        self.generic_rag_test(rag=rag,
                              numberOfNodes=labels.max() + 1,
                              shouldEdges=shouldEdges,
                              shouldNotEdges=shouldNotEdges)
        nhdf5.closeFile(hidT)
Example #11
    def run(self):

        # read stuff from the sub solver
        sub_solver = self.input()['sub_solver']
        sub_results = sub_solver.read('sub_results')
        block_begins = sub_solver.read('block_begins')
        block_ends = sub_solver.read('block_ends')
        sub_nodes = sub_solver.read('sub_nodes')

        has_defects = False
        if PipelineParameter().defectPipeline:
            defect_slices_path = self.input()['defect_slices'].path
            defect_slices = vigra.readHDF5(defect_slices_path, 'defect_slices')
            if defect_slices.size:
                has_defects = True

        # get the rag
        rag = self.input()['rag'].read()

        out_path = self.output().path
        if not os.path.exists(out_path):
            os.mkdir(out_path)

        # iterate over the blocks and serialize the sub-block result
        # for block_id in range(1):
        for block_id in range(len(sub_results)):
            sub_result = {
                sub_nodes[block_id][i]: sub_results[block_id][i]
                for i in range(len(sub_nodes[block_id]))
            }

            print("Saving Block-Result for block %i / %i" %
                  (block_id, len(sub_results)))
            block_begin = block_begins[block_id]
            block_end = block_ends[block_id]

            # save the begin and end coordinates of this block for later use
            block_path = os.path.join(out_path,
                                      'block%i_coordinates.h5' % block_id)
            vigra.writeHDF5(block_begin, block_path, 'block_begin')
            vigra.writeHDF5(block_end, block_path, 'block_end')

            # determine the shape of this subblock
            block_shape = block_end - block_begin
            chunk_shape = [
                1, min(512, block_shape[1]),
                min(512, block_shape[2])
            ]

            # save the segmentation for this subblock
            res_path = os.path.join(out_path,
                                    'block%i_segmentation.h5' % block_id)
            res_file = nh5.createFile(res_path)
            out_array = nh5.Hdf5ArrayUInt32(
                res_file,
                'data',
                block_shape.tolist(),
                chunk_shape,
                compression=PipelineParameter().compressionLevel)

            nrag.projectScalarNodeDataInSubBlock(rag, sub_result, out_array,
                                                 block_begins[block_id],
                                                 block_ends[block_id])

            # if this sub-block contains defected slices, replace them with an adjacent good slice
            if has_defects:

                # project the defected slices from global coordinates to this sub-block's coordinates
                this_defect_slices = defect_slices - block_begin[0]
                this_defect_slices = this_defect_slices[np.logical_and(
                    this_defect_slices > 0,
                    this_defect_slices < block_shape[0])]

                # only replace slices if there are any in the subblock
                if this_defect_slices.size:
                    replace_slice = get_replace_slices(this_defect_slices,
                                                       block_shape)
                    for z in this_defect_slices:
                        replace_z = replace_slice[z]
                        workflow_logger.debug(
                            "SubblockSegmentationWorkflow: block %i replacing defected slice %i by %i"
                            % (block_id, z, replace_z))
                        out_array.writeSubarray(
                            [z, 0, 0],
                            out_array.readSubarray([replace_z, 0, 0], [
                                replace_z + 1, block_shape[1], block_shape[2]
                            ]))

            nh5.closeFile(res_file)
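For clarity on the conventions used in the defect handling above: readSubarray takes an inclusive begin and an exclusive end coordinate (which is why a single slice is read from [replace_z, 0, 0] to [replace_z + 1, ...]), whereas writeSubarray takes a begin coordinate plus the data block itself. The copy therefore amounts to:

# unpacked version of the nested writeSubarray call above (same variables as in run())
good_slice = out_array.readSubarray(
    [replace_z, 0, 0],                                   # begin, inclusive
    [replace_z + 1, block_shape[1], block_shape[2]])     # end, exclusive
out_array.writeSubarray([z, 0, 0], good_slice)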