Example #1
    def serialize(self, group):
        if not self.shouldSerialize(group):
            return
        deleteIfPresent(group, self.name)
        group = getOrCreateGroup(group, self.name)
        mainOperator = self.slot.getRealOperator()

        for i in range(len(mainOperator)):
            subgroup = getOrCreateGroup(group, "{:04}".format(i))

            cleanBlockRois = self.blockslot[i].value
            for roi in cleanBlockRois:
                region_features_arr = self.slot[i](*roi).wait()
                assert region_features_arr.shape == (1, )
                region_features = region_features_arr[0]
                roi_grp = subgroup.create_group(name=str(roi))
                logger.debug('Saving region features into group: "{}"'.format(
                    roi_grp.name))
                for key, val in region_features.items():
                    plugin_group = getOrCreateGroup(roi_grp, key)
                    for featname, featval in val.items():
                        plugin_group.create_dataset(name=featname,
                                                    data=featval)

        self.dirty = False
    def importStackAsLocalDataset(self, info):
        """
        Add the given stack data to the project file as a local dataset.
        Create a datainfo and append it to our operator.
        """
        try:
            self.progressSignal.emit(0)
            
            projectFileHdf5 = self.topLevelOperator.ProjectFile.value
            topGroup = getOrCreateGroup(projectFileHdf5, self.topGroupName)
            localDataGroup = getOrCreateGroup(topGroup, 'local_data')

            globstring = info.filePath
            info.location = DatasetInfo.Location.ProjectInternal
            
            opWriter = OpStackToH5Writer(graph=self.topLevelOperator.graph)
            opWriter.hdf5Group.setValue(localDataGroup)
            opWriter.hdf5Path.setValue(info.datasetId)
            opWriter.GlobString.setValue(globstring)
                
            # Forward progress from the writer directly to our applet
            opWriter.progressSignal.subscribe(self.progressSignal.emit)
            
            success = opWriter.WriteImage.value
            
            numDatasets = len(self.topLevelOperator.Dataset)
            self.topLevelOperator.Dataset.resize(numDatasets + 1)
            self.topLevelOperator.Dataset[numDatasets].setValue(info)
            
        finally:
            self.progressSignal.emit(100)

        return success
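
The serialize() methods in these examples all begin with the same delete-then-recreate preamble before writing fresh data. The following is a standalone sketch of that idiom using plain h5py (the in-memory "core" driver and the group name are illustrative only; h5py's built-in require_group is a near-equivalent of getOrCreateGroup):

    import h5py
    import numpy

    # Illustrative only: mimics the deleteIfPresent / getOrCreateGroup preamble
    # used by the serializers in these examples, against a throwaway in-memory file.
    with h5py.File("scratch.h5", "w", driver="core", backing_store=False) as f:
        name = "ObjectExtraction"
        f.create_group(name)              # pretend stale data already exists
        if name in f:                     # deleteIfPresent(f, name)
            del f[name]
        group = f.require_group(name)     # getOrCreateGroup(f, name)
        group.create_dataset("timesteps", data=numpy.arange(3))
        print(sorted(group.keys()))       # ['timesteps']
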
Example #3
    def serialize(self, group):
        if not self.shouldSerialize(group):
            return
        deleteIfPresent(group, self.name)
        group = getOrCreateGroup(group, self.name)
        mainOperator = self.slot.getRealOperator()
        innerops = mainOperator.innerOperators
        for i, op in enumerate(innerops):
            gr = getOrCreateGroup(group, str(i))
            labels_gr = getOrCreateGroup(gr, "labels")
            if "labels" in op.Annotations.value.keys():
                for t in op.Annotations.value["labels"].keys():
                    t_gr = getOrCreateGroup(labels_gr, str(t))
                    for oid in op.Annotations.value["labels"][t].keys():
                        l = op.Annotations.value["labels"][t][oid]
                        dset = list(l)
                        if len(dset) > 0:
                            t_gr.create_dataset(name=str(oid), data=dset)

            divisions_gr = getOrCreateGroup(gr, "divisions")
            dset = []
            if "divisions" in op.Annotations.value.keys():
                for trackid in op.Annotations.value["divisions"].keys():
                    (children,
                     t_parent) = op.Annotations.value["divisions"][trackid]
                    dset.append([trackid, children[0], children[1], t_parent])
            if len(dset) > 0:
                divisions_gr.create_dataset(name=str(i), data=dset)
        self.dirty = False
    def _serializeToHdf5(self, topGroup, hdf5File, projectFilePath):
        getOrCreateGroup(topGroup, "local_data")
        deleteIfPresent(topGroup, "Role Names")
        role_names = [name.encode("utf-8") for name in self.topLevelOperator.DatasetRoles.value]
        topGroup.create_dataset("Role Names", data=role_names)

        # Access the info group
        infoDir = getOrCreateGroup(topGroup, "infos")

        # Delete all infos
        infoDir.clear()

        # Rebuild the list of infos
        roleNames = self.topLevelOperator.DatasetRoles.value
        internal_datasets_to_keep = set()
        for laneIndex, multislot in enumerate(self.topLevelOperator.DatasetGroup):
            laneGroupName = "lane{:04d}".format(laneIndex)
            laneGroup = infoDir.create_group(laneGroupName)
            for roleIndex, slot in enumerate(multislot):
                infoGroup = laneGroup.create_group(roleNames[roleIndex])
                if slot.ready():
                    datasetInfo = slot.value
                    if isinstance(datasetInfo, ProjectInternalDatasetInfo):
                        internal_datasets_to_keep.add(hdf5File[datasetInfo.inner_path])
                    for k, v in datasetInfo.to_json_data().items():
                        if v is not None:
                            infoGroup.create_dataset(k, data=v)
        if self.local_data_path.as_posix() in hdf5File:
            for dataset in hdf5File[self.local_data_path.as_posix()].values():
                if dataset not in internal_datasets_to_keep:
                    del hdf5File[dataset.name]
        self._dirty = False
    def serialize(self, group):
        if not self.shouldSerialize(group):
            return
        deleteIfPresent(group, self.name)
        group = getOrCreateGroup(group, self.name)
        mainOperator = self.slot.getRealOperator()
        innerops = mainOperator.innerOperators
        for i, op in enumerate(innerops):
            gr = getOrCreateGroup(group, str(i))
            labels_gr = getOrCreateGroup(gr, "labels")
            if "labels" in op.Annotations.value.keys():
                for t in op.Annotations.value["labels"].keys():
                    t_gr = getOrCreateGroup(labels_gr, str(t))
                    for oid in op.Annotations.value["labels"][t].keys():
                        l = op.Annotations.value["labels"][t][oid]
                        dset = list(l)
                        if len(dset) > 0:
                            t_gr.create_dataset(name=str(oid), data=dset)

            divisions_gr = getOrCreateGroup(gr, "divisions")
            dset = []
            if "divisions" in op.Annotations.value.keys():
                for trackid in op.Annotations.value["divisions"].keys():
                    (children, t_parent) = op.Annotations.value["divisions"][trackid]
                    dset.append([trackid, children[0], children[1], t_parent])
            if len(dset) > 0:
                divisions_gr.create_dataset(name=str(i), data=dset)
        self.dirty = False
Example #6
    def importStackAsLocalDataset(self, info):
        """
        Add the given stack data to the project file as a local dataset.
        Does not update the topLevelOperator
        """
        try:
            self.progressSignal.emit(0)
            
            projectFileHdf5 = self.topLevelOperator.ProjectFile.value
            topGroup = getOrCreateGroup(projectFileHdf5, self.topGroupName)
            localDataGroup = getOrCreateGroup(topGroup, 'local_data')

            globstring = info.filePath
            info.location = DatasetInfo.Location.ProjectInternal
            firstPathParts = PathComponents(info.filePath.split('//')[0])
            info.filePath = firstPathParts.externalDirectory + '/??' + firstPathParts.extension
            
            opWriter = OpStackToH5Writer(graph=self.topLevelOperator.graph)
            opWriter.hdf5Group.setValue(localDataGroup)
            opWriter.hdf5Path.setValue(info.datasetId)
            opWriter.GlobString.setValue(globstring)
                
            # Forward progress from the writer directly to our applet
            opWriter.progressSignal.subscribe(self.progressSignal.emit)
            
            success = opWriter.WriteImage.value
            
        finally:
            opWriter.cleanUp()
            self.progressSignal.emit(100)

        return success
    def _serializeToHdf5(self, topGroup, hdf5File, projectFilePath):
        split_settings_grp = getOrCreateGroup(topGroup, "split_settings")

        for laneIndex in range(len(self._topLevelOperator)):
            lane_grp = getOrCreateGroup(split_settings_grp, "{}".format(laneIndex))
            opLaneView = self._topLevelOperator.getLane(laneIndex)
            if opLaneView.AnnotationFilepath.ready():
                annotation_filepath = opLaneView.AnnotationFilepath.value
                deleteIfPresent(lane_grp, "annotation_filepath")
                lane_grp.create_dataset("annotation_filepath", data=annotation_filepath)

        # Now save the regular carving data.
        super(SplitBodyCarvingSerializer, self)._serializeToHdf5(topGroup, hdf5File, projectFilePath)
    def _serializeToHdf5(self, topGroup, hdf5File, projectFilePath):
        split_settings_grp = getOrCreateGroup(topGroup, "split_settings")

        for laneIndex in range(len(self._topLevelOperator)):
            lane_grp = getOrCreateGroup(split_settings_grp, "{}".format(laneIndex))
            opLaneView = self._topLevelOperator.getLane(laneIndex)
            if opLaneView.AnnotationFilepath.ready():
                annotation_filepath = opLaneView.AnnotationFilepath.value
                deleteIfPresent(lane_grp, "annotation_filepath")
                lane_grp.create_dataset("annotation_filepath", data=annotation_filepath)

        # Now save the regular carving data.
        super(SplitBodyCarvingSerializer, self)._serializeToHdf5(topGroup, hdf5File, projectFilePath)
        self.__dirty = False
Example #9
    def serialize(self, group):
        deleteIfPresent(group, self.name)
        group = getOrCreateGroup(group, self.name)
        mainOperator = self.slot.getRealOperator()
        innerops = mainOperator.innerOperators
        for i, op in enumerate(innerops):
            gr = getOrCreateGroup(group, str(i))
            for t in op.labels.keys():
                t_gr = getOrCreateGroup(gr, str(t))
                for oid in op.labels[t].keys():
                    l = op.labels[t][oid]
                    dset = list(l)
                    if len(dset) > 0:
                        t_gr.create_dataset(name=str(oid), data=dset)
        self.dirty = False
Example #10
    def serialize(self, group):
        if not self.shouldSerialize(group):
            return
        deleteIfPresent(group, self.name)
        group = getOrCreateGroup(group, self.name)
        mainOperator = self.slot.getRealOperator()
        for i, op in enumerate(mainOperator.innerOperators):
            ts = op._processedTimeSteps
            if len(ts) > 0:
                subgroup = getOrCreateGroup(group, str(i))
                subgroup.create_dataset(name='timesteps', data=list(ts))

                src = op._mem_h5
                subgroup.copy(src['/TranslationVectors'], subgroup, name='data')
        self.dirty = False

    def _serializeToHdf5(self, topGroup, hdf5File, projectFilePath):
        preproc = topGroup

        for opPre in self._o.innerOperators:
            mst = opPre._prepData[0]

            if mst is not None:

                # The values to be saved for sigma and filter are the
                # values of the last valid preprocess.
                # (These may differ from the current settings!)

                deleteIfPresent(preproc, "sigma")
                deleteIfPresent(preproc, "filter")
                deleteIfPresent(preproc, "watershed_source")
                deleteIfPresent(preproc, "invert_watershed_source")
                deleteIfPresent(preproc, "graph")

                preproc.create_dataset("sigma", data=opPre.initialSigma)
                preproc.create_dataset("filter", data=opPre.initialFilter)
                ws_source = str(opPre.WatershedSource.value)
                assert isinstance(ws_source, str), "WatershedSource was {}, but it should be a string.".format(ws_source)
                preproc.create_dataset("watershed_source", data=ws_source)
                preproc.create_dataset("invert_watershed_source", data=opPre.InvertWatershedSource.value)

                preprocgraph = getOrCreateGroup(preproc, "graph")
                mst.saveH5G(preprocgraph)

            opPre._unsavedData = False
Example #13
    def _serializeToHdf5(self, topGroup, hdf5File, projectFilePath):
        preproc = topGroup

        for opPre in self._o.innerOperators:
            mst = opPre._prepData[0]

            if mst is not None:

                # The values to be saved for sigma and filter are the
                # values of the last valid preprocess.
                # (These may differ from the current settings!)

                deleteIfPresent(preproc, "sigma")
                deleteIfPresent(preproc, "filter")
                deleteIfPresent(preproc, "watershed_source")
                deleteIfPresent(preproc, "invert_watershed_source")
                deleteIfPresent(preproc, "graph")

                preproc.create_dataset("sigma", data=opPre.initialSigma)
                preproc.create_dataset("filter", data=opPre.initialFilter)
                ws_source = str(opPre.WatershedSource.value)
                assert isinstance(
                    ws_source, str
                ), "WatershedSource was {}, but it should be a string.".format(
                    ws_source)
                preproc.create_dataset("watershed_source", data=ws_source)
                preproc.create_dataset("invert_watershed_source",
                                       data=opPre.InvertWatershedSource.value)

                preprocgraph = getOrCreateGroup(preproc, "graph")
                mst.saveH5G(preprocgraph)

            opPre._unsavedData = False
Example #14
    def test_getOrCreateGroup_2(self):
        self.assertTrue("b" not in self.tmpFile)

        group = getOrCreateGroup(self.tmpFile, "b")

        self.assertEqual(group.name, "/b")
        self.assertTrue("b" in self.tmpFile)
        self.assertTrue(isinstance(self.tmpFile["b"], h5py.Group))
Example #16
    def test_getOrCreateGroup_3(self):
        self.assertTrue("c" in self.tmpFile)
        self.assertTrue(isinstance(self.tmpFile["c"], h5py.Dataset))

        self.assertRaises(TypeError, lambda: getOrCreateGroup(self.tmpFile, "c"))

        self.assertTrue("c" in self.tmpFile)
        self.assertTrue(isinstance(self.tmpFile["c"], h5py.Dataset))
    def importStackAsLocalDataset(self, info):
        """
        Add the given stack data to the project file as a local dataset.
        Does not update the topLevelOperator.
        
        :param info: A DatasetInfo object.
                     Note: info.filePath must be a str which lists the stack files, delimited with os.path.pathsep
                     Note: info will be MODIFIED by this function.  Use the modified info when assigning it to a dataset.
        """
        try:
            self.progressSignal.emit(0)

            projectFileHdf5 = self.topLevelOperator.ProjectFile.value
            topGroup = getOrCreateGroup(projectFileHdf5, self.topGroupName)
            localDataGroup = getOrCreateGroup(topGroup, 'local_data')

            globstring = info.filePath
            info.location = DatasetInfo.Location.ProjectInternal
            firstPathParts = PathComponents(
                info.filePath.split(os.path.pathsep)[0])
            info.filePath = firstPathParts.externalDirectory + '/??' + firstPathParts.extension
            info.fromstack = True

            # Use absolute path
            cwd = self.topLevelOperator.WorkingDirectory.value
            if os.path.pathsep not in globstring and not os.path.isabs(globstring):
                globstring = os.path.normpath(os.path.join(cwd, globstring))

            opWriter = OpStackToH5Writer(parent=self.topLevelOperator.parent,
                                         graph=self.topLevelOperator.graph)
            opWriter.hdf5Group.setValue(localDataGroup)
            opWriter.hdf5Path.setValue(info.datasetId)
            opWriter.GlobString.setValue(globstring)

            # Forward progress from the writer directly to our applet
            opWriter.progressSignal.subscribe(self.progressSignal.emit)

            success = opWriter.WriteImage.value

        finally:
            opWriter.cleanUp()
            self.progressSignal.emit(100)

        return success
Example #18
    def test_getOrCreateGroup_1(self):
        self.assertTrue("a" in self.tmpFile)
        self.assertTrue(isinstance(self.tmpFile["a"], h5py.Group))

        group = getOrCreateGroup(self.tmpFile, "a")

        self.assertEqual(group.name, "/a")
        self.assertTrue("a" in self.tmpFile)
        self.assertTrue(isinstance(self.tmpFile["a"], h5py.Group))
Example #19
    def serialize(self, group):
        if not self.shouldSerialize(group):
            return
        deleteIfPresent(group, self.name)
        group = getOrCreateGroup(group, self.name)
        mainOperator = self.slot.getRealOperator()
        innerops = mainOperator.innerOperators
        for i, op in enumerate(innerops):
            gr = getOrCreateGroup(group, str(i))
            for t in list(op.disappearances.keys()):
                t_gr = getOrCreateGroup(gr, str(t))
                for oid in list(op.disappearances[t].keys()):
                    oid_gr = getOrCreateGroup(t_gr, str(oid))
                    for track in list(op.disappearances[t][oid].keys()):
                        app = op.disappearances[t][oid][track]
                        if app:
                            oid_gr.create_dataset(name=str(track), data=app)
        self.dirty = False
Example #22
    def test_getOrCreateGroup_3(self):
        self.assertTrue("c" in self.tmpFile)
        self.assertTrue(isinstance(self.tmpFile["c"], h5py.Dataset))

        self.assertRaises(TypeError,
                          lambda: getOrCreateGroup(self.tmpFile, "c"))

        self.assertTrue("c" in self.tmpFile)
        self.assertTrue(isinstance(self.tmpFile["c"], h5py.Dataset))
Example #23
    def _serializeToHdf5(self, topGroup, hdf5File, projectFilePath):
        obj = getOrCreateGroup(topGroup, "objects")
        for imageIndex, opCarving in enumerate(self._o.opCarving.innerOperators):
            mst = opCarving._mst
            for name in opCarving._dirtyObjects:
                print("[CarvingSerializer] serializing %s" % name)

                if name in obj and name in mst.object_seeds_fg_voxels:
                    # group already exists
                    print("  -> changed")
                elif name not in mst.object_seeds_fg_voxels:
                    print("  -> deleted")
                else:
                    print("  -> added")

                g = getOrCreateGroup(obj, name)
                deleteIfPresent(g, "fg_voxels")
                deleteIfPresent(g, "bg_voxels")
                deleteIfPresent(g, "sv")
                deleteIfPresent(g, "bg_prio")
                deleteIfPresent(g, "no_bias_below")

                if name not in mst.object_seeds_fg_voxels:
                    # this object was deleted
                    deleteIfPresent(obj, name)
                    continue

                v = mst.object_seeds_fg_voxels[name]
                v = [v[i][:, numpy.newaxis] for i in range(3)]
                v = numpy.concatenate(v, axis=1)
                g.create_dataset("fg_voxels", data=v)
                v = mst.object_seeds_bg_voxels[name]
                v = [v[i][:, numpy.newaxis] for i in range(3)]
                v = numpy.concatenate(v, axis=1)
                g.create_dataset("bg_voxels", data=v)
                g.create_dataset("sv", data=mst.object_lut[name])

                d1 = numpy.asarray(mst.bg_priority[name], dtype=numpy.float32)
                d2 = numpy.asarray(mst.no_bias_below[name], dtype=numpy.int32)
                g.create_dataset("bg_prio", data=d1)
                g.create_dataset("no_bias_below", data=d2)

            opCarving._dirtyObjects = set()
Example #24
    def importStackAsLocalDataset(self, info):
        """
        Add the given stack data to the project file as a local dataset.
        Does not update the topLevelOperator.
        
        :param info: A DatasetInfo object.
                     Note: info.filePath must be a str which lists the stack files, delimited with os.path.pathsep
                     Note: info will be MODIFIED by this function.  Use the modified info when assigning it to a dataset.
        """
        try:
            self.progressSignal.emit(0)
            
            projectFileHdf5 = self.topLevelOperator.ProjectFile.value
            topGroup = getOrCreateGroup(projectFileHdf5, self.topGroupName)
            localDataGroup = getOrCreateGroup(topGroup, 'local_data')

            globstring = info.filePath
            info.location = DatasetInfo.Location.ProjectInternal
            firstPathParts = PathComponents(info.filePath.split(os.path.pathsep)[0])
            info.filePath = firstPathParts.externalDirectory + '/??' + firstPathParts.extension
            info.fromstack = True

            # Use absolute path
            cwd = self.topLevelOperator.WorkingDirectory.value
            if os.path.pathsep not in globstring and not os.path.isabs(globstring):
                globstring = os.path.normpath(os.path.join(cwd, globstring))
            
            opWriter = OpStackToH5Writer(parent=self.topLevelOperator.parent, graph=self.topLevelOperator.graph)
            opWriter.hdf5Group.setValue(localDataGroup)
            opWriter.hdf5Path.setValue(info.datasetId)
            opWriter.GlobString.setValue(globstring)
                
            # Forward progress from the writer directly to our applet
            opWriter.progressSignal.subscribe(self.progressSignal.emit)
            
            success = opWriter.WriteImage.value
            
        finally:
            opWriter.cleanUp()
            self.progressSignal.emit(100)

        return success
    def serialize(self, group):
        if not self.shouldSerialize(group):
            return
        deleteIfPresent(group, self.name)
        group = getOrCreateGroup(group, self.name)
        mainOperator = self.slot.getRealOperator()

        for i in range(len(mainOperator)):
            subgroup = getOrCreateGroup(group, str(i))

            cleanBlockRois = self.blockslot[i].value
            for roi in cleanBlockRois:
                region_features_arr = self.outslot[i](*roi).wait()
                assert region_features_arr.shape == (1,)
                region_features = region_features_arr[0]
                roi_grp = subgroup.create_group(name=str(roi))
                logger.debug('Saving region features into group: "{}"'.format(roi_grp.name))
                for key, val in region_features.items():
                    roi_grp.create_dataset(name=key, data=val)

        self.dirty = False
Example #26
    def serialize(self, group):
        deleteIfPresent(group, self.name)
        group = getOrCreateGroup(group, self.name)
        mainOperator = self.slot.getRealOperator()
        innerops = mainOperator.innerOperators
        for i, op in enumerate(innerops):
            dset = []
            for trackid in op.divisions.keys():
                (children, t_parent) = op.divisions[trackid]
                dset.append([trackid, children[0], children[1], t_parent])
            if len(dset) > 0:
                group.create_dataset(name=str(i), data=dset)
        self.dirty = False
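
The divisions layout written above is one N×4 integer dataset per inner operator, with rows [trackid, child0, child1, t_parent]. The examples do not show the matching deserializer; a hedged sketch of the inverse would be:

    def deserialize_divisions(group):
        # Rebuild one {trackid: ((child0, child1), t_parent)} dict per
        # inner operator from the datasets written by serialize() above.
        divisions_per_op = {}
        for dset_name in sorted(group.keys(), key=int):
            divisions = {}
            for trackid, child0, child1, t_parent in group[dset_name][()]:
                divisions[int(trackid)] = ((int(child0), int(child1)), int(t_parent))
            divisions_per_op[int(dset_name)] = divisions
        return divisions_per_op
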
Example #28
    def _serializeToHdf5(self, topGroup, hdf5File, projectFilePath):
        preproc = topGroup

        for opPre in self._o.innerOperators:
            mst = opPre._prepData[0]

            if mst is not None:

                # The values to be saved for sigma and filter are the
                # values of the last valid preprocess
                # (These may differ from the current settings!)

                deleteIfPresent(preproc, "sigma")
                deleteIfPresent(preproc, "filter")
                deleteIfPresent(preproc, "graph")

                preproc.create_dataset("sigma", data=opPre.initialSigma)
                preproc.create_dataset("filter", data=opPre.initialFilter)

                preprocgraph = getOrCreateGroup(preproc, "graph")
                mst.saveH5G(preprocgraph)

            opPre._unsavedData = False
Example #30
     
    def loadProject(shell):
        shell.openProjectFile(projectFilename)

    workflowKwargs = {'hintoverlayFile': options.hintoverlayFile,
                      'pmapoverlayFile': options.pmapoverlayFile}
    startShellGui(functools.partial(CarvingWorkflow, **workflowKwargs), loadProject)

elif len(args) == 2:

    carvingGraphFilename = os.path.abspath(args[0]).replace("\\", "/")
    projectFilename = args[1]

    projectFile = h5py.File(projectFilename, "a")

    preproc = getOrCreateGroup(projectFile, "preprocessing")

    deleteIfPresent(preproc, "sigma")
    deleteIfPresent(preproc, "filter")
    deleteIfPresent(preproc, "StorageVersion")
    deleteIfPresent(preproc, "graph")
    deleteIfPresent(preproc, "graphfile")

    preproc.create_dataset("sigma", data=1.6)
    preproc.create_dataset("filter", data=0)
    preproc.create_dataset("graphfile", data=carvingGraphFilename)
    preproc.create_dataset("StorageVersion", data=0.1)

    preproc = getOrCreateGroup(projectFile, "preprocessing")
    dataSelect = getOrCreateGroup(projectFile, "Input Data")
    dataInfo = getOrCreateGroup(dataSelect, "infos")
Example #31
    def _serializeToHdf5(self, topGroup, hdf5File, projectFilePath):
        obj = getOrCreateGroup(topGroup, "objects")
        for imageIndex, opCarving in enumerate(self._o.innerOperators):
            mst = opCarving._mst

            # Populate a list of objects to save:
            objects_to_save = set(list(mst.object_names.keys()))
            objects_already_saved = set(list(topGroup["objects"]))
            # 1.) all objects that are in mst.object_names that are not in saved
            objects_to_save = objects_to_save.difference(objects_already_saved)

            # 2.) add opCarving._dirtyObjects:
            objects_to_save = objects_to_save.union(opCarving._dirtyObjects)

            for name in objects_to_save:
                logger.info( "[CarvingSerializer] serializing %s" % name )
               
                if name in obj and name in mst.object_seeds_fg_voxels: 
                    #group already exists
                    logger.info( "  -> changed" )
                elif name not in mst.object_seeds_fg_voxels:
                    logger.info( "  -> deleted" )
                else:
                    logger.info( "  -> added" )
                    
                g = getOrCreateGroup(obj, name)
                deleteIfPresent(g, "fg_voxels")
                deleteIfPresent(g, "bg_voxels")
                deleteIfPresent(g, "sv")
                deleteIfPresent(g, "bg_prio")
                deleteIfPresent(g, "no_bias_below")
                
                if name not in mst.object_seeds_fg_voxels:
                    # this object was deleted
                    deleteIfPresent(obj, name)
                    continue
               
                v = mst.object_seeds_fg_voxels[name]
                v = [v[i][:, numpy.newaxis] for i in range(3)]
                v = numpy.concatenate(v, axis=1)
                g.create_dataset("fg_voxels", data=v)
                v = mst.object_seeds_bg_voxels[name]
                v = [v[i][:, numpy.newaxis] for i in range(3)]
                v = numpy.concatenate(v, axis=1)
                g.create_dataset("bg_voxels", data=v)
                g.create_dataset("sv", data=mst.object_lut[name])
                
                d1 = numpy.asarray(mst.bg_priority[name], dtype=numpy.float32)
                d2 = numpy.asarray(mst.no_bias_below[name], dtype=numpy.int32)
                g.create_dataset("bg_prio", data=d1)
                g.create_dataset("no_bias_below", data=d2)
                
            opCarving._dirtyObjects = set()
        
            # save current seeds
            deleteIfPresent(topGroup, "fg_voxels")
            deleteIfPresent(topGroup, "bg_voxels")

            fg_voxels, bg_voxels = opCarving.get_label_voxels()
            if fg_voxels is None:
                return

            if fg_voxels[0].shape[0] > 0:
                v = [fg_voxels[i][:, numpy.newaxis] for i in range(3)]
                v = numpy.concatenate(v, axis=1)
                topGroup.create_dataset("fg_voxels", data=v)

            if bg_voxels[0].shape[0] > 0:
                v = [bg_voxels[i][:, numpy.newaxis] for i in range(3)]
                v = numpy.concatenate(v, axis=1)
                topGroup.create_dataset("bg_voxels", data=v)

            logger.info("saved seeds")
Example #33
    def _serializeToHdf5(self, topGroup, hdf5File, projectFilePath):
        # Write any missing local datasets to the local_data group
        localDataGroup = getOrCreateGroup(topGroup, 'local_data')
        wroteInternalData = False
        for laneIndex, multislot in enumerate(self.topLevelOperator.DatasetGroup):
            for roleIndex, slot in enumerate(multislot):
                if not slot.ready():
                    continue
                info = slot.value
                # If this dataset should be stored in the project, but it isn't there yet
                if (info.location == DatasetInfo.Location.ProjectInternal
                        and info.datasetId not in localDataGroup.keys()):
                    # Obtain the data from the corresponding output and store it to the project.
                    dataSlot = self.topLevelOperator._NonTransposedImageGroup[laneIndex][roleIndex]

                    try:
                        opWriter = OpH5WriterBigDataset(parent=self.topLevelOperator.parent, graph=self.topLevelOperator.graph)
                        # Compression slows down browsing a lot, and raw data tends
                        # to be noisy and doesn't compress very well, anyway.
                        opWriter.CompressionEnabled.setValue(False)
                        opWriter.hdf5File.setValue(localDataGroup)
                        opWriter.hdf5Path.setValue(info.datasetId)
                        opWriter.Image.connect(dataSlot)

                        # Trigger the copy
                        success = opWriter.WriteImage.value
                        assert success
                    finally:
                        opWriter.cleanUp()
    
                    # Add axistags and drange attributes, in case someone uses this dataset outside ilastik
                    localDataGroup[info.datasetId].attrs['axistags'] = dataSlot.meta.axistags.toJSON()
                    if dataSlot.meta.drange is not None:
                        localDataGroup[info.datasetId].attrs['drange'] = dataSlot.meta.drange
    
                    # Make sure the dataSlot's axistags are updated with the dataset as we just wrote it
                    # (The top-level operator may use an OpReorderAxes, which changed the axisorder)
                    info.axistags = dataSlot.meta.axistags
    
                    wroteInternalData = True

        # Construct a list of all the local dataset ids we want to keep
        localDatasetIds = set()
        for laneIndex, multislot in enumerate(self.topLevelOperator.DatasetGroup):
            for roleIndex, slot in enumerate(multislot):
                if slot.ready() and slot.value.location == DatasetInfo.Location.ProjectInternal:
                    localDatasetIds.add(slot.value.datasetId)
        
        # Delete any datasets in the project that aren't needed any more
        for datasetName in localDataGroup.keys():
            if datasetName not in localDatasetIds:
                del localDataGroup[datasetName]

        if wroteInternalData:
            # We can only re-configure the operator if we're not saving a snapshot
            # We know we're saving a snapshot if the project file isn't the one we deserialized with.
            if self._projectFilePath is None or self._projectFilePath == projectFilePath:
                # Force the operator to setupOutputs() again so it gets data from the project, not external files
                firstInfo = self.topLevelOperator.DatasetGroup[0][0].value
                self.topLevelOperator.DatasetGroup[0][0].setValue(firstInfo, check_changed=False)

        deleteIfPresent(topGroup, 'Role Names')
        topGroup.create_dataset('Role Names', data=self.topLevelOperator.DatasetRoles.value)

        # Access the info group
        infoDir = getOrCreateGroup(topGroup, 'infos')
        
        # Delete all infos
        for infoName in infoDir.keys():
            del infoDir[infoName]
                
        # Rebuild the list of infos
        roleNames = self.topLevelOperator.DatasetRoles.value
        for laneIndex, multislot in enumerate(self.topLevelOperator.DatasetGroup):
            laneGroupName = 'lane{:04d}'.format(laneIndex)
            laneGroup = infoDir.create_group(laneGroupName)

            for roleIndex, slot in enumerate(multislot):
                infoGroup = laneGroup.create_group(roleNames[roleIndex])
                if slot.ready():
                    datasetInfo = slot.value
                    locationString = self.LocationStrings[datasetInfo.location]
                    infoGroup.create_dataset('location', data=locationString)
                    infoGroup.create_dataset('filePath', data=datasetInfo.filePath)
                    infoGroup.create_dataset('datasetId', data=datasetInfo.datasetId)
                    infoGroup.create_dataset('allowLabels', data=datasetInfo.allowLabels)
                    infoGroup.create_dataset('nickname', data=datasetInfo.nickname)
                    infoGroup.create_dataset('fromstack', data=datasetInfo.fromstack)
                    if datasetInfo.drange is not None:
                        infoGroup.create_dataset('drange', data=datasetInfo.drange)

                    # Pull the axistags from the NonTransposedImage, 
                    #  which is what the image looks like before 'forceAxisOrder' is applied, 
                    #  and before 'c' is automatically appended
                    axistags = self.topLevelOperator._NonTransposedImageGroup[laneIndex][roleIndex].meta.axistags
                    infoGroup.create_dataset('axistags', data=axistags.toJSON())
                    axisorder = "".join(tag.key for tag in axistags)
                    infoGroup.create_dataset('axisorder', data=axisorder)
                    if datasetInfo.subvolume_roi is not None:
                        infoGroup.create_dataset('subvolume_roi', data=datasetInfo.subvolume_roi)

        self._dirty = False
    def _serializeToHdf5(self, topGroup, hdf5File, projectFilePath):
        # Write any missing local datasets to the local_data group
        localDataGroup = getOrCreateGroup(topGroup, 'local_data')
        wroteInternalData = False
        for index, slot in enumerate(self.topLevelOperator.Dataset):
            info = slot.value
            # If this dataset should be stored in the project, but it isn't there yet
            if (info.location == DatasetInfo.Location.ProjectInternal
                    and info.datasetId not in localDataGroup.keys()):
                # Obtain the data from the corresponding output and store it to the project.
                dataSlot = self.topLevelOperator.Image[index]

                opWriter = OpH5WriterBigDataset(graph=self.topLevelOperator.graph)
                opWriter.hdf5File.setValue(localDataGroup)
                opWriter.hdf5Path.setValue(info.datasetId)
                opWriter.Image.connect(dataSlot)

                # Trigger the copy
                success = opWriter.WriteImage.value
                assert success

                # Add the axistags attribute to the dataset we just created
                localDataGroup[info.datasetId].attrs['axistags'] = dataSlot.meta.axistags.toJSON()

                # Update the dataset info with no path, just filename base to remind us what this data is
                # (operator will react to the change when we call setValue(), below)
                # Directly set the private member to avoid getting a new datasetid
                info._filePath = PathComponents(info.filePath).filenameBase
                wroteInternalData = True

        # Construct a list of all the local dataset ids we want to keep
        localDatasetIds = [slot.value.datasetId
                           for index, slot in enumerate(self.topLevelOperator.Dataset)
                           if slot.value.location == DatasetInfo.Location.ProjectInternal]

        # Delete any datasets in the project that aren't needed any more
        for datasetName in localDataGroup.keys():
            if datasetName not in localDatasetIds:
                del localDataGroup[datasetName]

        if wroteInternalData:
            # We can only re-configure the operator if we're not saving a snapshot
            # We know we're saving a snapshot if the project file isn't the one we deserialized with.
            if self._projectFilePath is None or self._projectFilePath == projectFilePath:
                # Force the operator to setupOutputs() again so it gets data from the project, not external files
                firstInfo = self.topLevelOperator.Dataset[0].value
                self.topLevelOperator.Dataset[0].setValue(firstInfo, False)

        # Access the info group
        infoDir = getOrCreateGroup(topGroup, 'infos')
        
        # Delete all infos
        for infoName in infoDir.keys():
            del infoDir[infoName]
                
        # Rebuild the list of infos
        for index, slot in enumerate(self.topLevelOperator.Dataset):
            infoGroup = infoDir.create_group('info{:04d}'.format(index))
            datasetInfo = slot.value
            locationString = self.LocationStrings[datasetInfo.location]
            infoGroup.create_dataset('location', data=locationString)
            infoGroup.create_dataset('filePath', data=datasetInfo.filePath)
            infoGroup.create_dataset('datasetId', data=datasetInfo.datasetId)
            infoGroup.create_dataset('allowLabels', data=datasetInfo.allowLabels)
            if datasetInfo.axisorder is not None:
                infoGroup.create_dataset('axisorder', data=datasetInfo.axisorder)
        
        self._dirty = False
Example #35
    def _serializeToHdf5(self, topGroup, hdf5File, projectFilePath):
        obj = getOrCreateGroup(topGroup, "objects")
        for imageIndex, opCarving in enumerate(self._o.innerOperators):
            mst = opCarving._mst

            if mst is None:
                # Nothing to save
                return
            # Populate a list of objects to save:
            objects_to_save = set(list(mst.object_names.keys()))
            objects_already_saved = set(list(topGroup["objects"]))
            # 1.) all objects that are in mst.object_names that are not in saved
            objects_to_save = objects_to_save.difference(objects_already_saved)

            # 2.) add opCarving._dirtyObjects:
            objects_to_save = objects_to_save.union(opCarving._dirtyObjects)

            for name in objects_to_save:
                logger.info("[CarvingSerializer] serializing %s" % name)

                if name in obj and name in mst.object_seeds_fg_voxels:
                    # group already exists
                    logger.info("  -> changed")
                elif name not in mst.object_seeds_fg_voxels:
                    logger.info("  -> deleted")
                else:
                    logger.info("  -> added")

                if name not in mst.object_seeds_fg_voxels:
                    # this object was deleted
                    deleteIfPresent(obj, name)
                    continue

                g = getOrCreateGroup(obj, name)
                deleteIfPresent(g, "fg_voxels")
                deleteIfPresent(g, "bg_voxels")
                deleteIfPresent(g, "sv")
                deleteIfPresent(g, "bg_prio")
                deleteIfPresent(g, "no_bias_below")

                v = mst.object_seeds_fg_voxels[name]
                v = [v[i][:, numpy.newaxis] for i in range(3)]
                v = numpy.concatenate(v, axis=1)
                g.create_dataset("fg_voxels", data=v)
                v = mst.object_seeds_bg_voxels[name]
                v = [v[i][:, numpy.newaxis] for i in range(3)]
                v = numpy.concatenate(v, axis=1)
                g.create_dataset("bg_voxels", data=v)
                g.create_dataset("sv", data=mst.object_lut[name])

                d1 = numpy.asarray(mst.bg_priority[name], dtype=numpy.float32)
                d2 = numpy.asarray(mst.no_bias_below[name], dtype=numpy.int32)
                g.create_dataset("bg_prio", data=d1)
                g.create_dataset("no_bias_below", data=d2)

            opCarving._dirtyObjects = set()

            # save current seeds
            deleteIfPresent(topGroup, "fg_voxels")
            deleteIfPresent(topGroup, "bg_voxels")

            fg_voxels, bg_voxels = opCarving.get_label_voxels()
            if fg_voxels is None:
                return

            if fg_voxels[0].shape[0] > 0:
                v = [fg_voxels[i][:, numpy.newaxis] for i in range(3)]
                v = numpy.concatenate(v, axis=1)
                topGroup.create_dataset("fg_voxels", data=v)

            if bg_voxels[0].shape[0] > 0:
                v = [bg_voxels[i][:, numpy.newaxis] for i in range(3)]
                v = numpy.concatenate(v, axis=1)
                topGroup.create_dataset("bg_voxels", data=v)

            logger.info("saved seeds")
    def _serializeToHdf5(self, topGroup, hdf5File, projectFilePath):
        # Write any missing local datasets to the local_data group
        localDataGroup = getOrCreateGroup(topGroup, 'local_data')
        wroteInternalData = False
        for laneIndex, multislot in enumerate(self.topLevelOperator.DatasetGroup):
            for roleIndex, slot in enumerate(multislot):
                if not slot.ready():
                    continue
                info = slot.value
                # If this dataset should be stored in the project, but it isn't there yet
                if (info.location == DatasetInfo.Location.ProjectInternal
                        and info.datasetId not in localDataGroup.keys()):
                    # Obtain the data from the corresponding output and store it to the project.
                    dataSlot = self.topLevelOperator._NonTransposedImageGroup[laneIndex][roleIndex]

                    try:
                        opWriter = OpH5WriterBigDataset(parent=self.topLevelOperator.parent, graph=self.topLevelOperator.graph)
                        # Compression slows down browsing a lot, and raw data tends
                        # to be noisy and doesn't compress very well, anyway.
                        opWriter.CompressionEnabled.setValue(False)
                        opWriter.hdf5File.setValue(localDataGroup)
                        opWriter.hdf5Path.setValue(info.datasetId)
                        opWriter.Image.connect(dataSlot)

                        # Trigger the copy
                        success = opWriter.WriteImage.value
                        assert success
                    finally:
                        opWriter.cleanUp()
    
                    # Add axistags and drange attributes, in case someone uses this dataset outside ilastik
                    localDataGroup[info.datasetId].attrs['axistags'] = dataSlot.meta.axistags.toJSON()
                    if dataSlot.meta.drange is not None:
                        localDataGroup[info.datasetId].attrs['drange'] = dataSlot.meta.drange
    
                    # Make sure the dataSlot's axistags are updated with the dataset as we just wrote it
                    # (The top-level operator may use an OpReorderAxes, which changed the axisorder)
                    info.axistags = dataSlot.meta.axistags
    
                    wroteInternalData = True

        # Construct a list of all the local dataset ids we want to keep
        localDatasetIds = set()
        for laneIndex, multislot in enumerate(self.topLevelOperator.DatasetGroup):
            for roleIndex, slot in enumerate(multislot):
                if slot.ready() and slot.value.location == DatasetInfo.Location.ProjectInternal:
                    localDatasetIds.add(slot.value.datasetId)
        
        # Delete any datasets in the project that aren't needed any more
        for datasetName in localDataGroup.keys():
            if datasetName not in localDatasetIds:
                del localDataGroup[datasetName]

        if wroteInternalData:
            # We can only re-configure the operator if we're not saving a snapshot
            # We know we're saving a snapshot if the project file isn't the one we deserialized with.
            if self._projectFilePath is None or self._projectFilePath == projectFilePath:
                # Force the operator to setupOutputs() again so it gets data from the project, not external files
                firstInfo = self.topLevelOperator.DatasetGroup[0][0].value
                self.topLevelOperator.DatasetGroup[0][0].setValue(firstInfo, check_changed=False)

        deleteIfPresent(topGroup, 'Role Names')
        topGroup.create_dataset('Role Names', data=self.topLevelOperator.DatasetRoles.value)

        # Access the info group
        infoDir = getOrCreateGroup(topGroup, 'infos')
        
        # Delete all infos
        for infoName in infoDir.keys():
            del infoDir[infoName]
                
        # Rebuild the list of infos
        roleNames = self.topLevelOperator.DatasetRoles.value
        for laneIndex, multislot in enumerate(self.topLevelOperator.DatasetGroup):
            laneGroupName = 'lane{:04d}'.format(laneIndex)
            laneGroup = infoDir.create_group(laneGroupName)

            for roleIndex, slot in enumerate(multislot):
                infoGroup = laneGroup.create_group(roleNames[roleIndex])
                if slot.ready():
                    datasetInfo = slot.value
                    locationString = self.LocationStrings[datasetInfo.location]
                    infoGroup.create_dataset('location', data=locationString)
                    infoGroup.create_dataset('filePath', data=datasetInfo.filePath)
                    infoGroup.create_dataset('datasetId', data=datasetInfo.datasetId)
                    infoGroup.create_dataset('allowLabels', data=datasetInfo.allowLabels)
                    infoGroup.create_dataset('nickname', data=datasetInfo.nickname)
                    if datasetInfo.drange is not None:
                        infoGroup.create_dataset('drange', data=datasetInfo.drange)
                    if datasetInfo.axistags is not None:
                        infoGroup.create_dataset('axistags', data=datasetInfo.axistags.toJSON())
                        axisorder = "".join(tag.key for tag in datasetInfo.axistags)
                        infoGroup.create_dataset('axisorder', data=axisorder)

        self._dirty = False
Example #37
    def _serializeToHdf5(self, topGroup, hdf5File, projectFilePath):
        obj = getOrCreateGroup(topGroup, "objects")
        for imageIndex, opCarving in enumerate(self._o.innerOperators):
            mst = opCarving._mst
            for name in opCarving._dirtyObjects:
                print "[CarvingSerializer] serializing %s" % name

                if name in obj and name in mst.object_seeds_fg_voxels:
                    # group already exists
                    print("  -> changed")
                elif name not in mst.object_seeds_fg_voxels:
                    print("  -> deleted")
                else:
                    print("  -> added")

                g = getOrCreateGroup(obj, name)
                deleteIfPresent(g, "fg_voxels")
                deleteIfPresent(g, "bg_voxels")
                deleteIfPresent(g, "sv")
                deleteIfPresent(g, "bg_prio")
                deleteIfPresent(g, "no_bias_below")

                if name not in mst.object_seeds_fg_voxels:
                    # this object was deleted
                    deleteIfPresent(obj, name)
                    continue

                v = mst.object_seeds_fg_voxels[name]
                v = [v[i][:, numpy.newaxis] for i in range(3)]
                v = numpy.concatenate(v, axis=1)
                g.create_dataset("fg_voxels", data=v)
                v = mst.object_seeds_bg_voxels[name]
                v = [v[i][:, numpy.newaxis] for i in range(3)]
                v = numpy.concatenate(v, axis=1)
                g.create_dataset("bg_voxels", data=v)
                g.create_dataset("sv", data=mst.object_lut[name])

                d1 = numpy.asarray(mst.bg_priority[name], dtype=numpy.float32)
                d2 = numpy.asarray(mst.no_bias_below[name], dtype=numpy.int32)
                g.create_dataset("bg_prio", data=d1)
                g.create_dataset("no_bias_below", data=d2)

            opCarving._dirtyObjects = set()

            # save current seeds
            deleteIfPresent(topGroup, "fg_voxels")
            deleteIfPresent(topGroup, "bg_voxels")

            fg_voxels, bg_voxels = opCarving.get_label_voxels()
            if fg_voxels is None:
                return

            if fg_voxels[0].shape[0] > 0:
                v = [fg_voxels[i][:, numpy.newaxis] for i in range(3)]
                v = numpy.concatenate(v, axis=1)
                topGroup.create_dataset("fg_voxels", data=v)

            if bg_voxels[0].shape[0] > 0:
                v = [bg_voxels[i][:, numpy.newaxis] for i in range(3)]
                v = numpy.concatenate(v, axis=1)
                topGroup.create_dataset("bg_voxels", data=v)

            print "saved seeds"