Code example #1
    def _deserializeFromHdf5(self, topGroup, groupVersion, hdf5File,
                             projectFilePath):
        with Tracer(traceLogger):
            self._projectFilePath = projectFilePath
            self.initWithoutTopGroup(hdf5File, projectFilePath)

            infoDir = topGroup['infos']

            self.mainOperator.Dataset.resize(len(infoDir))
            for index, (infoGroupName,
                        infoGroup) in enumerate(sorted(infoDir.items())):
                datasetInfo = DatasetInfo()

                # Make a reverse-lookup of the location storage strings
                LocationLookup = {
                    v: k
                    for k, v in self.LocationStrings.items()
                }
                datasetInfo.location = LocationLookup[str(
                    infoGroup['location'].value)]

                # Write to the 'private' members to avoid resetting the dataset id
                datasetInfo._filePath = str(infoGroup['filePath'].value)
                datasetInfo._datasetId = str(infoGroup['datasetId'].value)

                # Deserialize the "allow labels" flag
                try:
                    datasetInfo.allowLabels = infoGroup['allowLabels'].value
                except KeyError:
                    pass

                # Deserialize the axisorder (if present)
                try:
                    datasetInfo.axisorder = infoGroup['axisorder'].value
                except KeyError:
                    if ilastik.utility.globals.ImportOptions.default_axis_order is not None:
                        datasetInfo.axisorder = ilastik.utility.globals.ImportOptions.default_axis_order

                # If the data is supposed to be in the project,
                #  check for it now.
                if datasetInfo.location == DatasetInfo.Location.ProjectInternal:
                    if not datasetInfo.datasetId in topGroup[
                            'local_data'].keys():
                        raise RuntimeError(
                            "Corrupt project file.  Could not find data for " +
                            infoGroupName)

                # If the data is supposed to exist outside the project, make sure it really does.
                if datasetInfo.location == DatasetInfo.Location.FileSystem:
                    filePath = PathComponents(
                        datasetInfo.filePath,
                        os.path.split(projectFilePath)[0]).externalPath
                    if not os.path.exists(filePath):
                        raise RuntimeError("Could not find external data: " +
                                           filePath)

                # Give the new info to the operator
                self.mainOperator.Dataset[index].setValue(datasetInfo)

            self._dirty = False
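
The dict comprehension that inverts self.LocationStrings above is the key step for turning the stored string back into an enum value. A minimal standalone sketch of that reverse-lookup pattern follows; the Location class and LocationStrings mapping here are hypothetical stand-ins for DatasetInfo.Location and the serializer's own LocationStrings, not the real ilastik definitions.

# Standalone sketch of the reverse-lookup pattern used above.
# Location and LocationStrings are hypothetical stand-ins for
# DatasetInfo.Location and self.LocationStrings in the real serializer.
class Location:
    FileSystem = 0
    ProjectInternal = 1

LocationStrings = {
    Location.FileSystem: 'FileSystem',
    Location.ProjectInternal: 'ProjectInternal',
}

# Invert the mapping so a string read from the project file can be
# turned back into its enum value.
LocationLookup = {v: k for k, v in LocationStrings.items()}

assert LocationLookup['ProjectInternal'] == Location.ProjectInternal
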
Code example #2
    def _deserializeFromHdf5(self, topGroup, groupVersion, hdf5File, projectFilePath):
        with Tracer(traceLogger):
            self._projectFilePath = projectFilePath
            self.initWithoutTopGroup(hdf5File, projectFilePath)

            infoDir = topGroup['infos']
            
            self.mainOperator.Dataset.resize( len(infoDir) )
            for index, (infoGroupName, infoGroup) in enumerate( sorted(infoDir.items()) ):
                datasetInfo = DatasetInfo()
    
                # Make a reverse-lookup of the location storage strings            
                LocationLookup = { v:k for k,v in self.LocationStrings.items() }
                datasetInfo.location = LocationLookup[ str(infoGroup['location'].value) ]
                
                # Write to the 'private' members to avoid resetting the dataset id
                datasetInfo._filePath = str(infoGroup['filePath'].value)
                datasetInfo._datasetId = str(infoGroup['datasetId'].value)
    
                # Deserialize the "allow labels" flag
                try:
                    datasetInfo.allowLabels = infoGroup['allowLabels'].value
                except KeyError:
                    pass

                # Deserialize the axisorder (if present)
                try:
                    datasetInfo.axisorder = infoGroup['axisorder'].value
                except KeyError:
                    if ilastik.utility.globals.ImportOptions.default_axis_order is not None:
                        datasetInfo.axisorder = ilastik.utility.globals.ImportOptions.default_axis_order
                
                # If the data is supposed to be in the project,
                #  check for it now.
                if datasetInfo.location == DatasetInfo.Location.ProjectInternal:
                    if not datasetInfo.datasetId in topGroup['local_data'].keys():
                        raise RuntimeError("Corrupt project file.  Could not find data for " + infoGroupName)
    
                # If the data is supposed to exist outside the project, make sure it really does.
                if datasetInfo.location == DatasetInfo.Location.FileSystem:
                    filePath = PathComponents( datasetInfo.filePath, os.path.split(projectFilePath)[0] ).externalPath
                    if not os.path.exists(filePath):
                        raise RuntimeError("Could not find external data: " + filePath)
    
                # Give the new info to the operator
                self.mainOperator.Dataset[index].setValue(datasetInfo)
    
            self._dirty = False
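
The try/except KeyError blocks above are how optional fields such as allowLabels and axisorder are read from the info group. Below is a runnable sketch of the same pattern, assuming only h5py; it uses an in-memory file so nothing touches disk. Note that the .value accessor in these snippets is the older h5py API, and ds[()] is its current equivalent.

# Sketch of the optional-field pattern, using an in-memory HDF5 file.
import h5py

with h5py.File('demo.h5', 'w', driver='core', backing_store=False) as f:
    info = f.create_group('infos/info0000')
    info['allowLabels'] = True      # present
    # 'axisorder' is deliberately not written, so reading it raises KeyError

    group = f['infos/info0000']

    try:
        allow_labels = bool(group['allowLabels'][()])
    except KeyError:
        allow_labels = True         # keep the default if the field is missing

    try:
        axisorder = group['axisorder'][()]
    except KeyError:
        axisorder = None            # optional field: fall back silently

    print(allow_labels, axisorder)  # -> True None
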
Code example #3
    def _deserializeFromHdf5(self, topGroup, groupVersion, hdf5File, projectFilePath, headless):
        self._projectFilePath = projectFilePath
        self.initWithoutTopGroup(hdf5File, projectFilePath)
        
        # normally the serializer is not dirty after loading a project file
        # however, when the file was corrupted, the user has the possibility
        # to save the fixed file after loading it.
        dirty = False 
        
        infoDir = topGroup['infos']
        
        self.topLevelOperator.Dataset.resize( len(infoDir) )
        for index, (infoGroupName, infoGroup) in enumerate( sorted(infoDir.items()) ):
            datasetInfo = DatasetInfo()

            # Make a reverse-lookup of the location storage strings            
            LocationLookup = { v:k for k,v in self.LocationStrings.items() }
            datasetInfo.location = LocationLookup[ str(infoGroup['location'].value) ]
            
            # Write to the 'private' members to avoid resetting the dataset id
            datasetInfo._filePath = str(infoGroup['filePath'].value)
            datasetInfo._datasetId = str(infoGroup['datasetId'].value)

            # Deserialize the "allow labels" flag
            try:
                datasetInfo.allowLabels = infoGroup['allowLabels'].value
            except KeyError:
                pass

            # Deserialize the axisorder (if present)
            try:
                datasetInfo.axisorder = infoGroup['axisorder'].value
            except KeyError:
                pass
            
            # If the data is supposed to be in the project,
            #  check for it now.
            if datasetInfo.location == DatasetInfo.Location.ProjectInternal:
                if not datasetInfo.datasetId in topGroup['local_data'].keys():
                    raise RuntimeError("Corrupt project file.  Could not find data for " + infoGroupName)

            # If the data is supposed to exist outside the project, make sure it really does.
            if datasetInfo.location == DatasetInfo.Location.FileSystem:
                pathData = PathComponents( datasetInfo.filePath, os.path.split(projectFilePath)[0])
                filePath = pathData.externalPath
                if not os.path.exists(filePath):
                    if headless:
                        raise RuntimeError("Could not find data at " + filePath)
                    filt = "Image files (" + ' '.join('*.' + x for x in OpDataSelection.SupportedExtensions) + ')'
                    newpath = self.repairFile(filePath, filt)
                    newpath = newpath+pathData.internalPath
                    datasetInfo._filePath = getPathVariants(newpath , os.path.split(projectFilePath)[0])[0]
                    
                    dirty = True
                    
            # Give the new info to the operator
            self.topLevelOperator.Dataset[index].setValue(datasetInfo)

        self._dirty = dirty
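
Code example #3 differs from #1 and #2 mainly in the missing-file branch: in headless mode it raises immediately, otherwise it asks the user to repair the path and marks the serializer dirty. The path resolution itself can be approximated with the standard library; the real code relies on lazyflow's PathComponents, which additionally splits off internal HDF5 paths, so the helper below is only an illustration.

# Simplified sketch of resolving an external file path relative to the
# project file's directory (an approximation of PathComponents.externalPath).
import os

def resolve_external_path(file_path, project_file_path):
    """Interpret file_path relative to the project file's directory."""
    project_dir = os.path.split(project_file_path)[0]
    if os.path.isabs(file_path):
        return file_path
    return os.path.normpath(os.path.join(project_dir, file_path))

path = resolve_external_path('raw/stack.tif', '/home/user/projects/cells.ilp')
if not os.path.exists(path):
    # Example #3 raises here when headless; otherwise it prompts for a repair.
    print("Missing external data:", path)
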
Code example #4
    def deserializeFromHdf5(self, hdf5File, projectFilePath):
        with Tracer(traceLogger):
            # Check the overall file version
            ilastikVersion = hdf5File["ilastikVersion"].value

            # This is the v0.5 import deserializer.  Don't work with 0.6 projects (or anything else).
            if ilastikVersion != 0.5:
                return

            # The 'working directory' for the purpose of constructing absolute
            #  paths from relative paths is the project file's directory.
            projectDir = os.path.split(projectFilePath)[0]
            self.mainOperator.WorkingDirectory.setValue(projectDir)

            # These project file inputs are required, but are not used because the data is treated as "external"
            self.mainOperator.ProjectDataGroup.setValue('DataSets')
            self.mainOperator.ProjectFile.setValue(hdf5File)

            # Access the top group and the info group
            try:
                #dataset = hdf5File["DataSets"]["dataItem00"]["data"]
                dataDir = hdf5File["DataSets"]
            except KeyError:
                # If our group (or subgroup) doesn't exist, then make sure the operator is empty
                self.mainOperator.Dataset.resize(0)
                return

            self.mainOperator.Dataset.resize(len(dataDir))
            for index, (datasetDirName,
                        datasetDir) in enumerate(sorted(dataDir.items())):
                datasetInfo = DatasetInfo()

                # Since we are importing from a 0.5 file, all datasets will be external
                #  to the project (pulled in from the old file as hdf5 datasets)
                datasetInfo.location = DatasetInfo.Location.FileSystem

                # Some older versions of ilastik 0.5 stored the data in tzyxc order.
                # Some power-users can enable a command-line flag that tells us to
                #  transpose the data back to txyzc order when we import the old project.
                if ilastik.utility.globals.ImportOptions.default_axis_order is not None:
                    datasetInfo.axisorder = ilastik.utility.globals.ImportOptions.default_axis_order

                # Write to the 'private' members to avoid resetting the dataset id
                totalDatasetPath = projectFilePath + '/DataSets/' + datasetDirName + '/data'
                datasetInfo._filePath = str(totalDatasetPath)
                datasetInfo._datasetId = datasetDirName  # Use the old dataset name as the new dataset id

                # Give the new info to the operator
                self.mainOperator.Dataset[index].setValue(datasetInfo)
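
The first thing the 0.5 importer does is gate on the ilastikVersion dataset. A small sketch of that check is below, written against current h5py, where ds[()] replaces the legacy ds.value accessor used in the snippet; the in-memory file only exists to make the sketch runnable.

# Sketch of the version gate at the top of the 0.5 importer.
import h5py

def is_v05_project(hdf5_file):
    """Return True only for ilastik 0.5 project files."""
    try:
        version = hdf5_file['ilastikVersion'][()]
    except KeyError:
        return False
    return float(version) == 0.5

with h5py.File('old_project.ilp', 'w', driver='core', backing_store=False) as f:
    f['ilastikVersion'] = 0.5
    print(is_v05_project(f))        # -> True
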
Code example #5
    def deserializeFromHdf5(self, hdf5File, projectFilePath):
        with Tracer(traceLogger):
            # Check the overall file version
            ilastikVersion = hdf5File["ilastikVersion"].value

            # This is the v0.5 import deserializer.  Don't work with 0.6 projects (or anything else).
            if ilastikVersion != 0.5:
                return

            # The 'working directory' for the purpose of constructing absolute
            #  paths from relative paths is the project file's directory.
            projectDir = os.path.split(projectFilePath)[0]
            self.mainOperator.WorkingDirectory.setValue( projectDir )

            # These project file inputs are required, but are not used because the data is treated as "external"
            self.mainOperator.ProjectDataGroup.setValue( 'DataSets' )
            self.mainOperator.ProjectFile.setValue(hdf5File)

            # Access the top group and the info group
            try:
                #dataset = hdf5File["DataSets"]["dataItem00"]["data"]
                dataDir = hdf5File["DataSets"]
            except KeyError:
                # If our group (or subgroup) doesn't exist, then make sure the operator is empty
                self.mainOperator.Dataset.resize( 0 )
                return

            self.mainOperator.Dataset.resize( len(dataDir) )
            for index, (datasetDirName, datasetDir) in enumerate( sorted(dataDir.items()) ):
                datasetInfo = DatasetInfo()

                # Since we are importing from a 0.5 file, all datasets will be external
                #  to the project (pulled in from the old file as hdf5 datasets)
                datasetInfo.location = DatasetInfo.Location.FileSystem

                # Some older versions of ilastik 0.5 stored the data in tzyxc order.
                # Some power-users can enable a command-line flag that tells us to
                #  transpose the data back to txyzc order when we import the old project.
                if ilastik.utility.globals.ImportOptions.default_axis_order is not None:
                    datasetInfo.axisorder = ilastik.utility.globals.ImportOptions.default_axis_order

                # Write to the 'private' members to avoid resetting the dataset id
                totalDatasetPath = projectFilePath + '/DataSets/' + datasetDirName + '/data'
                datasetInfo._filePath = str(totalDatasetPath)
                datasetInfo._datasetId = datasetDirName # Use the old dataset name as the new dataset id

                # Give the new info to the operator
                self.mainOperator.Dataset[index].setValue(datasetInfo)
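
totalDatasetPath concatenates the project file path with the internal HDF5 path of the old dataset, producing strings like /path/old_project.ilp/DataSets/dataItem00/data. The helper below shows one way to split such a combined path back apart; the extension-based heuristic is only an illustration, not lazyflow's PathComponents.

# Sketch of splitting the combined "file path + internal HDF5 path" string.
H5_EXTENSIONS = ('.ilp', '.h5', '.hdf5')

def split_combined_path(total_path):
    """Split '/a/project.ilp/DataSets/item/data' into file and internal parts."""
    for ext in H5_EXTENSIONS:
        idx = total_path.find(ext + '/')
        if idx != -1:
            split_at = idx + len(ext)
            return total_path[:split_at], total_path[split_at:]
    return total_path, ''           # no internal path found

external, internal = split_combined_path('/data/old_project.ilp/DataSets/dataItem00/data')
print(external)                     # /data/old_project.ilp
print(internal)                     # /DataSets/dataItem00/data
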
Code example #6
    def deserializeFromHdf5(self, hdf5File, projectFilePath, headless = False):
        # Check the overall file version
        ilastikVersion = hdf5File["ilastikVersion"].value

        # This is the v0.5 import deserializer.  Don't work with 0.6 projects (or anything else).
        if ilastikVersion != 0.5:
            return

        # The 'working directory' for the purpose of constructing absolute 
        #  paths from relative paths is the project file's directory.
        projectDir = os.path.split(projectFilePath)[0]
        self.topLevelOperator.WorkingDirectory.setValue( projectDir )

        # Access the top group and the info group
        try:
            #dataset = hdf5File["DataSets"]["dataItem00"]["data"]
            dataDir = hdf5File["DataSets"]
        except KeyError:
            # If our group (or subgroup) doesn't exist, then make sure the operator is empty
            self.topLevelOperator.Dataset.resize( 0 )
            return
        
        self.topLevelOperator.Dataset.resize( len(dataDir) )
        for index, (datasetDirName, datasetDir) in enumerate( sorted(dataDir.items()) ):
            datasetInfo = DatasetInfo()

            # We'll set up the link to the dataset in the old project file, 
            #  but we'll set the location to ProjectInternal so that it will 
            #  be copied to the new file when the project is saved.    
            datasetInfo.location = DatasetInfo.Location.ProjectInternal
            
            # Some older versions of ilastik 0.5 stored the data in tzyxc order.
            # Some power-users can enable a command-line flag that tells us to 
            #  transpose the data back to txyzc order when we import the old project.
            default_axis_order = ilastik.utility.globals.ImportOptions.default_axis_order
            if default_axis_order is not None:
                import warnings
                warnings.warn( "Using a strange axis order to import ilastik 0.5 projects: {}".format( default_axis_order ) )
                datasetInfo.axisorder = default_axis_order
            
            # Write to the 'private' members to avoid resetting the dataset id
            totalDatasetPath = projectFilePath + '/DataSets/' + datasetDirName + '/data'
            datasetInfo._filePath = str(totalDatasetPath)
            datasetInfo._datasetId = datasetDirName # Use the old dataset name as the new dataset id
            
            # Give the new info to the operator
            self.topLevelOperator.Dataset[index].setValue(datasetInfo)
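
The default_axis_order flag mentioned in the comments exists because some ilastik 0.5 files stored data as tzyxc rather than txyzc. The transpose it implies can be sketched with numpy; the axis strings and the array below are illustrative only, since the actual reordering happens downstream once DatasetInfo.axisorder is set.

# Sketch of reordering a tzyxc volume into txyzc with numpy.
import numpy as np

stored_order = 'tzyxc'
wanted_order = 'txyzc'

data = np.zeros((2, 10, 20, 30, 3))   # shape interpreted in tzyxc order
perm = [stored_order.index(axis) for axis in wanted_order]
reordered = np.transpose(data, perm)

print(reordered.shape)                # (2, 30, 20, 10, 3), i.e. txyzc
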