Example #1
    def convertStacksToH5(cls, filePaths, stackVolumeCacheDir):
        """
        If any of the files in filePaths appear to be globstrings for a stack,
        convert the given stack to hdf5 format.
        
        Return the filePaths list with globstrings replaced by the paths to the new hdf5 volumes.
        """
        import glob
        import hashlib
        import os
        import pickle
        import h5py
        from lazyflow.graph import Graph
        from lazyflow.operators.ioOperators import OpStackToH5Writer

        filePaths = list(filePaths)
        for i, path in enumerate(filePaths):
            if not path or '*' not in path:
                continue
            globstring = path

            # Embrace paranoia:
            # We want to make sure we never re-use a stale cache file for a new dataset,
            #  even if the dataset is located in the same location as a previous one and has the same globstring!
            # Create a SHA-1 hash of the file names and modification dates.
            sha = hashlib.sha1()
            files = [k.replace('\\', '/') for k in glob.glob(path)]
            for f in files:
                sha.update(f.encode('utf-8'))
                sha.update(pickle.dumps(os.stat(f).st_mtime))
            stackFile = sha.hexdigest() + '.h5'
            stackPath = os.path.join(stackVolumeCacheDir,
                                     stackFile).replace('\\', '/')

            # Overwrite original path
            filePaths[i] = stackPath + "/volume/data"

            # Generate the hdf5 if it doesn't already exist
            if os.path.exists(stackPath):
                logger.info(
                    "Using previously generated hdf5 volume for stack {}".
                    format(path))
                logger.info("Volume path: {}".format(filePaths[i]))
            else:
                logger.info("Generating hdf5 volume for stack {}".format(path))
                logger.info("Volume path: {}".format(filePaths[i]))

                if not os.path.exists(stackVolumeCacheDir):
                    os.makedirs(stackVolumeCacheDir)

                with h5py.File(stackPath, 'a') as f:
                    # Configure the conversion operator
                    opWriter = OpStackToH5Writer(graph=Graph())
                    opWriter.hdf5Group.setValue(f)
                    opWriter.hdf5Path.setValue("volume/data")
                    opWriter.GlobString.setValue(globstring)

                    # Initiate the write
                    success = opWriter.WriteImage.value
                    assert success, "Something went wrong when generating an hdf5 file from an image sequence."

        return filePaths
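
For reference, a minimal usage sketch of the method above. The enclosing class name StackConverter and all paths here are placeholders, not part of the original source:

# Hypothetical call site; StackConverter and the paths are assumptions.
paths = ['/data/volume.h5', '/data/slices/img_*.png']
converted = StackConverter.convertStacksToH5(paths, '/tmp/stack_cache')
# Non-glob entries pass through unchanged; the globstring entry is
# replaced by '/tmp/stack_cache/<sha1>.h5/volume/data', where <sha1>
# hashes the matched file names and their modification times, so a
# changed or relocated stack never reuses a stale cache file.
print(converted)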
Example #2
import os
import h5py
from lazyflow.graph import Graph
from lazyflow.operators.ioOperators import OpStackToH5Writer

def convertStacksToH5(filePaths):
    """
    If any of the files in filePaths appear to be globstrings for a stack,
    convert the given stack to hdf5 format.  
    Return the filePaths list with globstrings replaced by the paths to the new hdf5 volumes.
    """
    filePaths = list(filePaths)
    for i, path in enumerate(filePaths):
        if '*' in path:
            globstring = path
            stackPath = os.path.splitext(globstring)[0]
            stackPath = stackPath.replace('*', "_VOLUME")
            stackPath += ".h5"
            
            # Overwrite original path
            filePaths[i] = stackPath + "/volume/data"

            # Generate the hdf5 if it doesn't already exist
            if not os.path.exists(stackPath):
                with h5py.File(stackPath, 'a') as f:
                    # Configure the conversion operator
                    opWriter = OpStackToH5Writer(graph=Graph())
                    opWriter.hdf5Group.setValue(f)
                    opWriter.hdf5Path.setValue("volume/data")
                    opWriter.GlobString.setValue(globstring)
                    
                    # Initiate the write
                    success = opWriter.WriteImage.value
        
    return filePaths
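
Unlike Example #1, this variant derives the hdf5 path purely lexically from the globstring rather than hashing the matched files. A worked trace of that derivation (the path is a placeholder):

import os

globstring = '/data/slices/img_*.png'
stackPath = os.path.splitext(globstring)[0]    # '/data/slices/img_*'
stackPath = stackPath.replace('*', '_VOLUME')  # '/data/slices/img__VOLUME'
stackPath += '.h5'                             # '/data/slices/img__VOLUME.h5'
internalPath = stackPath + '/volume/data'      # path handed back to the caller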
Example #3
    def importStackAsLocalDataset(self, info):
        """
        Add the given stack data to the project file as a local dataset.
        Create a datainfo and append it to our operator.
        """
        with Tracer(traceLogger):

            try:
                self.progressSignal.emit(0)

                projectFileHdf5 = self.mainOperator.ProjectFile.value
                topGroup = self.getOrCreateGroup(projectFileHdf5,
                                                 self.topGroupName)
                localDataGroup = self.getOrCreateGroup(topGroup, 'local_data')

                globstring = info.filePath
                info.location = DatasetInfo.Location.ProjectInternal

                opWriter = OpStackToH5Writer(graph=self.mainOperator.graph)
                opWriter.hdf5Group.setValue(localDataGroup)
                opWriter.hdf5Path.setValue(info.datasetId)
                opWriter.GlobString.setValue(globstring)

                # Forward progress from the writer directly to our applet
                opWriter.progressSignal.subscribe(self.progressSignal.emit)

                success = opWriter.WriteImage.value

                numDatasets = len(self.mainOperator.Dataset)
                self.mainOperator.Dataset.resize(numDatasets + 1)
                self.mainOperator.Dataset[numDatasets].setValue(info)
            finally:
                self.progressSignal.emit(100)

            return success
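
The progress forwarding above is a plain pass-through subscription: the writer's signal is wired directly to the applet's. A self-contained sketch of that pattern (this ProgressSignal class is a hypothetical stand-in, not lazyflow's implementation):

class ProgressSignal:
    def __init__(self):
        self._subscribers = []

    def subscribe(self, callback):
        self._subscribers.append(callback)

    def emit(self, percent):
        for callback in self._subscribers:
            callback(percent)

applet_signal = ProgressSignal()
applet_signal.subscribe(lambda p: print("progress: {}%".format(p)))

writer_signal = ProgressSignal()
# Forward writer progress straight to the applet, as in the example:
writer_signal.subscribe(applet_signal.emit)
writer_signal.emit(50)   # prints "progress: 50%"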
Example #4
    def importStackAsLocalDataset(self, info):
        """
        Add the given stack data to the project file as a local dataset.
        Does not update the topLevelOperator.
        
        :param info: A DatasetInfo object.
                     Note: info.filePath must be a str which lists the stack files, delimited with os.path.pathsep
                     Note: info will be MODIFIED by this function.  Use the modified info when assigning it to a dataset.
        """
        opWriter = None
        try:
            self.progressSignal.emit(0)

            projectFileHdf5 = self.topLevelOperator.ProjectFile.value
            topGroup = getOrCreateGroup(projectFileHdf5, self.topGroupName)
            localDataGroup = getOrCreateGroup(topGroup, 'local_data')

            globstring = info.filePath
            info.location = DatasetInfo.Location.ProjectInternal
            firstPathParts = PathComponents(
                info.filePath.split(os.path.pathsep)[0])
            info.filePath = firstPathParts.externalDirectory + '/??' + firstPathParts.extension
            info.fromstack = True

            # Use absolute path
            cwd = self.topLevelOperator.WorkingDirectory.value
            if os.path.pathsep not in globstring and not os.path.isabs(globstring):
                globstring = os.path.normpath(os.path.join(cwd, globstring))

            opWriter = OpStackToH5Writer(parent=self.topLevelOperator.parent,
                                         graph=self.topLevelOperator.graph)
            opWriter.hdf5Group.setValue(localDataGroup)
            opWriter.hdf5Path.setValue(info.datasetId)
            opWriter.GlobString.setValue(globstring)

            # Forward progress from the writer directly to our applet
            opWriter.progressSignal.subscribe(self.progressSignal.emit)

            success = opWriter.WriteImage.value

        finally:
            # opWriter may not exist if an exception was raised before it was created
            if opWriter is not None:
                opWriter.cleanUp()
            self.progressSignal.emit(100)

        return success
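
A hedged sketch of a call site for this method. DatasetInfo comes from ilastik, while the serializer instance and the slice paths are placeholders:

import os

# Hypothetical usage; 'serializer' is an assumed DataSelectionSerializer
# instance and the slice file names are placeholders.
info = DatasetInfo()
info.filePath = os.path.pathsep.join(['/data/slices/img_00.png',
                                      '/data/slices/img_01.png'])
success = serializer.importStackAsLocalDataset(info)
# info is modified in place, as the docstring warns: location becomes
# ProjectInternal, fromstack is set, and filePath is rewritten to a
# globstring such as '/data/slices/??.png'.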