Example #1
0
def _prepare_auto_open_project(parsed_args):
    if parsed_args.project is None:
        return None

    from lazyflow.utility.pathHelpers import PathComponents, isUrl

    # Make sure project file exists.
    if not isUrl(parsed_args.project) and not os.path.exists(
            parsed_args.project):
        raise RuntimeError("Project file '" + parsed_args.project +
                           "' does not exist.")

    parsed_args.project = os.path.expanduser(parsed_args.project)
    # convert path to convenient format
    path = PathComponents(parsed_args.project).totalPath()

    # readonly
    if parsed_args.readonly is None:
        parsed_args.readonly = parsed_args.headless

    def loadProject(shell):
        # This should work for both the IlastikShell and the HeadlessShell
        shell.openProjectFile(path, parsed_args.readonly)

    return loadProject
    def _readDatasetInfo(self, infoGroup, projectFilePath, headless):
        """Deserialize a DatasetInfo from its h5py group.

        Returns (info, dirty), where ``dirty`` is True when the project needs
        re-saving (e.g. the user repaired missing file paths, or a headless
        placeholder was substituted). An empty group denotes an unready
        dataset and yields (None, False).
        """
        if not len(infoGroup):
            return None, False

        if "__class__" in infoGroup:
            class_name = infoGroup["__class__"][()].decode("utf-8")
            info_class = self.InfoClassNames[class_name]
        else:
            # Legacy projects only stored a 'location' string.
            location = infoGroup["location"][()].decode("utf-8")
            if location == "FileSystem":
                # legacy support: a lot of DatasetInfo types are saved as "FileSystem"
                legacy_path = infoGroup["filePath"][()].decode("utf-8")
                if isUrl(legacy_path):
                    info_class = UrlDatasetInfo
                elif isRelative(legacy_path):
                    info_class = RelativeFilesystemDatasetInfo
                else:
                    info_class = FilesystemDatasetInfo
            elif location == "ProjectInternal":
                info_class = ProjectInternalDatasetInfo
            else:
                info_class = PreloadedArrayDatasetInfo

        dirty = False
        try:
            datasetInfo = info_class.from_h5_group(infoGroup)
        except FileNotFoundError as err:
            if headless:
                # No GUI available: substitute a placeholder and mark dirty.
                return DummyDatasetInfo.from_h5_group(infoGroup), True

            from PyQt5.QtWidgets import QMessageBox
            from ilastik.widgets.ImageFileDialog import ImageFileDialog

            fixed_paths = []
            for lost_path in err.filename.split(os.path.pathsep):
                answer = QMessageBox.question(
                    None,
                    "Missing file",
                    (f"File {lost_path} could not be found "
                     "(maybe you moved either that file or the .ilp project file). "
                     "Would you like to look for it elsewhere?"),
                    QMessageBox.Yes | QMessageBox.No,
                )
                if answer == QMessageBox.No:
                    raise err
                chosen = ImageFileDialog(None).getSelectedPaths()
                if not chosen:
                    raise err
                dirty = True
                fixed_paths += [str(p) for p in chosen]

            # Rewrite the stored path(s) with the user-selected replacements.
            if "filePath" in infoGroup:
                del infoGroup["filePath"]
            infoGroup["filePath"] = os.path.pathsep.join(fixed_paths).encode("utf-8")
            datasetInfo = FilesystemDatasetInfo.from_h5_group(infoGroup)

        return datasetInfo, dirty
Example #3
0
    def setupOutputs(self):
        """
        Inspect the file name and instantiate and connect an internal operator of the appropriate type.

        Tries each opener in ``openFuncs`` until one recognizes the file;
        raises RuntimeError when none do.
        TODO: Handle datasets of non-standard (non-5d) dimensions.
        """
        filePath = self.FilePath.value
        # isinstance is the idiomatic type check (type() == rejects subclasses).
        assert isinstance(filePath, str), "Error: filePath is not of type str.  It's of type {}".format(type(filePath))

        # Does this look like a relative path?
        useRelativePath = not isUrl(filePath) and not os.path.isabs(filePath)

        if useRelativePath:
            # If using a relative path, we need both inputs before proceeding
            if not self.WorkingDirectory.ready():
                return
            # Convert this relative path into an absolute path
            filePath = os.path.normpath(os.path.join(self.WorkingDirectory.value, filePath)).replace('\\', '/')

        # Clean up before reconfiguring
        if self.internalOperator is not None:
            self.Output.disconnect()
            self.internalOperator.cleanUp()
            self.internalOperator = None
            self.internalOutput = None
        if self._file is not None:
            self._file.close()

        openFuncs = [ self._attemptOpenAsStack,
                      self._attemptOpenAsHdf5,
                      self._attemptOpenAsNpy,
                      self._attemptOpenAsDvidVolume,
                      self._attemptOpenAsBlockwiseFileset,
                      self._attemptOpenAsRESTfulBlockwiseFileset,
                      self._attemptOpenWithVigraImpex ]

        # Try every method of opening the file until one works.
        # NOTE: the original used iterFunc.next(), which is Python-2-only and
        # raises AttributeError on Python 3; a plain for-loop works everywhere.
        for openFunc in openFuncs:
            self.internalOperator, self.internalOutput = openFunc(filePath)
            if self.internalOperator is not None:
                break

        if self.internalOutput is None:
            raise RuntimeError("Can't read " + filePath + " because it has an unrecognized format.")

        # Directly connect our own output to the internal output
        self.Output.connect( self.internalOutput )
Example #4
0
    def setupOutputs(self):
        """
        Inspect the file name and instantiate and connect an internal operator of the appropriate type.

        Each candidate opener is tried in order; the first one that succeeds
        supplies the internal operator/output. Raises RuntimeError if no
        opener recognizes the file format.
        TODO: Handle datasets of non-standard (non-5d) dimensions.
        """
        filePath = self.FilePath.value
        # Prefer isinstance over `type(...) ==` (handles subclasses correctly).
        assert isinstance(filePath, str), "Error: filePath is not of type str.  It's of type {}".format(type(filePath))

        # Does this look like a relative path?
        useRelativePath = not isUrl(filePath) and not os.path.isabs(filePath)

        if useRelativePath:
            # If using a relative path, we need both inputs before proceeding
            if not self.WorkingDirectory.ready():
                return
            # Convert this relative path into an absolute path
            filePath = os.path.normpath(os.path.join(self.WorkingDirectory.value, filePath)).replace('\\', '/')

        # Clean up before reconfiguring
        if self.internalOperator is not None:
            self.Output.disconnect()
            self.internalOperator.cleanUp()
            self.internalOperator = None
            self.internalOutput = None
        if self._file is not None:
            self._file.close()

        openFuncs = [ self._attemptOpenAsStack,
                      self._attemptOpenAsHdf5,
                      self._attemptOpenAsNpy,
                      self._attemptOpenAsDvidVolume,
                      self._attemptOpenAsBlockwiseFileset,
                      self._attemptOpenAsRESTfulBlockwiseFileset,
                      self._attemptOpenWithVigraImpex ]

        # Try every method of opening the file until one works.
        # (Replaces the Python-2-only `iterFunc.next()` idiom, which raises
        # AttributeError on Python 3.)
        for openFunc in openFuncs:
            self.internalOperator, self.internalOutput = openFunc(filePath)
            if self.internalOperator is not None:
                break

        if self.internalOutput is None:
            raise RuntimeError("Can't read " + filePath + " because it has an unrecognized format.")

        # Directly connect our own output to the internal output
        self.Output.connect( self.internalOutput )
Example #5
0
def _prepare_auto_open_project( parsed_args ):
    if parsed_args.project is None:
        return None

    from lazyflow.utility.pathHelpers import PathComponents, isUrl

    # Make sure project file exists.
    if not isUrl(parsed_args.project) and not os.path.exists(parsed_args.project):
        raise RuntimeError("Project file '" + parsed_args.project + "' does not exist.")

    parsed_args.project = os.path.expanduser(parsed_args.project)
    #convert path to convenient format
    path = PathComponents(parsed_args.project).totalPath()
    
    def loadProject(shell):
        # This should work for both the IlastikShell and the HeadlessShell
        shell.openProjectFile(path, parsed_args.readonly)
    return loadProject
Example #6
0
    def setupOutputs(self):
        """
        Inspect the file name and instantiate and connect an internal operator of the appropriate type.

        Tries each opener in ``openFuncs`` until one recognizes the file;
        raises RuntimeError when none do. Optionally wraps the output in a
        subregion operator and a metadata injector.
        TODO: Handle datasets of non-standard (non-5d) dimensions.
        """
        filePath = self.FilePath.value
        assert isinstance(filePath, (str,unicode)), "Error: filePath is not of type str.  It's of type {}".format(type(filePath))

        # Does this look like a relative path?
        useRelativePath = not isUrl(filePath) and not os.path.isabs(filePath)

        if useRelativePath:
            # If using a relative path, we need both inputs before proceeding
            if not self.WorkingDirectory.ready():
                return
            # Convert this relative path into an absolute path
            filePath = os.path.normpath(os.path.join(self.WorkingDirectory.value, filePath)).replace('\\','/')

        # Clean up before reconfiguring
        if self.internalOperators:
            self.Output.disconnect()
            self.opInjector.cleanUp()
            # Tear down in reverse of construction order.
            for op in self.internalOperators[::-1]:
                op.cleanUp()
            self.internalOperators = []
            self.internalOutput = None
        if self._file is not None:
            self._file.close()

        openFuncs = [ self._attemptOpenAsUfmf,
                      self._attemptOpenAsMmf,
                      self._attemptOpenAsDvidVolume,
                      self._attemptOpenAsTiffStack,
                      self._attemptOpenAsStack,
                      self._attemptOpenAsHdf5,
                      self._attemptOpenAsNpy,
                      self._attemptOpenAsRawBinary,
                      self._attemptOpenAsBlockwiseFileset,
                      self._attemptOpenAsRESTfulBlockwiseFileset,
                      self._attemptOpenAsTiledVolume,
                      self._attemptOpenAsTiff,
                      self._attemptOpenWithVigraImpex ]

        # Try every method of opening the file until one works.
        # (A plain for-loop replaces the Python-2-only iterFunc.next() idiom.)
        for openFunc in openFuncs:
            self.internalOperators, self.internalOutput = openFunc(filePath)
            if self.internalOperators:
                break

        if self.internalOutput is None:
            raise RuntimeError("Can't read " + filePath + " because it has an unrecognized format.")

        # If we've got a ROI, append a subregion operator.
        if self.SubVolumeRoi.ready():
            self._opSubRegion = OpSubRegion( parent=self )
            self._opSubRegion.Roi.setValue( self.SubVolumeRoi.value )
            self._opSubRegion.Input.connect( self.internalOutput )
            self.internalOutput = self._opSubRegion.Output

        self.opInjector = OpMetadataInjector( parent=self )
        self.opInjector.Input.connect( self.internalOutput )

        # Add metadata for estimated RAM usage if the internal operator didn't already provide it.
        # NOTE(review): the original attribute name 'ram_per_pixelram_usage_per_requested_pixel'
        # is two identifiers fused together; 'ram_usage_per_requested_pixel' is used here.
        if self.internalOutput.meta.ram_usage_per_requested_pixel is None:
            ram_per_pixel = self.internalOutput.meta.dtype().nbytes
            if 'c' in self.internalOutput.meta.getTaggedShape():
                ram_per_pixel *= self.internalOutput.meta.getTaggedShape()['c']
            self.opInjector.Metadata.setValue( {'ram_usage_per_requested_pixel' : ram_per_pixel} )
        else:
            # Nothing to add
            self.opInjector.Metadata.setValue( {} )

        # Directly connect our own output to the internal output
        self.Output.connect( self.opInjector.Output )
    def _readDatasetInfo(self, infoGroup, localDataGroup, projectFilePath, headless):
        """Deserialize a DatasetInfo from its h5py group.

        Returns (datasetInfo, dirty); ``dirty`` is True when the user repaired
        a missing file path, meaning the project should be re-saved.
        Raises RuntimeError if project-internal data is missing, or (in
        headless mode) if external data cannot be found on disk.
        """
        # Unready datasets are represented with an empty group.
        if len( infoGroup ) == 0:
            return None, False
        datasetInfo = DatasetInfo()

        # Make a reverse-lookup of the location storage strings
        LocationLookup = { v:k for k,v in self.LocationStrings.items() }
        datasetInfo.location = LocationLookup[ str(infoGroup['location'].value) ]

        # Write to the 'private' members to avoid resetting the dataset id
        datasetInfo._filePath = infoGroup['filePath'].value
        datasetInfo._datasetId = infoGroup['datasetId'].value

        # The fields below are optional: older project files may lack them,
        # so each read is wrapped in its own try/except KeyError.
        try:
            datasetInfo.allowLabels = infoGroup['allowLabels'].value
        except KeyError:
            pass

        try:
            datasetInfo.drange = tuple( infoGroup['drange'].value )
        except KeyError:
            pass

        try:
            datasetInfo.nickname = infoGroup['nickname'].value
        except KeyError:
            # Fall back to the file's base name as the nickname.
            datasetInfo.nickname = PathComponents(datasetInfo.filePath).filenameBase

        try:
            tags = vigra.AxisTags.fromJSON( infoGroup['axistags'].value )
            datasetInfo.axistags = tags
        except KeyError:
            # Old projects just have an 'axisorder' field instead of full axistags
            try:
                axisorder = infoGroup['axisorder'].value
                datasetInfo.axistags = vigra.defaultAxistags(axisorder)
            except KeyError:
                pass

        # If the data is supposed to be in the project,
        #  check for it now.
        if datasetInfo.location == DatasetInfo.Location.ProjectInternal:
            if not datasetInfo.datasetId in localDataGroup.keys():
                raise RuntimeError("Corrupt project file.  Could not find data for " + infoGroup.name)

        dirty = False
        # If the data is supposed to exist outside the project, make sure it really does.
        if datasetInfo.location == DatasetInfo.Location.FileSystem and not isUrl(datasetInfo.filePath):
            pathData = PathComponents( datasetInfo.filePath, os.path.split(projectFilePath)[0])
            filePath = pathData.externalPath
            if not os.path.exists(filePath):
                if headless:
                    raise RuntimeError("Could not find data at " + filePath)
                # Interactive mode: ask the user to locate the moved file.
                filt = "Image files (" + ' '.join('*.' + x for x in OpDataSelection.SupportedExtensions) + ')'
                newpath = self.repairFile(filePath, filt)
                if pathData.internalPath is not None:
                    newpath += pathData.internalPath
                datasetInfo._filePath = getPathVariants(newpath , os.path.split(projectFilePath)[0])[0]
                dirty = True

        return datasetInfo, dirty
    def setupOutputs(self):
        """
        Inspect the file name and instantiate and connect an internal operator of the appropriate type.

        Each opener in ``openFuncs`` is tried in order; the first that
        recognizes the file wins. Raises RuntimeError if none do.
        TODO: Handle datasets of non-standard (non-5d) dimensions.
        """
        filePath = self.FilePath.value
        assert isinstance(
            filePath,
            (str, unicode
             )), "Error: filePath is not of type str.  It's of type {}".format(
                 type(filePath))

        # Does this look like a relative path?
        useRelativePath = not isUrl(filePath) and not os.path.isabs(filePath)

        if useRelativePath:
            # If using a relative path, we need both inputs before proceeding
            if not self.WorkingDirectory.ready():
                return
            # Convert this relative path into an absolute path
            filePath = os.path.normpath(
                os.path.join(self.WorkingDirectory.value,
                             filePath)).replace('\\', '/')

        # Clean up before reconfiguring
        if self.internalOperators:
            self.Output.disconnect()
            self.opInjector.cleanUp()
            # Tear down in reverse of construction order.
            for op in self.internalOperators[::-1]:
                op.cleanUp()
            self.internalOperators = []
            self.internalOutput = None
        if self._file is not None:
            self._file.close()

        openFuncs = [
            self._attemptOpenAsKlb, self._attemptOpenAsUfmf,
            self._attemptOpenAsMmf, self._attemptOpenAsDvidVolume,
            self._attemptOpenAsTiffStack, self._attemptOpenAsStack,
            self._attemptOpenAsHdf5, self._attemptOpenAsNpy,
            self._attemptOpenAsRawBinary, self._attemptOpenAsBlockwiseFileset,
            self._attemptOpenAsRESTfulBlockwiseFileset,
            self._attemptOpenAsTiledVolume, self._attemptOpenAsTiff,
            self._attemptOpenWithVigraImpex
        ]

        # Try every method of opening the file until one works.
        # (A plain for-loop replaces the Python-2-only iterFunc.next() idiom.)
        for openFunc in openFuncs:
            self.internalOperators, self.internalOutput = openFunc(filePath)
            if self.internalOperators:
                break

        if self.internalOutput is None:
            raise RuntimeError("Can't read " + filePath +
                               " because it has an unrecognized format.")

        # If we've got a ROI, append a subregion operator.
        if self.SubVolumeRoi.ready():
            self._opSubRegion = OpSubRegion(parent=self)
            self._opSubRegion.Roi.setValue(self.SubVolumeRoi.value)
            self._opSubRegion.Input.connect(self.internalOutput)
            self.internalOutput = self._opSubRegion.Output

        self.opInjector = OpMetadataInjector(parent=self)
        self.opInjector.Input.connect(self.internalOutput)

        # Add metadata for estimated RAM usage if the internal operator didn't already provide it.
        # NOTE(review): the original attribute name 'ram_per_pixelram_usage_per_requested_pixel'
        # is two identifiers fused together; 'ram_usage_per_requested_pixel' is used here.
        if self.internalOutput.meta.ram_usage_per_requested_pixel is None:
            ram_per_pixel = self.internalOutput.meta.dtype().nbytes
            if 'c' in self.internalOutput.meta.getTaggedShape():
                ram_per_pixel *= self.internalOutput.meta.getTaggedShape()['c']
            self.opInjector.Metadata.setValue(
                {'ram_usage_per_requested_pixel': ram_per_pixel})
        else:
            # Nothing to add
            self.opInjector.Metadata.setValue({})

        # Directly connect our own output to the internal output
        self.Output.connect(self.opInjector.Output)
    def _readDatasetInfo(self, infoGroup, localDataGroup, projectFilePath, headless):
        """Deserialize a DatasetInfo from its h5py group.

        Returns (datasetInfo, dirty); ``dirty`` is True when the user repaired
        a missing file path, meaning the project should be re-saved.
        Raises RuntimeError if project-internal data is missing, or (in
        headless mode) if external data cannot be found on disk.
        """
        # Unready datasets are represented with an empty group.
        if len( infoGroup ) == 0:
            return None, False
        datasetInfo = DatasetInfo()

        # Make a reverse-lookup of the location storage strings
        LocationLookup = { v:k for k,v in self.LocationStrings.items() }
        datasetInfo.location = LocationLookup[ str(infoGroup['location'].value) ]

        # Write to the 'private' members to avoid resetting the dataset id
        datasetInfo._filePath = infoGroup['filePath'].value
        datasetInfo._datasetId = infoGroup['datasetId'].value

        # The fields below are optional: older project files may lack them,
        # so each read is wrapped in its own try/except KeyError.
        try:
            datasetInfo.allowLabels = infoGroup['allowLabels'].value
        except KeyError:
            pass

        try:
            datasetInfo.drange = tuple( infoGroup['drange'].value )
        except KeyError:
            pass

        try:
            datasetInfo.nickname = infoGroup['nickname'].value
        except KeyError:
            # Fall back to the file's base name as the nickname.
            datasetInfo.nickname = PathComponents(datasetInfo.filePath).filenameBase

        try:
            datasetInfo.fromstack = infoGroup['fromstack'].value
        except KeyError:
            # Guess based on the storage setting and original filepath
            datasetInfo.fromstack = ( datasetInfo.location == DatasetInfo.Location.ProjectInternal
                                      and ( ('?' in datasetInfo._filePath) or (os.path.pathsep in datasetInfo._filePath) ) )

        try:
            tags = vigra.AxisTags.fromJSON( infoGroup['axistags'].value )
            datasetInfo.axistags = tags
        except KeyError:
            # Old projects just have an 'axisorder' field instead of full axistags
            try:
                axisorder = infoGroup['axisorder'].value
                datasetInfo.axistags = vigra.defaultAxistags(axisorder)
            except KeyError:
                pass

        try:
            start, stop = map( tuple, infoGroup['subvolume_roi'].value )
            datasetInfo.subvolume_roi = (start, stop)
        except KeyError:
            pass

        # If the data is supposed to be in the project,
        #  check for it now.
        if datasetInfo.location == DatasetInfo.Location.ProjectInternal:
            if not datasetInfo.datasetId in localDataGroup.keys():
                raise RuntimeError("Corrupt project file.  Could not find data for " + infoGroup.name)

        dirty = False
        # If the data is supposed to exist outside the project, make sure it really does.
        if datasetInfo.location == DatasetInfo.Location.FileSystem and not isUrl(datasetInfo.filePath):
            pathData = PathComponents( datasetInfo.filePath, os.path.split(projectFilePath)[0])
            filePath = pathData.externalPath
            if not os.path.exists(filePath):
                if headless:
                    raise RuntimeError("Could not find data at " + filePath)
                # Interactive mode: ask the user to locate the moved file.
                filt = "Image files (" + ' '.join('*.' + x for x in OpDataSelection.SupportedExtensions) + ')'
                newpath = self.repairFile(filePath, filt)
                if pathData.internalPath is not None:
                    newpath += pathData.internalPath
                datasetInfo._filePath = getPathVariants(newpath , os.path.split(projectFilePath)[0])[0]
                dirty = True

        return datasetInfo, dirty
    def _readDatasetInfo(self, infoGroup, localDataGroup, projectFilePath, headless):
        """Deserialize a DatasetInfo from its h5py group (Python 3 variant:
        string datasets are stored as bytes and must be utf-8 decoded).

        Returns (datasetInfo, dirty); ``dirty`` is True when the user repaired
        a missing file path, meaning the project should be re-saved.
        Raises RuntimeError if project-internal data is missing, or when
        retraining in headless mode with data missing from disk.
        """
        # Unready datasets are represented with an empty group.
        if len( infoGroup ) == 0:
            return None, False

        datasetInfo = DatasetInfo()

        # Make a reverse-lookup of the location storage strings
        LocationLookup = { v:k for k,v in list(self.LocationStrings.items()) }
        datasetInfo.location = LocationLookup[ infoGroup['location'].value.decode('utf-8') ]

        # Write to the 'private' members to avoid resetting the dataset id
        datasetInfo._filePath = infoGroup['filePath'].value.decode('utf-8')
        datasetInfo._datasetId = infoGroup['datasetId'].value.decode('utf-8')

        # The fields below are optional: older project files may lack them,
        # so each read is wrapped in its own try/except KeyError.
        try:
            datasetInfo.allowLabels = infoGroup['allowLabels'].value
        except KeyError:
            pass

        try:
            datasetInfo.drange = tuple( infoGroup['drange'].value )
        except KeyError:
            pass

        try:
            datasetInfo.laneShape = tuple(infoGroup['shape'].value)
        except KeyError:
            pass

        try:
            datasetInfo.laneDtype = numpy.dtype(infoGroup['dtype'].value.decode('utf-8'))
        except KeyError:
            pass

        try:
            datasetInfo.display_mode = infoGroup['display_mode'].value.decode('utf-8')
        except KeyError:
            pass

        try:
            datasetInfo.nickname = infoGroup['nickname'].value.decode('utf-8')
        except KeyError:
            # Fall back to the file's base name as the nickname.
            datasetInfo.nickname = PathComponents(datasetInfo.filePath).filenameBase

        try:
            datasetInfo.fromstack = infoGroup['fromstack'].value
        except KeyError:
            # Guess based on the storage setting and original filepath
            datasetInfo.fromstack = ( datasetInfo.location == DatasetInfo.Location.ProjectInternal
                                      and ( ('?' in datasetInfo._filePath) or (os.path.pathsep in datasetInfo._filePath) ) )

        try:
            tags = vigra.AxisTags.fromJSON( infoGroup['axistags'].value.decode('utf-8') )
            datasetInfo.axistags = tags
        except KeyError:
            # Old projects just have an 'axisorder' field instead of full axistags
            try:
                axisorder = infoGroup['axisorder'].value.decode('utf-8')
                datasetInfo.axistags = vigra.defaultAxistags(axisorder)
            except KeyError:
                pass

        try:
            start, stop = list(map( tuple, infoGroup['subvolume_roi'].value ))
            datasetInfo.subvolume_roi = (start, stop)
        except KeyError:
            pass

        # If the data is supposed to be in the project,
        #  check for it now.
        if datasetInfo.location == DatasetInfo.Location.ProjectInternal:
            if not datasetInfo.datasetId in list(localDataGroup.keys()):
                raise RuntimeError("Corrupt project file.  Could not find data for " + infoGroup.name)

        dirty = False

        # If the data is supposed to exist outside the project, make sure it really does.
        if datasetInfo.location == DatasetInfo.Location.FileSystem \
                and not isUrl(datasetInfo.filePath):
            pathData = PathComponents(datasetInfo.filePath, os.path.split(projectFilePath)[0])
            filePath = pathData.externalPath
            if not os.path.exists(filePath):
                if headless:
                    if self._shouldRetrain:
                        # Retraining needs the real raw data; fail loudly.
                        raise RuntimeError(
                            "Retrain was passed in headless mode, "
                            "but could not find data at " + filePath)
                    else:
                        assert datasetInfo.laneShape, \
                            "Headless mode without raw data not supported in old (pre 1.3.2) project files"
                        # Raw data does not exist in headless, use fake data provider
                        datasetInfo.realDataSource = False
                else:
                    # Try to get a new path for the lost file from the user
                    filt = "Image files (" + ' '.join('*.' + x for x in OpDataSelection.SupportedExtensions) + ')'
                    newpath = self.repairFile(filePath, filt)
                    if pathData.internalPath is not None:
                        newpath += pathData.internalPath
                    datasetInfo._filePath = \
                    getPathVariants(newpath, os.path.split(projectFilePath)[0])[0]
                    dirty = True

        return datasetInfo, dirty