Example #1
    def setupOutputs(self):
        self.cleanupOnDiskView()

        # FIXME: If RawData becomes unready() at the same time as RawDatasetInfo(), then
        #          we have no guarantees about which one will trigger setupOutputs() first.
        #        It is therefore possible for 'RawDatasetInfo' to appear ready() to us,
        #          even though its upstream partner is UNready.  We are about to get the
        #          unready() notification, but it will come too late to prevent our
        #          setupOutputs method from being called.
        #        Without proper graph setup transaction semantics, we have to use this
        #          hack as a workaround.
        try:
            rawInfo = self.RawDatasetInfo.value
        except Exception:
            for oslot in self.outputs.values():
                if oslot.partner is None:
                    oslot.meta.NOTREADY = True
            return

        dataset_dir = PathComponents(rawInfo.filePath).externalDirectory
        abs_dataset_dir, _ = getPathVariants(dataset_dir,
                                             self.WorkingDirectory.value)
        known_keys = {}
        known_keys['dataset_dir'] = abs_dataset_dir
        known_keys['nickname'] = rawInfo.nickname

        # Disconnect to open the 'transaction'
        if self._opImageOnDiskProvider is not None:
            self._opImageOnDiskProvider.TransactionSlot.disconnect()
        self._opFormattedExport.TransactionSlot.disconnect()

        # Blank the internal path while we manipulate the external path
        #  to avoid invalid intermediate states of ExportPath
        self._opFormattedExport.OutputInternalPath.setValue("")

        # use partial formatting to fill in non-coordinate name fields
        name_format = self.OutputFilenameFormat.value
        partially_formatted_name = format_known_keys(name_format, known_keys)

        # Convert to absolute path before configuring the internal op
        abs_path, _ = getPathVariants(partially_formatted_name,
                                      self.WorkingDirectory.value)
        self._opFormattedExport.OutputFilenameFormat.setValue(abs_path)

        # use partial formatting on the internal dataset name, too
        internal_dataset_format = self.OutputInternalPath.value
        partially_formatted_dataset_name = format_known_keys(
            internal_dataset_format, known_keys)
        self._opFormattedExport.OutputInternalPath.setValue(
            partially_formatted_dataset_name)

        # Re-connect to finish the 'transaction'
        self._opFormattedExport.TransactionSlot.connect(self.TransactionSlot)
        if self._opImageOnDiskProvider is not None:
            self._opImageOnDiskProvider.TransactionSlot.connect(
                self.TransactionSlot)

        self.setupOnDiskView()
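
A minimal, self-contained sketch of the "transaction" idea described in the comments above: detach the listener, apply several related changes, then re-attach so downstream configuration runs once against a consistent final state. This is a generic analogy with invented names, not lazyflow's slot API.

    class ToySlot:
        """Tiny stand-in for a value holder that notifies its listener on every change."""

        def __init__(self):
            self._value = None
            self._listener = None

        def connect(self, listener):
            self._listener = listener
            if self._value is not None:
                listener(self._value)

        def disconnect(self):
            self._listener = None

        def set_value(self, value):
            self._value = value
            if self._listener is not None:
                self._listener(value)

    def reconfigure(slot, external_path, internal_path):
        # "Open the transaction": silence notifications while several fields change.
        slot.disconnect()
        slot.set_value(("", ""))  # blank intermediate state, like clearing OutputInternalPath above
        slot.set_value((external_path, internal_path))
        # "Close the transaction": the listener now sees only the final, consistent value.
        slot.connect(lambda value: print("downstream reconfigured with", value))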
Example #2
    def setupOutputs(self):
        # FIXME: If RawData becomes unready() at the same time as RawDatasetInfo(), then
        #          we have no guarantees about which one will trigger setupOutputs() first.
        #        It is therefore possible for 'RawDatasetInfo' to appear ready() to us,
        #          even though its upstream partner is UNready.  We are about to get the
        #          unready() notification, but it will come too late to prevent our
        #          setupOutputs method from being called.
        #        Without proper graph setup transaction semantics, we have to use this
        #          hack as a workaround.
        try:
            rawInfo = self.RawDatasetInfo.value
        except Exception:
            for oslot in list(self.outputs.values()):
                if oslot.upstream_slot is None:
                    oslot.meta.NOTREADY = True
            return

        selection_index = self.InputSelection.value
        if not self.Inputs[selection_index].ready():
            for oslot in list(self.outputs.values()):
                if oslot.upstream_slot is None:
                    oslot.meta.NOTREADY = True
            return
        self._opFormattedExport.Input.connect(self.Inputs[selection_index])

        dataset_dir = str(rawInfo.default_output_dir)
        abs_dataset_dir, _ = getPathVariants(dataset_dir, self.WorkingDirectory.value)
        known_keys = {}
        known_keys["dataset_dir"] = abs_dataset_dir
        nickname = rawInfo.nickname.replace("*", "")
        if os.path.pathsep in nickname:
            nickname = PathComponents(nickname.split(os.path.pathsep)[0]).fileNameBase
        known_keys["nickname"] = nickname
        result_types = self.SelectionNames.value
        known_keys["result_type"] = result_types[selection_index]

        self._opFormattedExport.TransactionSlot.disconnect()

        # Blank the internal path while we manipulate the external path
        #  to avoid invalid intermediate states of ExportPath
        self._opFormattedExport.OutputInternalPath.setValue("")

        # use partial formatting to fill in non-coordinate name fields
        name_format = self.OutputFilenameFormat.value
        partially_formatted_name = format_known_keys(name_format, known_keys)

        # Convert to absolute path before configuring the internal op
        abs_path, _ = getPathVariants(partially_formatted_name, self.WorkingDirectory.value)
        self._opFormattedExport.OutputFilenameFormat.setValue(abs_path)

        # use partial formatting on the internal dataset name, too
        internal_dataset_format = self.OutputInternalPath.value
        partially_formatted_dataset_name = format_known_keys(internal_dataset_format, known_keys)
        self._opFormattedExport.OutputInternalPath.setValue(partially_formatted_dataset_name)

        # Re-connect to finish the 'transaction'
        self._opFormattedExport.TransactionSlot.connect(self.TransactionSlot)
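
Note the nickname cleanup above: any "*" is stripped, and if the nickname is a pathsep-joined list of file names (as can happen for file stacks), only the base name of the first entry is kept. A rough stdlib-only sketch of the same idea, approximating PathComponents(...).fileNameBase with os.path (the real helper also handles internal HDF5 paths):

    import os

    def clean_nickname(nickname):
        nickname = nickname.replace("*", "")
        if os.path.pathsep in nickname:
            first = nickname.split(os.path.pathsep)[0]
            nickname = os.path.splitext(os.path.basename(first))[0]
        return nickname

    # e.g. on Linux (os.path.pathsep == ":"): clean_nickname("vol_a.h5:vol_b.h5") -> "vol_a"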
Example #3
    def setupOutputs(self):
        self.cleanupOnDiskView()        

        # FIXME: If RawData becomes unready() at the same time as RawDatasetInfo(), then
        #          we have no guarantees about which one will trigger setupOutputs() first.
        #        It is therefore possible for 'RawDatasetInfo' to appear ready() to us,
        #          even though its upstream partner is UNready.  We are about to get the
        #          unready() notification, but it will come too late to prevent our
        #          setupOutputs method from being called.
        #        Without proper graph setup transaction semantics, we have to use this
        #          hack as a workaround.
        try:
            rawInfo = self.RawDatasetInfo.value
        except Exception:
            for oslot in self.outputs.values():
                if oslot.partner is None:
                    oslot.meta.NOTREADY = True
            return

        dataset_dir = PathComponents(rawInfo.filePath).externalDirectory
        abs_dataset_dir, _ = getPathVariants(dataset_dir, self.WorkingDirectory.value)
        known_keys = {}        
        known_keys['dataset_dir'] = abs_dataset_dir
        known_keys['nickname'] = rawInfo.nickname

        # Disconnect to open the 'transaction'
        if self._opImageOnDiskProvider is not None:
            self._opImageOnDiskProvider.TransactionSlot.disconnect()
        self._opFormattedExport.TransactionSlot.disconnect()

        # Blank the internal path while we manipulate the external path
        #  to avoid invalid intermediate states of ExportPath
        self._opFormattedExport.OutputInternalPath.setValue( "" )

        # use partial formatting to fill in non-coordinate name fields
        name_format = self.OutputFilenameFormat.value
        partially_formatted_name = format_known_keys( name_format, known_keys )
        
        # Convert to absolute path before configuring the internal op
        abs_path, _ = getPathVariants( partially_formatted_name, self.WorkingDirectory.value )
        self._opFormattedExport.OutputFilenameFormat.setValue( abs_path )

        # use partial formatting on the internal dataset name, too
        internal_dataset_format = self.OutputInternalPath.value 
        partially_formatted_dataset_name = format_known_keys( internal_dataset_format, known_keys )
        self._opFormattedExport.OutputInternalPath.setValue( partially_formatted_dataset_name )

        # Re-connect to finish the 'transaction'
        self._opFormattedExport.TransactionSlot.connect( self.TransactionSlot )
        if self._opImageOnDiskProvider is not None:
            self._opImageOnDiskProvider.TransactionSlot.connect( self.TransactionSlot )
        
        self.setupOnDiskView()
Example #4
    def _initStorageCombo(self):
        # If there's only one dataset, show the path in the combo
        showpaths = False
        relPath = None
        if len( self._laneIndexes ) == 1:
            op = list(self.tempOps.values())[0]
            info = op.Dataset.value
            cwd = op.WorkingDirectory.value
            filePath = PathComponents(info.filePath).externalPath
            absPath, relPath = getPathVariants(filePath, cwd)
            
            # Show the paths even if the data is from a stack
            #  (they are grayed out, but potentially informative).
            # Previously: showpaths = not info.fromstack
            showpaths = True

        if showpaths:
            self.storageComboBox.addItem( "Copied to Project File", userData=StorageLocation.ProjectFile )
            self.storageComboBox.addItem( decode_to_qstring("Absolute Link: " + absPath), userData=StorageLocation.AbsoluteLink )
            if relPath is not None:
                self.storageComboBox.addItem( decode_to_qstring("Relative Link: " + relPath), userData=StorageLocation.RelativeLink )
        else:
            self.storageComboBox.addItem( "Copied to Project File", userData=StorageLocation.ProjectFile )
            self.storageComboBox.addItem( "Absolute Link", userData=StorageLocation.AbsoluteLink )
            self.storageComboBox.addItem( "Relative Link", userData=StorageLocation.RelativeLink )

        self.storageComboBox.setCurrentIndex(-1)
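
getPathVariants() appears throughout these examples: given a path and a working directory, it returns both an absolute and a relative form (the relative form can be None, as the relPath check above shows). A rough stdlib-only stand-in to make the combo-box labels concrete; the real lazyflow helper handles more cases (URLs, normalization details):

    import os

    def path_variants(path, cwd):
        """Return (absolute, relative) variants of `path` with respect to `cwd`."""
        abs_path = path if os.path.isabs(path) else os.path.normpath(os.path.join(cwd, path))
        try:
            rel_path = os.path.relpath(abs_path, cwd)
        except ValueError:
            # e.g. paths on different Windows drives have no relative form
            rel_path = None
        return abs_path, rel_path

    # path_variants("data/raw.h5", "/home/user/project")
    # -> ("/home/user/project/data/raw.h5", "data/raw.h5")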
Example #5
    def _applyStorageComboToTempOps(self, index):
        if index == -1:
            return

        newStorageLocation, goodcast = self.storageComboBox.itemData(
            index).toInt()
        assert goodcast

        # Save a copy of our settings
        oldInfos = {}
        for laneIndex, op in self.tempOps.items():
            oldInfos[laneIndex] = copy.copy(op.Dataset.value)

        # Attempt to apply to all temp operators
        try:
            for laneIndex, op in self.tempOps.items():
                info = copy.copy(op.Dataset.value)

                if info.location == DatasetInfo.Location.ProjectInternal:
                    thisLaneStorage = StorageLocation.ProjectFile
                elif info.location == DatasetInfo.Location.FileSystem:
                    # Determine if the path is relative or absolute
                    if isUrl(info.filePath) or os.path.isabs(info.filePath):
                        thisLaneStorage = StorageLocation.AbsoluteLink
                    else:
                        thisLaneStorage = StorageLocation.RelativeLink

                if thisLaneStorage != newStorageLocation:
                    if newStorageLocation == StorageLocation.ProjectFile:
                        info.location = DatasetInfo.Location.ProjectInternal
                    else:
                        info.location = DatasetInfo.Location.FileSystem
                        cwd = op.WorkingDirectory.value
                        absPath, relPath = getPathVariants(info.filePath, cwd)
                        if relPath is not None and newStorageLocation == StorageLocation.RelativeLink:
                            info.filePath = relPath
                        elif newStorageLocation == StorageLocation.AbsoluteLink:
                            info.filePath = absPath
                        else:
                            assert False, "Unknown storage location setting."
                    op.Dataset.setValue(info)
            self._error_fields.discard('Storage Location')
            return True

        except Exception as e:
            # Revert everything back to the previous state
            for laneIndex, op in self.tempOps.items():
                op.Dataset.setValue(oldInfos[laneIndex])

            msg = "Could not set new storage location settings due to an exception:\n"
            msg += "{}".format(e)
            log_exception(logger, msg)
            QMessageBox.warning(self, "Error", msg)
            self._error_fields.add('Storage Location')
            return False

        finally:
            self._updateStorageCombo()
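
The structure above is a snapshot-apply-revert pattern: copy every lane's current settings, try to apply the new setting to all of them, and restore every copy if any lane fails, so the operators are never left half-updated (the finally block then refreshes the combo either way). A stripped-down sketch of that control flow, with invented names:

    import copy

    def apply_to_all(settings, update):
        """Apply `update` to every value in `settings`, reverting all of them on any failure."""
        old = {key: copy.copy(value) for key, value in settings.items()}
        try:
            for key in settings:
                settings[key] = update(copy.copy(settings[key]))
            return True
        except Exception:
            settings.update(old)  # revert every entry, not just the one that failed
            return False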
Example #7
    def _initInternalDatasetNameCombo(self):
        # If any dataset is either (1) not hdf5 or (2) project-internal, then we can't change the internal path.
        h5Exts = ['.ilp', '.h5', '.hdf5']
        for laneIndex in self._laneIndexes:
            tmpOp = self.tempOps[laneIndex]
            datasetInfo = tmpOp.Dataset.value
            externalPath = PathComponents(datasetInfo.filePath).externalPath
            if (
                os.path.splitext(externalPath)[1] not in h5Exts
                or datasetInfo.location == DatasetInfo.Location.ProjectInternal
            ):
                self.internalDatasetNameComboBox.addItem("N/A")
                self.internalDatasetNameComboBox.setEnabled(False)
                return

        # Enable IFF all datasets have at least one common internal dataset, and only show COMMON datasets
        allInternalPaths = set()
        commonInternalPaths = None

        for laneIndex in self._laneIndexes:
            tmpOp = self.tempOps[laneIndex]
            datasetInfo = tmpOp.Dataset.value

            externalPath = PathComponents(datasetInfo.filePath).externalPath
            absPath, _ = getPathVariants(externalPath,
                                         tmpOp.WorkingDirectory.value)
            internalPaths = set(self._getPossibleInternalPaths(absPath))

            if commonInternalPaths is None:
                # Init with the first file's set of paths
                commonInternalPaths = internalPaths

            # Set operations
            allInternalPaths |= internalPaths
            commonInternalPaths &= internalPaths
            if len(commonInternalPaths) == 0:
                self.internalDatasetNameComboBox.addItem(
                    "Couldn't find a dataset name common to all selected files."
                )
                self.internalDatasetNameComboBox.setEnabled(False)
                return

        uncommonInternalPaths = allInternalPaths - commonInternalPaths
        # Add all common paths to the combo
        for path in sorted(commonInternalPaths):
            self.internalDatasetNameComboBox.addItem(path)

        # Add the remaining ones, but disable them since they aren't common to all files:
        for path in sorted(uncommonInternalPaths):
            self.internalDatasetNameComboBox.addItem(path)
            # http://theworldwideinternet.blogspot.com/2011/01/disabling-qcombobox-items.html
            model = self.internalDatasetNameComboBox.model()
            index = model.index(self.internalDatasetNameComboBox.count() - 1,
                                0)
            model.setData(index, 0, Qt.UserRole - 1)

        # Finally, initialize with NO item selected
        self.internalDatasetNameComboBox.setCurrentIndex(-1)
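
The combo is driven by two running sets: allInternalPaths (the union across lanes) and commonInternalPaths (the intersection). Common paths become selectable items; the remainder is added but disabled. A tiny worked example of the set bookkeeping:

    lane_paths = [{"/volume", "/labels"}, {"/volume", "/mask"}]

    all_paths = set()
    common_paths = None
    for paths in lane_paths:
        if common_paths is None:
            common_paths = set(paths)
        all_paths |= paths
        common_paths &= paths

    # all_paths                 == {"/volume", "/labels", "/mask"}
    # common_paths              == {"/volume"}            -> enabled item
    # all_paths - common_paths  == {"/labels", "/mask"}   -> added, but disabled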
Example #9
    def getDatasetDirectory(self, blockstart):
        """
        Return the directory that contains the block that starts at the given coordinates.
        """
        # Add the view origin to find the on-disk block coordinates
        blockstart = numpy.add(blockstart, self._description.view_origin)
        descriptionFileDir = os.path.split(self._descriptionFilePath)[0]
        absPath, _ = getPathVariants(self._description.dataset_root_dir, descriptionFileDir)
        blockFilePath = absPath

        for axis, start in zip(self._description.axes, blockstart):
            blockFilePath = os.path.join(blockFilePath, "{}_{:08d}".format(axis, start))
        return blockFilePath
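
Each block's directory name is "<axis>_<start zero-padded to 8 digits>", one path component per axis, appended under the dataset root. For example, with axes "zyx", an on-disk block start of (0, 256, 512), and a hypothetical root directory:

    import os

    block_path = "/data/blocks"
    for axis, start in zip("zyx", (0, 256, 512)):
        block_path = os.path.join(block_path, "{}_{:08d}".format(axis, start))

    # block_path == "/data/blocks/z_00000000/y_00000256/x_00000512"  (on POSIX)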
Example #10
    def format_path(self, path_template: str) -> str:
        dataset_dir = str(self._dataset_info.default_output_dir)
        abs_dataset_dir, _ = getPathVariants(dataset_dir, self._working_dir)

        nickname = self._dataset_info.nickname.replace("*", "")
        if os.path.pathsep in nickname:
            nickname = PathComponents(nickname.split(os.path.pathsep)[0]).fileNameBase

        known_keys = {"dataset_dir": abs_dataset_dir, "nickname": nickname}

        if self._result_type:
            known_keys["result_type"] = self._result_type

        return format_known_keys(path_template, known_keys)
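
format_known_keys() performs partial substitution: it fills in only the keys it is given (dataset_dir, nickname, result_type) and leaves other placeholders, such as slicing coordinates, in the template for a later formatting pass. A stdlib-only sketch of that behavior, assuming the remaining placeholders carry no format specs (the real helper is more thorough):

    class _KeepMissing(dict):
        def __missing__(self, key):
            return "{" + key + "}"

    def partial_format(template, known):
        return template.format_map(_KeepMissing(known))

    # partial_format("{dataset_dir}/{nickname}_{slice_index}.png",
    #                {"dataset_dir": "/out", "nickname": "cells"})
    # -> "/out/cells_{slice_index}.png"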
Example #11
    def exportSubset(self, roi, exportDirectory, use_view_coordinates=True):
        """
        Create a new blockwise fileset by copying a subset of this blockwise fileset.

        :param roi: The portion to export.  Must be along block boundaries, in ABSOLUTE coordinates.
        :param exportDirectory: The directory to copy the new blockwise fileset to.
        """
        # For now, this implementation assumes it can simply copy EVERYTHING in the block directories,
        #  including lock files.  Therefore, we require that the fileset be opened in read-only mode.
        # If that's a problem, change this function to ignore lock files when copying (or purge them afterwards).
        roi = list(map(TinyVector, roi))
        if not use_view_coordinates:
            abs_roi = roi
            assert (
                abs_roi[0] >= self.description.view_origin
            ), "Roi {} is out-of-bounds: must not span lower than the view origin: {}".format(
                roi, self.description.view_origin
            )
        else:
            abs_roi = roi + self.description.view_origin

        assert self.mode == "r", "Can't export from a fileset that is open in read/write mode."

        block_shape = self._description.block_shape
        abs_shape = self._description.shape
        view_origin = self._description.view_origin

        assert (abs_roi[0] % block_shape == 0).all(), "exportSubset() requires roi to start on a block boundary"
        assert (
            (abs_roi[1] % block_shape == 0) | (abs_roi[1] == abs_shape)
        ).all(), "exported subset must end on block or dataset boundary."

        if not os.path.exists(exportDirectory):
            os.makedirs(exportDirectory)

        source_desc_path = self._descriptionFilePath
        source_desc_dir, source_desc_filename = os.path.split(source_desc_path)
        source_root_dir = self.description.dataset_root_dir

        # Copy/update description file
        dest_desc_path = os.path.join(exportDirectory, source_desc_filename)
        if os.path.exists(dest_desc_path):
            dest_description = BlockwiseFileset.readDescription(dest_desc_path)
        else:
            dest_description = copy.copy(self._description)
            dest_description.view_shape = abs_roi[1] - view_origin
            dest_description.hash_id = None

        BlockwiseFileset.writeDescription(dest_desc_path, dest_description)

        # Determine destination root block dir
        if os.path.isabs(source_root_dir):
            source_root_dir = os.path.normpath(source_root_dir)
            source_root_dir_name = os.path.split(source_root_dir)[1]
            dest_root_dir = os.path.join(exportDirectory, source_root_dir_name)
        else:
            dest_root_dir = os.path.join(exportDirectory, source_root_dir)

        source_root_dir, _ = getPathVariants(source_root_dir, source_desc_dir)

        view_roi = abs_roi - view_origin
        block_starts = getIntersectingBlocks(block_shape, view_roi)
        for block_start in block_starts:
            source_block_dir = self.getDatasetDirectory(block_start)
            rel_block_dir = os.path.relpath(source_block_dir, source_root_dir)
            dest_block_dir = os.path.join(dest_root_dir, rel_block_dir)

            if os.path.exists(dest_block_dir):
                logger.info("Skipping existing block directory: {}".format(dest_block_dir))
            elif not os.path.exists(source_block_dir):
                logger.info("Skipping missing block directory: {}".format(source_block_dir))
            else:
                # Copy the entire block directory
                assert dest_block_dir[-1] != "/"
                dest_block_dir_parent = os.path.split(dest_block_dir)[0]
                if not os.path.exists(dest_block_dir_parent):
                    os.makedirs(dest_block_dir_parent)
                shutil.copytree(source_block_dir, dest_block_dir)

        return dest_desc_path
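
The block-boundary assertions above are elementwise modulo checks: the roi start must be a multiple of the block shape on every axis, and the roi stop must be a block multiple or lie exactly on the dataset edge. A small numeric illustration with numpy:

    import numpy

    block_shape = numpy.array([100, 100, 100])
    abs_shape = numpy.array([250, 300, 400])
    abs_roi = numpy.array([[0, 100, 200],
                           [250, 300, 400]])

    starts_on_boundary = (abs_roi[0] % block_shape == 0).all()
    stops_on_boundary_or_edge = ((abs_roi[1] % block_shape == 0) | (abs_roi[1] == abs_shape)).all()

    # starts_on_boundary        -> True
    # stops_on_boundary_or_edge -> True  (250 is not a block multiple, but it equals the dataset edge)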