Code example #1
    def setupOutputs(self):
        self.cleanupOnDiskView()

        # FIXME: If RawData becomes unready() at the same time as RawDatasetInfo(), then
        #          we have no guarantees about which one will trigger setupOutputs() first.
        #        It is therefore possible for 'RawDatasetInfo' to appear ready() to us,
        #          even though its upstream partner is UNready.  We are about to get the
        #          unready() notification, but it will come too late to prevent our
        #          setupOutputs method from being called.
        #        Without proper graph setup transaction semantics, we have to use this
        #          hack as a workaround.
        try:
            rawInfo = self.RawDatasetInfo.value
        except:
            for oslot in self.outputs.values():
                if oslot.partner is None:
                    oslot.meta.NOTREADY = True
            return

        dataset_dir = PathComponents(rawInfo.filePath).externalDirectory
        abs_dataset_dir, _ = getPathVariants(dataset_dir,
                                             self.WorkingDirectory.value)
        known_keys = {}
        known_keys['dataset_dir'] = abs_dataset_dir
        known_keys['nickname'] = rawInfo.nickname

        # Disconnect to open the 'transaction'
        if self._opImageOnDiskProvider is not None:
            self._opImageOnDiskProvider.TransactionSlot.disconnect()
        self._opFormattedExport.TransactionSlot.disconnect()

        # Blank the internal path while we manipulate the external path
        #  to avoid invalid intermediate states of ExportPath
        self._opFormattedExport.OutputInternalPath.setValue("")

        # use partial formatting to fill in non-coordinate name fields
        name_format = self.OutputFilenameFormat.value
        partially_formatted_name = format_known_keys(name_format, known_keys)

        # Convert to absolute path before configuring the internal op
        abs_path, _ = getPathVariants(partially_formatted_name,
                                      self.WorkingDirectory.value)
        self._opFormattedExport.OutputFilenameFormat.setValue(abs_path)

        # use partial formatting on the internal dataset name, too
        internal_dataset_format = self.OutputInternalPath.value
        partially_formatted_dataset_name = format_known_keys(
            internal_dataset_format, known_keys)
        self._opFormattedExport.OutputInternalPath.setValue(
            partially_formatted_dataset_name)

        # Re-connect to finish the 'transaction'
        self._opFormattedExport.TransactionSlot.connect(self.TransactionSlot)
        if self._opImageOnDiskProvider is not None:
            self._opImageOnDiskProvider.TransactionSlot.connect(
                self.TransactionSlot)

        self.setupOnDiskView()
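
All of these examples revolve around format_known_keys from lazyflow.utility: it fills in only the placeholders whose values are already known (e.g. {dataset_dir}, {nickname}) and leaves coordinate placeholders such as {t_start} untouched for a later, final formatting pass. The snippet below is a minimal standard-library sketch of that partial-formatting idea, not the actual lazyflow implementation, which may differ in details (error handling, the strict flag seen in some of the later examples).

import string

def partial_format(template, known_keys):
    """Fill in only the placeholders listed in known_keys; re-emit the rest verbatim."""
    formatter = string.Formatter()
    pieces = []
    for literal, field, spec, conversion in formatter.parse(template):
        pieces.append(literal)
        if field is None:
            continue
        if field in known_keys:
            value = known_keys[field]
            if conversion:
                value = formatter.convert_field(value, conversion)
            pieces.append(format(value, spec or ""))
        else:
            # Rebuild the untouched placeholder, e.g. '{t_start:04d}'
            placeholder = "{" + field
            if conversion:
                placeholder += "!" + conversion
            if spec:
                placeholder += ":" + spec
            placeholder += "}"
            pieces.append(placeholder)
    return "".join(pieces)

print(partial_format("{dataset_dir}/{nickname}_t{t_start}-{t_stop}.h5",
                     {"dataset_dir": "/data", "nickname": "cells"}))
# -> /data/cells_t{t_start}-{t_stop}.h5
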
Code example #2
File: opDataExport.py  Project: boykovdn/ilastik
    def setupOutputs(self):
        # FIXME: If RawData becomes unready() at the same time as RawDatasetInfo(), then
        #          we have no guarantees about which one will trigger setupOutputs() first.
        #        It is therefore possible for 'RawDatasetInfo' to appear ready() to us,
        #          even though its upstream partner is UNready.  We are about to get the
        #          unready() notification, but it will come too late to prevent our
        #          setupOutputs method from being called.
        #        Without proper graph setup transaction semantics, we have to use this
        #          hack as a workaround.
        try:
            rawInfo = self.RawDatasetInfo.value
        except:
            for oslot in list(self.outputs.values()):
                if oslot.upstream_slot is None:
                    oslot.meta.NOTREADY = True
            return

        selection_index = self.InputSelection.value
        if not self.Inputs[selection_index].ready():
            for oslot in list(self.outputs.values()):
                if oslot.upstream_slot is None:
                    oslot.meta.NOTREADY = True
            return
        self._opFormattedExport.Input.connect(self.Inputs[selection_index])

        dataset_dir = str(rawInfo.default_output_dir)
        abs_dataset_dir, _ = getPathVariants(dataset_dir, self.WorkingDirectory.value)
        known_keys = {}
        known_keys["dataset_dir"] = abs_dataset_dir
        nickname = rawInfo.nickname.replace("*", "")
        if os.path.pathsep in nickname:
            nickname = PathComponents(nickname.split(os.path.pathsep)[0]).fileNameBase
        known_keys["nickname"] = nickname
        result_types = self.SelectionNames.value
        known_keys["result_type"] = result_types[selection_index]

        self._opFormattedExport.TransactionSlot.disconnect()

        # Blank the internal path while we manipulate the external path
        #  to avoid invalid intermediate states of ExportPath
        self._opFormattedExport.OutputInternalPath.setValue("")

        # use partial formatting to fill in non-coordinate name fields
        name_format = self.OutputFilenameFormat.value
        partially_formatted_name = format_known_keys(name_format, known_keys)

        # Convert to absolute path before configuring the internal op
        abs_path, _ = getPathVariants(partially_formatted_name, self.WorkingDirectory.value)
        self._opFormattedExport.OutputFilenameFormat.setValue(abs_path)

        # use partial formatting on the internal dataset name, too
        internal_dataset_format = self.OutputInternalPath.value
        partially_formatted_dataset_name = format_known_keys(internal_dataset_format, known_keys)
        self._opFormattedExport.OutputInternalPath.setValue(partially_formatted_dataset_name)

        # Re-connect to finish the 'transaction'
        self._opFormattedExport.TransactionSlot.connect(self.TransactionSlot)
Code example #3
File: opDataExport.py  Project: burcin/ilastik
    def setupOutputs(self):
        self.cleanupOnDiskView()        

        # FIXME: If RawData becomes unready() at the same time as RawDatasetInfo(), then 
        #          we have no guarantees about which one will trigger setupOutputs() first.
        #        It is therefore possible for 'RawDatasetInfo' to appear ready() to us, 
        #          even though its upstream partner is UNready.  We are about to get the
        #          unready() notification, but it will come too late to prevent our 
        #          setupOutputs method from being called.
        #        Without proper graph setup transaction semantics, we have to use this 
        #          hack as a workaround.
        try:
            rawInfo = self.RawDatasetInfo.value
        except:
            for oslot in self.outputs.values():
                if oslot.partner is None:
                    oslot.meta.NOTREADY = True
            return

        dataset_dir = PathComponents(rawInfo.filePath).externalDirectory
        abs_dataset_dir, _ = getPathVariants(dataset_dir, self.WorkingDirectory.value)
        known_keys = {}        
        known_keys['dataset_dir'] = abs_dataset_dir
        known_keys['nickname'] = rawInfo.nickname

        # Disconnect to open the 'transaction'
        if self._opImageOnDiskProvider is not None:
            self._opImageOnDiskProvider.TransactionSlot.disconnect()
        self._opFormattedExport.TransactionSlot.disconnect()

        # Blank the internal path while we manipulate the external path
        #  to avoid invalid intermediate states of ExportPath
        self._opFormattedExport.OutputInternalPath.setValue( "" )

        # use partial formatting to fill in non-coordinate name fields
        name_format = self.OutputFilenameFormat.value
        partially_formatted_name = format_known_keys( name_format, known_keys )
        
        # Convert to absolute path before configuring the internal op
        abs_path, _ = getPathVariants( partially_formatted_name, self.WorkingDirectory.value )
        self._opFormattedExport.OutputFilenameFormat.setValue( abs_path )

        # use partial formatting on the internal dataset name, too
        internal_dataset_format = self.OutputInternalPath.value 
        partially_formatted_dataset_name = format_known_keys( internal_dataset_format, known_keys )
        self._opFormattedExport.OutputInternalPath.setValue( partially_formatted_dataset_name )

        # Re-connect to finish the 'transaction'
        self._opFormattedExport.TransactionSlot.connect( self.TransactionSlot )
        if self._opImageOnDiskProvider is not None:
            self._opImageOnDiskProvider.TransactionSlot.connect( self.TransactionSlot )
        
        self.setupOnDiskView()
Code example #4
    def post_process_lane_export(self, lane_index):
        # FIXME: This probably only works for the non-blockwise export slot.
        #        We should assert that the user isn't using the blockwise slot.
        settings, selected_features = self.trackingApplet.topLevelOperator.getLane(lane_index).get_table_export_settings()
        from lazyflow.utility import PathComponents, make_absolute, format_known_keys

        if settings:
            self.dataExportTrackingApplet.progressSignal.emit(-1)
            raw_dataset_info = self.dataSelectionApplet.topLevelOperator.DatasetGroup[lane_index][0].value

            project_path = self.shell.projectManager.currentProjectPath
            project_dir = os.path.dirname(project_path)
            dataset_dir = PathComponents(raw_dataset_info.filePath).externalDirectory
            abs_dataset_dir = make_absolute(dataset_dir, cwd=project_dir)

            known_keys = {}
            known_keys['dataset_dir'] = abs_dataset_dir
            nickname = raw_dataset_info.nickname.replace('*', '')
            if os.path.pathsep in nickname:
                nickname = PathComponents(nickname.split(os.path.pathsep)[0]).fileNameBase
            known_keys['nickname'] = nickname

            # use partial formatting to fill in non-coordinate name fields
            name_format = settings['file path']
            partially_formatted_name = format_known_keys( name_format, known_keys )
            settings['file path'] = partially_formatted_name

            req = self.trackingApplet.topLevelOperator.getLane(lane_index).export_object_data(
                        lane_index,
                        # FIXME: Even in non-headless mode, we can't show the gui because we're running in a non-main thread.
                        #        That's not a huge deal, because there's still a progress bar for the overall export.
                        show_gui=False)

            req.wait()
            self.dataExportTrackingApplet.progressSignal.emit(100)
Code example #5
    def _executeExportPath(self, result):
        path_format = self.OutputFilenameFormat.value
        file_extension = self._export_impls[ self.OutputFormat.value ][0]
        
        # Remove existing extension (if present) and add the correct extension (if any)
        if file_extension:
            path_format = os.path.splitext(path_format)[0]
            path_format += '.' + file_extension

        # Provide the TOTAL path (including dataset name)
        if self.OutputFormat.value == 'hdf5':
            path_format += '/' + self.OutputInternalPath.value

        roi = numpy.array( roiFromShape(self.Input.meta.shape) )
        
        # Intermediate state can cause coordinate offset and input shape to be mismatched.
        # Just don't use the offset if it looks wrong.
        # (The client will provide a valid offset later on.)
        if self.CoordinateOffset.ready() and len(self.CoordinateOffset.value) == len(roi[0]):
            offset = self.CoordinateOffset.value
            assert len(roi[0]) == len(offset)
            roi += offset
        optional_replacements = {}
        optional_replacements['roi'] = map(tuple, roi)
        for key, (start, stop) in zip( self.Input.meta.getAxisKeys(), roi.transpose() ):
            optional_replacements[key + '_start'] = start
            optional_replacements[key + '_stop'] = stop
        formatted_path = format_known_keys( path_format, optional_replacements )
        result[0] = formatted_path
        return result
Code example #6
File: opExportSlot.py  Project: soledis92/lazyflow
    def _executeExportPath(self, result):
        path_format = self.OutputFilenameFormat.value
        file_extension = self._export_impls[self.OutputFormat.value][0]

        # Remove existing extension (if present) and add the correct extension (if any)
        if file_extension:
            path_format = os.path.splitext(path_format)[0]
            path_format += '.' + file_extension

        # Provide the TOTAL path (including dataset name)
        if self.OutputFormat.value == 'hdf5':
            path_format += '/' + self.OutputInternalPath.value

        roi = numpy.array(roiFromShape(self.Input.meta.shape))

        # Intermediate state can cause coordinate offset and input shape to be mismatched.
        # Just don't use the offset if it looks wrong.
        # (The client will provide a valid offset later on.)
        if self.CoordinateOffset.ready() and len(
                self.CoordinateOffset.value) == len(roi[0]):
            offset = self.CoordinateOffset.value
            assert len(roi[0]) == len(offset)
            roi += offset
        optional_replacements = {}
        optional_replacements['roi'] = map(tuple, roi)
        for key, (start, stop) in zip(self.Input.meta.getAxisKeys(),
                                      roi.transpose()):
            optional_replacements[key + '_start'] = start
            optional_replacements[key + '_stop'] = stop
        formatted_path = format_known_keys(path_format, optional_replacements)
        result[0] = formatted_path
        return result
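
The _executeExportPath implementations above also build per-axis _start/_stop replacement keys, so a filename format can embed coordinates such as {x_start}-{x_stop}. A standalone illustration of that loop with made-up axis keys and ROI values:

import numpy

axis_keys = ["t", "z", "y", "x"]                         # hypothetical axis order
roi = numpy.array([(0, 10, 20, 30), (5, 50, 220, 330)])  # (start, stop) per axis

optional_replacements = {"roi": list(map(tuple, roi))}
for key, (start, stop) in zip(axis_keys, roi.transpose()):
    optional_replacements[key + "_start"] = start
    optional_replacements[key + "_stop"] = stop

# '{x_start}-{x_stop}' in a filename format would now expand to '30-330'
print(optional_replacements["x_start"], optional_replacements["x_stop"])  # -> 30 330
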
Code example #7
    def _handleFormatChange(self, index):
        file_format = str(self.formatCombo.currentText())
        option_widget = self._format_option_editors[file_format]
        self._opDataExport.OutputFormat.setValue(file_format)

        # Auto-remove any instance of 'slice_index' from the
        #  dataset path if the user switches to a non-sequence type
        # TODO: This is a little hacky.  Could be fixed by defining an ABC for
        #       file option widgets with a 'repair path' method or something
        #       similar, but that seems like overkill for now.
        export_path = str(self._opDataExport.OutputFilenameFormat.value)
        if not isinstance(option_widget, StackExportFileOptionsWidget) \
           and re.search('{slice_index.*}', export_path):
            try:
                from lazyflow.utility import format_known_keys
                export_path = format_known_keys(export_path,
                                                {'slice_index': 1234567890})
                export_path = export_path.replace('1234567890', '')
            except:
                pass
            else:
                self._opDataExport.OutputFilenameFormat.setValue(export_path)

        # Show the new option widget
        self.stackedWidget.setCurrentWidget(option_widget)

        self._handlePathValidityChange()
Code example #8
    def _handleFormatChange(self, index):
        file_format = str(self.formatCombo.currentText())
        option_widget = self._format_option_editors[file_format]
        self._opDataExport.OutputFormat.setValue(file_format)

        # Auto-remove any instance of 'slice_index' from the
        #  dataset path if the user switches to a non-sequence type
        # TODO: This is a little hacky.  Could be fixed by defining an ABC for
        #       file option widgets with a 'repair path' method or something
        #       similar, but that seems like overkill for now.
        export_path = str(self._opDataExport.OutputFilenameFormat.value)
        if not isinstance(option_widget, StackExportFileOptionsWidget) and re.search("{slice_index.*}", export_path):
            try:
                from lazyflow.utility import format_known_keys

                export_path = format_known_keys(export_path, {"slice_index": 1234567890})
                export_path = export_path.replace("1234567890", "")
            except:
                pass
            else:
                self._opDataExport.OutputFilenameFormat.setValue(export_path)

        # Show the new option widget
        self.stackedWidget.setCurrentWidget(option_widget)

        self._handlePathValidityChange()
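
The slice_index clean-up in _handleFormatChange works by formatting that one placeholder with an unmistakable sentinel value and then deleting the sentinel from the result, which also removes any zero-padding the placeholder requested. A toy version of the same trick, using plain str.format and assuming slice_index is the only placeholder in the (hypothetical) path:

import re

export_path = "/tmp/export_{slice_index:03d}.png"   # hypothetical path
if re.search("{slice_index.*}", export_path):
    # Fill the placeholder with a sentinel, then strip the sentinel text.
    export_path = export_path.format(slice_index=1234567890)
    export_path = export_path.replace("1234567890", "")
print(export_path)  # -> /tmp/export_.png
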
Code example #9
File: opExportSlot.py  Project: burgerdev/lazyflow
    def _executeExportPath(self, result):
        path_format = self.OutputFilenameFormat.value
        file_extension = self._export_impls[ self.OutputFormat.value ][0]
        
        # Remove existing extension (if present) and add the correct extension
        path_format = os.path.splitext(path_format)[0]
        path_format += '.' + file_extension

        # Provide the TOTAL path (including dataset name)
        if self.OutputFormat.value == 'hdf5':
            path_format += '/' + self.OutputInternalPath.value

        roi = numpy.array( roiFromShape(self.Input.meta.shape) )
        if self.CoordinateOffset.ready():
            offset = self.CoordinateOffset.value
            assert len(roi[0]) == len(offset)
            roi += offset
        optional_replacements = {}
        optional_replacements['roi'] = map(tuple, roi)
        for key, (start, stop) in zip( self.Input.meta.getAxisKeys(), roi.transpose() ):
            optional_replacements[key + '_start'] = start
            optional_replacements[key + '_stop'] = stop
        formatted_path = format_known_keys( path_format, optional_replacements )
        result[0] = formatted_path
        return result
Code example #11
    def format_path(self, path_template: str) -> str:
        dataset_dir = str(self._dataset_info.default_output_dir)
        abs_dataset_dir, _ = getPathVariants(dataset_dir, self._working_dir)

        nickname = self._dataset_info.nickname.replace("*", "")
        if os.path.pathsep in nickname:
            nickname = PathComponents(nickname.split(
                os.path.pathsep)[0]).fileNameBase

        known_keys = {"dataset_dir": abs_dataset_dir, "nickname": nickname}

        if self._result_type:
            known_keys["result_type"] = self._result_type

        return format_known_keys(path_template, known_keys)
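
Several examples above normalize the dataset nickname before using it as the {nickname} replacement: glob characters are stripped, and for stack datasets, whose nickname joins several paths with os.path.pathsep, only the base name of the first component is kept. A rough standalone illustration with a hypothetical stack nickname; the real code uses PathComponents(...).fileNameBase, for which os.path functions stand in here:

import os

nickname = os.path.pathsep.join(["/data/slice_*.png", "/data/slice_0001.png"])  # hypothetical
nickname = nickname.replace("*", "")
if os.path.pathsep in nickname:
    first = nickname.split(os.path.pathsep)[0]
    # Stand-in for PathComponents(first).fileNameBase: drop directory and extension.
    nickname = os.path.splitext(os.path.basename(first))[0]
print(nickname)  # -> slice_
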
Code example #12
    def getPartiallyFormattedName(self, lane_index, path_format_string):
        ''' Takes the format string for the output file, fills in the most important placeholders, and returns it '''
        raw_dataset_info = self.dataSelectionApplet.topLevelOperator.DatasetGroup[lane_index][0].value
        project_path = self.shell.projectManager.currentProjectPath
        project_dir = os.path.dirname(project_path)
        dataset_dir = PathComponents(raw_dataset_info.filePath).externalDirectory
        abs_dataset_dir = make_absolute(dataset_dir, cwd=project_dir)
        known_keys = {}
        known_keys['dataset_dir'] = abs_dataset_dir
        nickname = raw_dataset_info.nickname.replace('*', '')
        if os.path.pathsep in nickname:
            nickname = PathComponents(nickname.split(os.path.pathsep)[0]).fileNameBase
        known_keys['nickname'] = nickname
        known_keys['result_type'] = self.dataExportTrackingApplet.topLevelOperator.SelectedPlugin._value
        # use partial formatting to fill in non-coordinate name fields
        partially_formatted_name = format_known_keys(path_format_string, known_keys)
        return partially_formatted_name
Code example #14
    def post_process_lane_export(self, lane_index):
        settings, selected_features = self.trackingApplet.topLevelOperator.getLane(
            lane_index).get_table_export_settings()
        if settings:
            self.dataExportApplet.progressSignal.emit(0)
            raw_dataset_info = self.dataSelectionApplet.topLevelOperator.DatasetGroup[
                lane_index][0].value

            project_path = self.shell.projectManager.currentProjectPath
            project_dir = os.path.dirname(project_path)
            dataset_dir = PathComponents(
                raw_dataset_info.filePath).externalDirectory
            abs_dataset_dir = make_absolute(dataset_dir, cwd=project_dir)

            known_keys = {}
            known_keys['dataset_dir'] = abs_dataset_dir
            nickname = raw_dataset_info.nickname.replace('*', '')
            if os.path.pathsep in nickname:
                nickname = PathComponents(nickname.split(
                    os.path.pathsep)[0]).fileNameBase
            known_keys['nickname'] = nickname

            # use partial formatting to fill in non-coordinate name fields
            name_format = settings['file path']
            partially_formatted_name = format_known_keys(
                name_format, known_keys)
            settings['file path'] = partially_formatted_name

            req = self.trackingApplet.topLevelOperator.getLane(
                lane_index
            ).export_object_data(
                lane_index,
                # FIXME: Even in non-headless mode, we can't show the gui because we're running in a non-main thread.
                #        That's not a huge deal, because there's still a progress bar for the overall export.
                show_gui=False)

            req.wait()
            self.dataExportApplet.progressSignal.emit(100)

            # Restore state of axis ranges
            parameters = self.trackingApplet.topLevelOperator.Parameters.value
            parameters['time_range'] = self.prev_time_range
            parameters['x_range'] = self.prev_x_range
            parameters['y_range'] = self.prev_y_range
            parameters['z_range'] = self.prev_z_range
Code example #15
    def getPartiallyFormattedName(self, lane_index: int, path_format_string: str) -> str:
        ''' Takes the format string for the output file, fills in the most important placeholders, and returns it '''

        raw_dataset_info = self.topLevelOperator.RawDatasetInfo[lane_index].value
        project_path = self.topLevelOperator.WorkingDirectory.value
        dataset_dir = PathComponents(raw_dataset_info.filePath).externalDirectory
        abs_dataset_dir = make_absolute(dataset_dir, cwd=project_path)

        nickname = raw_dataset_info.nickname.replace('*', '')
        if os.path.pathsep in nickname:
            nickname = PathComponents(nickname.split(os.path.pathsep)[0]).fileNameBase

        known_keys = {
            'dataset_dir': abs_dataset_dir,
            'nickname': nickname,
            'result_type': self.topLevelOperator.SelectedPlugin._value,
        }

        return format_known_keys(path_format_string, known_keys)
Code example #16
    def post_process_lane_export(self, lane_index):
        settings, selected_features = self.trackingApplet.topLevelOperator.getLane(lane_index).get_table_export_settings()
        if settings:
            self.dataExportApplet.progressSignal.emit(0)
            raw_dataset_info = self.dataSelectionApplet.topLevelOperator.DatasetGroup[lane_index][0].value
            
            project_path = self.shell.projectManager.currentProjectPath
            project_dir = os.path.dirname(project_path)
            dataset_dir = PathComponents(raw_dataset_info.filePath).externalDirectory
            abs_dataset_dir = make_absolute(dataset_dir, cwd=project_dir)

            known_keys = {}        
            known_keys['dataset_dir'] = abs_dataset_dir
            nickname = raw_dataset_info.nickname.replace('*', '')
            if os.path.pathsep in nickname:
                nickname = PathComponents(nickname.split(os.path.pathsep)[0]).fileNameBase
            known_keys['nickname'] = nickname
            
            # use partial formatting to fill in non-coordinate name fields
            name_format = settings['file path']
            partially_formatted_name = format_known_keys( name_format, known_keys )
            settings['file path'] = partially_formatted_name

            req = self.trackingApplet.topLevelOperator.getLane(lane_index).export_object_data(
                        lane_index, 
                        # FIXME: Even in non-headless mode, we can't show the gui because we're running in a non-main thread.
                        #        That's not a huge deal, because there's still a progress bar for the overall export.
                        show_gui=False)

            req.wait()
            self.dataExportApplet.progressSignal.emit(100)
            
            # Restore state of axis ranges
            parameters = self.trackingApplet.topLevelOperator.Parameters.value
            parameters['time_range'] = self.prev_time_range
            parameters['x_range'] = self.prev_x_range
            parameters['y_range'] = self.prev_y_range
            parameters['z_range'] = self.prev_z_range          
Code example #17
    def setupOutputs(self):
        # Prepare subregion operator
        total_roi = roiFromShape( self.Input.meta.shape )
        total_roi = list(map(tuple, total_roi))

        # Default to full roi
        new_start, new_stop = total_roi

        if self.RegionStart.ready():
            # RegionStart is permitted to contain 'None' values, which we replace with zeros
            new_start = [x or 0 for x in self.RegionStart.value]

        if self.RegionStop.ready():
            # RegionStop is permitted to contain 'None' values, 
            #  which we replace with the full extent of the corresponding axis
            new_stop = [x or extent for x, extent in zip(self.RegionStop.value, total_roi[1])]
        else:
            self._opSubRegion.Stop.setValue( tuple(total_roi[1]) )

        if not self._opSubRegion.Start.ready() or \
           not self._opSubRegion.Stop.ready() or \
           self._opSubRegion.Start.value != new_start or \
           self._opSubRegion.Stop.value != new_stop:
            # Disconnect first to ensure that the start/stop slots are applied together (atomically)
            self._opSubRegion.Stop.disconnect()

            # Provide the coordinate offset, but only for the axes that are present in the output image
            tagged_input_offset = collections.defaultdict( lambda: -1, zip(self.Input.meta.getAxisKeys(), new_start ) )
            output_axes = self._opReorderAxes.AxisOrder.value
            output_offset = [ tagged_input_offset[axis] for axis in output_axes ]
            output_offset = tuple( filter( lambda x: x != -1, output_offset ) )
            self._opExportSlot.CoordinateOffset.setValue( output_offset )

            self._opSubRegion.Start.setValue( tuple(new_start) )
            self._opSubRegion.Stop.setValue( tuple(new_stop) )

        # Set up normalization and dtype conversion
        export_dtype = self.Input.meta.dtype
        if self.ExportDtype.ready():
            export_dtype = self.ExportDtype.value

        need_normalize = ( self.InputMin.ready() and 
                           self.InputMax.ready() and 
                           self.ExportMin.ready() and 
                           self.ExportMax.ready() )
        if need_normalize:
            minVal, maxVal = self.InputMin.value, self.InputMax.value
            outputMinVal, outputMaxVal = self.ExportMin.value, self.ExportMax.value

            # Force a drange onto the input slot metadata.
            # opNormalizeAndConvert is an OpPixelOperator, 
            #  which transforms the drange correctly in this case.
            self._opDrangeInjection.Metadata.setValue( { 'drange' : (minVal, maxVal) } )
            
            def normalize(a):
                numerator = numpy.float64(outputMaxVal) - numpy.float64(outputMinVal)
                denominator = numpy.float64(maxVal) - numpy.float64(minVal)
                if denominator != 0.0:
                    frac = numpy.float32(numerator / denominator)
                else:
                    # Denominator was zero.  The user is probably just temporarily changing the values.
                    frac = numpy.float32(0.0)
                result = numpy.asarray(outputMinVal + (a - minVal) * frac, export_dtype)
                return result
            self._opNormalizeAndConvert.Function.setValue( normalize )

            # The OpPixelOperator sets the drange correctly using the function we give it.
            output_drange = self._opNormalizeAndConvert.Output.meta.drange
            assert type(output_drange[0]) == export_dtype
            assert type(output_drange[1]) == export_dtype
        else:
            # We have no drange to set.
            # If the original slot metadata had a drange, 
            #  it will be propagated downstream anyway.
            self._opDrangeInjection.Metadata.setValue( {} )

            # No normalization: just identity function with dtype conversion
            self._opNormalizeAndConvert.Function.setValue( lambda a: numpy.asarray(a, export_dtype) )

        # Use user-provided axis order if specified
        if self.OutputAxisOrder.ready():
            self._opReorderAxes.AxisOrder.setValue( self.OutputAxisOrder.value )
        else:
            axistags = self.Input.meta.axistags
            self._opReorderAxes.AxisOrder.setValue( "".join( tag.key for tag in axistags ) )

        # Obtain values for possible name fields
        roi = [ tuple(self._opSubRegion.Start.value), tuple(self._opSubRegion.Stop.value) ]
        known_keys = { 'roi' : roi }

        # Blank the internal path while we update the external path
        #  to avoid invalid intermediate states of ExportPath
        self._opExportSlot.OutputInternalPath.setValue( "" )
        
        # use partial formatting to fill in non-coordinate name fields
        name_format = self.OutputFilenameFormat.value
        partially_formatted_path = format_known_keys( name_format, known_keys )
        self._opExportSlot.OutputFilenameFormat.setValue( partially_formatted_path )

        internal_dataset_format = self.OutputInternalPath.value 
        partially_formatted_dataset_name = format_known_keys( internal_dataset_format, known_keys )
        self._opExportSlot.OutputInternalPath.setValue( partially_formatted_dataset_name )
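
The normalize closure defined above maps the input drange [minVal, maxVal] linearly onto the export drange [outputMinVal, outputMaxVal] and casts the result to the requested export dtype. The same arithmetic, pulled out as a standalone function with made-up values:

import numpy

minVal, maxVal = 0.0, 1.0            # assumed input drange
outputMinVal, outputMaxVal = 0, 255  # assumed export drange
export_dtype = numpy.uint8

def normalize(a):
    numerator = numpy.float64(outputMaxVal) - numpy.float64(outputMinVal)
    denominator = numpy.float64(maxVal) - numpy.float64(minVal)
    frac = numpy.float32(numerator / denominator) if denominator != 0.0 else numpy.float32(0.0)
    return numpy.asarray(outputMinVal + (a - minVal) * frac, export_dtype)

print(normalize(numpy.array([0.0, 0.5, 1.0])))  # -> [  0 127 255]
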
Code example #18
    def setupOutputs(self):
        new_start, new_stop = self.get_new_roi()
        # If we're in the process of switching input data,
        #  then the roi dimensionality might not match up.
        #  Just leave the roi disconnected for now.
        if len(self.Input.meta.shape) != len(new_start) or len(self.Input.meta.shape) != len(new_stop):
            self._opSubRegion.Roi.disconnect()
        elif not self._opSubRegion.Roi.ready() or self._opSubRegion.Roi.value != (new_start, new_stop):
            self._opSubRegion.Roi.setValue((new_start, new_stop))

        # Set up normalization and dtype conversion
        export_dtype = self.Input.meta.dtype
        if self.ExportDtype.ready():
            export_dtype = self.ExportDtype.value

        need_normalize = (
            self.InputMin.ready() and self.InputMax.ready() and self.ExportMin.ready() and self.ExportMax.ready()
        )
        if need_normalize:
            minVal, maxVal = self.InputMin.value, self.InputMax.value
            outputMinVal, outputMaxVal = self.ExportMin.value, self.ExportMax.value

            # Force a drange onto the input slot metadata.
            # opNormalizeAndConvert is an OpPixelOperator,
            #  which transforms the drange correctly in this case.
            self._opDrangeInjection.Metadata.setValue({"drange": (minVal, maxVal)})

            def normalize(a):
                numerator = numpy.float64(outputMaxVal) - numpy.float64(outputMinVal)
                denominator = numpy.float64(maxVal) - numpy.float64(minVal)
                if denominator != 0.0:
                    frac = numpy.float32(numerator / denominator)
                else:
                    # Denominator was zero.  The user is probably just temporarily changing the values.
                    frac = numpy.float32(0.0)
                result = numpy.asarray(outputMinVal + (a - minVal) * frac, export_dtype)
                return result

            self._opNormalizeAndConvert.Function.setValue(normalize)

            # The OpPixelOperator sets the drange correctly using the function we give it.
            output_drange = self._opNormalizeAndConvert.Output.meta.drange
            assert type(output_drange[0]) == export_dtype
            assert type(output_drange[1]) == export_dtype
        else:
            # We have no drange to set.
            # If the original slot metadata had a drange,
            #  it will be propagated downstream anyway.
            self._opDrangeInjection.Metadata.setValue({})

            # No normalization: just identity function with dtype conversion
            self._opNormalizeAndConvert.Function.setValue(lambda a: numpy.asarray(a, export_dtype))

        # Use user-provided axis order if specified
        user_provided = False
        if self.OutputAxisOrder.ready():
            try:
                self._opReorderAxes.AxisOrder.setValue(self.OutputAxisOrder.value)
                user_provided = True
            except KeyError:
                # FIXME: Why does the above line fail sometimes?
                warnings.warn("Ignoring invalid axis order setting")

        if not user_provided:
            if self.Input.meta.original_axistags is None:
                axiskeys = self.Input.meta.getAxisKeys()
            else:
                axiskeys = self.Input.meta.getOriginalAxisKeys()

            self._opReorderAxes.AxisOrder.setValue("".join(axiskeys))

        # Provide the coordinate offset, but only for the axes that are present in the output image
        tagged_input_offset = collections.defaultdict(lambda: -1, list(zip(self.Input.meta.getAxisKeys(), new_start)))
        output_axes = self._opReorderAxes.AxisOrder.value
        output_offset = [tagged_input_offset[axis] for axis in output_axes]
        output_offset = tuple([x for x in output_offset if x != -1])
        self._opExportSlot.CoordinateOffset.setValue(output_offset)

        # Obtain values for possible name fields
        known_keys = {"roi": list(self._opSubRegion.Roi.value)}
        roi = numpy.array(self._opSubRegion.Roi.value)
        for key, (start, stop) in zip(self.Input.meta.getAxisKeys(), roi.transpose()):
            known_keys[key + "_start"] = start
            known_keys[key + "_stop"] = stop

        # Blank the internal path while we update the external path
        #  to avoid invalid intermediate states of ExportPath
        self._opExportSlot.OutputInternalPath.setValue("")

        # use partial formatting to fill in non-coordinate name fields
        name_format = self.OutputFilenameFormat.value
        partially_formatted_path = format_known_keys(name_format, known_keys, strict=False)
        self._opExportSlot.OutputFilenameFormat.setValue(partially_formatted_path)

        internal_dataset_format = self.OutputInternalPath.value
        partially_formatted_dataset_name = format_known_keys(internal_dataset_format, known_keys)
        self._opExportSlot.OutputInternalPath.setValue(partially_formatted_dataset_name)
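
The coordinate-offset block in the example above keeps only the offsets for axes that survive reordering, using a defaultdict with -1 as a sentinel for axes missing from the input. A standalone sketch with hypothetical axis names and offsets:

import collections

input_axes = ["t", "z", "y", "x", "c"]   # hypothetical input axis order
new_start = [2, 10, 20, 30, 0]           # hypothetical subregion start
output_axes = "yxc"                      # axes present in the exported image

tagged_input_offset = collections.defaultdict(lambda: -1, zip(input_axes, new_start))
output_offset = [tagged_input_offset[axis] for axis in output_axes]
output_offset = tuple(x for x in output_offset if x != -1)
print(output_offset)  # -> (20, 30, 0)
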
Code example #19
    def setupOutputs(self):
        # Prepare subregion operator
        total_roi = roiFromShape(self.Input.meta.shape)
        total_roi = list(map(tuple, total_roi))

        # Default to full roi
        new_start, new_stop = total_roi

        if self.RegionStart.ready():
            # RegionStart is permitted to contain 'None' values, which we replace with zeros
            new_start = [x or 0 for x in self.RegionStart.value]

        if self.RegionStop.ready():
            # RegionStop is permitted to contain 'None' values,
            #  which we replace with the full extent of the corresponding axis
            new_stop = [x or extent for x, extent in zip(self.RegionStop.value, total_roi[1])]

        clipped_start = numpy.maximum(0, new_start)
        clipped_stop = numpy.minimum(total_roi[1], new_stop)
        if (clipped_start != new_start).any() or (clipped_stop != new_stop).any():
            warnings.warn(
                "The ROI you are attempting to export exceeds the extents of your dataset.  Clipping to dataset bounds."
            )

        new_start, new_stop = tuple(clipped_start), tuple(clipped_stop)

        # If we're in the process of switching input data,
        #  then the roi dimensionality might not match up.
        #  Just leave the roi disconnected for now.
        if len(self.Input.meta.shape) != len(new_start) or len(self.Input.meta.shape) != len(new_stop):
            self._opSubRegion.Roi.disconnect()
        elif not self._opSubRegion.Roi.ready() or self._opSubRegion.Roi.value != (new_start, new_stop):
            self._opSubRegion.Roi.setValue((new_start, new_stop))

        # Set up normalization and dtype conversion
        export_dtype = self.Input.meta.dtype
        if self.ExportDtype.ready():
            export_dtype = self.ExportDtype.value

        need_normalize = (
            self.InputMin.ready() and self.InputMax.ready() and self.ExportMin.ready() and self.ExportMax.ready()
        )
        if need_normalize:
            minVal, maxVal = self.InputMin.value, self.InputMax.value
            outputMinVal, outputMaxVal = self.ExportMin.value, self.ExportMax.value

            # Force a drange onto the input slot metadata.
            # opNormalizeAndConvert is an OpPixelOperator,
            #  which transforms the drange correctly in this case.
            self._opDrangeInjection.Metadata.setValue({"drange": (minVal, maxVal)})

            def normalize(a):
                numerator = numpy.float64(outputMaxVal) - numpy.float64(outputMinVal)
                denominator = numpy.float64(maxVal) - numpy.float64(minVal)
                if denominator != 0.0:
                    frac = numpy.float32(numerator / denominator)
                else:
                    # Denominator was zero.  The user is probably just temporarily changing the values.
                    frac = numpy.float32(0.0)
                result = numpy.asarray(outputMinVal + (a - minVal) * frac, export_dtype)
                return result

            self._opNormalizeAndConvert.Function.setValue(normalize)

            # The OpPixelOperator sets the drange correctly using the function we give it.
            output_drange = self._opNormalizeAndConvert.Output.meta.drange
            assert type(output_drange[0]) == export_dtype
            assert type(output_drange[1]) == export_dtype
        else:
            # We have no drange to set.
            # If the original slot metadata had a drange,
            #  it will be propagated downstream anyway.
            self._opDrangeInjection.Metadata.setValue({})

            # No normalization: just identity function with dtype conversion
            self._opNormalizeAndConvert.Function.setValue(lambda a: numpy.asarray(a, export_dtype))

        # Use user-provided axis order if specified
        user_provided = False
        if self.OutputAxisOrder.ready():
            try:
                self._opReorderAxes.AxisOrder.setValue(self.OutputAxisOrder.value)
                user_provided = True
            except KeyError:
                # FIXME: Why does the above line fail sometimes?
                warnings.warn("Ignoring invalid axis order setting")

        if not user_provided:
            if self.Input.meta.original_axistags is None:
                axiskeys = self.Input.meta.getAxisKeys()
            else:
                axiskeys = self.Input.meta.getOriginalAxisKeys()

            self._opReorderAxes.AxisOrder.setValue("".join(axiskeys))

        # Provide the coordinate offset, but only for the axes that are present in the output image
        tagged_input_offset = collections.defaultdict(lambda: -1, list(zip(self.Input.meta.getAxisKeys(), new_start)))
        output_axes = self._opReorderAxes.AxisOrder.value
        output_offset = [tagged_input_offset[axis] for axis in output_axes]
        output_offset = tuple([x for x in output_offset if x != -1])
        self._opExportSlot.CoordinateOffset.setValue(output_offset)

        # Obtain values for possible name fields
        known_keys = {"roi": list(self._opSubRegion.Roi.value)}
        roi = numpy.array(self._opSubRegion.Roi.value)
        for key, (start, stop) in zip(self.Input.meta.getAxisKeys(), roi.transpose()):
            known_keys[key + "_start"] = start
            known_keys[key + "_stop"] = stop

        # Blank the internal path while we update the external path
        #  to avoid invalid intermediate states of ExportPath
        self._opExportSlot.OutputInternalPath.setValue("")

        # use partial formatting to fill in non-coordinate name fields
        name_format = self.OutputFilenameFormat.value
        partially_formatted_path = format_known_keys(name_format, known_keys, strict=False)
        self._opExportSlot.OutputFilenameFormat.setValue(partially_formatted_path)

        internal_dataset_format = self.OutputInternalPath.value
        partially_formatted_dataset_name = format_known_keys(internal_dataset_format, known_keys)
        self._opExportSlot.OutputInternalPath.setValue(partially_formatted_dataset_name)
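
The example above additionally clips the requested export region to the dataset bounds before applying it, warning if anything had to be clipped. The core of that check in isolation, with made-up shapes:

import numpy

dataset_shape = (100, 200, 3)            # hypothetical input shape
new_start = numpy.array([-5, 20, 0])     # hypothetical requested start
new_stop = numpy.array([90, 250, 3])     # hypothetical requested stop

clipped_start = numpy.maximum(0, new_start)
clipped_stop = numpy.minimum(dataset_shape, new_stop)
if (clipped_start != new_start).any() or (clipped_stop != new_stop).any():
    print("Requested ROI exceeds the dataset extents; clipping to bounds.")
print(clipped_start.tolist(), clipped_stop.tolist())  # -> [0, 20, 0] [90, 200, 3]
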
Code example #20
    def setupOutputs(self):
        # Prepare subregion operator
        total_roi = roiFromShape(self.Input.meta.shape)
        total_roi = list(map(tuple, total_roi))

        # Default to full roi
        new_start, new_stop = total_roi

        if self.RegionStart.ready():
            # RegionStart is permitted to contain 'None' values, which we replace with zeros
            new_start = [x or 0 for x in self.RegionStart.value]

        if self.RegionStop.ready():
            # RegionStop is permitted to contain 'None' values,
            #  which we replace with the full extent of the corresponding axis
            new_stop = [x or extent for x, extent in zip(self.RegionStop.value, total_roi[1])]
        else:
            self._opSubRegion.Stop.setValue(tuple(total_roi[1]))

        if not self._opSubRegion.Start.ready() or \
           not self._opSubRegion.Stop.ready() or \
           self._opSubRegion.Start.value != new_start or \
           self._opSubRegion.Stop.value != new_stop:
            # Disconnect first to ensure that the start/stop slots are applied together (atomically)
            self._opSubRegion.Stop.disconnect()

            # Provide the coordinate offset, but only for the axes that are present in the output image
            tagged_input_offset = collections.defaultdict(
                lambda: -1, zip(self.Input.meta.getAxisKeys(), new_start))
            output_axes = self._opReorderAxes.AxisOrder.value
            output_offset = [tagged_input_offset[axis] for axis in output_axes]
            output_offset = tuple(filter(lambda x: x != -1, output_offset))
            self._opExportSlot.CoordinateOffset.setValue(output_offset)

            self._opSubRegion.Start.setValue(tuple(new_start))
            self._opSubRegion.Stop.setValue(tuple(new_stop))

        # Set up normalization and dtype conversion
        export_dtype = self.Input.meta.dtype
        if self.ExportDtype.ready():
            export_dtype = self.ExportDtype.value

        need_normalize = (self.InputMin.ready() and self.InputMax.ready()
                          and self.ExportMin.ready()
                          and self.ExportMax.ready())
        if need_normalize:
            minVal, maxVal = self.InputMin.value, self.InputMax.value
            outputMinVal, outputMaxVal = self.ExportMin.value, self.ExportMax.value

            # Force a drange onto the input slot metadata.
            # opNormalizeAndConvert is an OpPixelOperator,
            #  which transforms the drange correctly in this case.
            self._opDrangeInjection.Metadata.setValue(
                {'drange': (minVal, maxVal)})

            def normalize(a):
                numerator = numpy.float64(outputMaxVal) - numpy.float64(
                    outputMinVal)
                denominator = numpy.float64(maxVal) - numpy.float64(minVal)
                if denominator != 0.0:
                    frac = numpy.float32(numerator / denominator)
                else:
                    # Denominator was zero.  The user is probably just temporarily changing the values.
                    frac = numpy.float32(0.0)
                result = numpy.asarray(outputMinVal + (a - minVal) * frac,
                                       export_dtype)
                return result

            self._opNormalizeAndConvert.Function.setValue(normalize)

            # The OpPixelOperator sets the drange correctly using the function we give it.
            output_drange = self._opNormalizeAndConvert.Output.meta.drange
            assert type(output_drange[0]) == export_dtype
            assert type(output_drange[1]) == export_dtype
        else:
            # We have no drange to set.
            # If the original slot metadata had a drange,
            #  it will be propagated downstream anyway.
            self._opDrangeInjection.Metadata.setValue({})

            # No normalization: just identity function with dtype conversion
            self._opNormalizeAndConvert.Function.setValue(
                lambda a: numpy.asarray(a, export_dtype))

        # Use user-provided axis order if specified
        if self.OutputAxisOrder.ready():
            self._opReorderAxes.AxisOrder.setValue(self.OutputAxisOrder.value)
        else:
            axistags = self.Input.meta.axistags
            self._opReorderAxes.AxisOrder.setValue("".join(
                tag.key for tag in axistags))

        # Obtain values for possible name fields
        roi = [
            tuple(self._opSubRegion.Start.value),
            tuple(self._opSubRegion.Stop.value)
        ]
        known_keys = {'roi': roi}

        # Blank the internal path while we update the external path
        #  to avoid invalid intermediate states of ExportPath
        self._opExportSlot.OutputInternalPath.setValue("")

        # use partial formatting to fill in non-coordinate name fields
        name_format = self.OutputFilenameFormat.value
        partially_formatted_path = format_known_keys(name_format, known_keys)
        self._opExportSlot.OutputFilenameFormat.setValue(
            partially_formatted_path)

        internal_dataset_format = self.OutputInternalPath.value
        partially_formatted_dataset_name = format_known_keys(
            internal_dataset_format, known_keys)
        self._opExportSlot.OutputInternalPath.setValue(
            partially_formatted_dataset_name)
Code example #21
    def post_process_lane_export(self, lane_index, checkOverwriteFiles=False):
        # FIXME: This probably only works for the non-blockwise export slot.
        #        We should assert that the user isn't using the blockwise slot.

        # Plugin export if selected
        logger.info("Export source is: " + self.dataExportTrackingApplet.
                    topLevelOperator.SelectedExportSource.value)

        print "in post_process_lane_export"
        if self.dataExportTrackingApplet.topLevelOperator.SelectedExportSource.value == OpTrackingBaseDataExport.PluginOnlyName:
            logger.info("Export source plugin selected!")
            selectedPlugin = self.dataExportTrackingApplet.topLevelOperator.SelectedPlugin.value

            exportPluginInfo = pluginManager.getPluginByName(
                selectedPlugin, category="TrackingExportFormats")
            if exportPluginInfo is None:
                logger.error("Could not find selected plugin %s" %
                             exportPluginInfo)
            else:
                exportPlugin = exportPluginInfo.plugin_object
                logger.info("Exporting tracking result using %s" %
                            selectedPlugin)
                name_format = self.dataExportTrackingApplet.topLevelOperator.getLane(
                    lane_index).OutputFilenameFormat.value
                partially_formatted_name = self.getPartiallyFormattedName(
                    lane_index, name_format)

                if exportPlugin.exportsToFile:
                    filename = partially_formatted_name
                    if os.path.basename(filename) == '':
                        filename = os.path.join(filename, 'pluginExport.txt')
                else:
                    filename = os.path.dirname(partially_formatted_name)

                if filename is None or len(str(filename)) == 0:
                    logger.error(
                        "Cannot export from plugin with empty output filename")
                    return

                exportStatus = self.trackingApplet.topLevelOperator.getLane(
                    lane_index).exportPlugin(filename, exportPlugin,
                                             checkOverwriteFiles)
                if not exportStatus:
                    return False
                logger.info("Export done")

            return

        # CSV Table export (only if plugin was not selected)
        settings, selected_features = self.trackingApplet.topLevelOperator.getLane(
            lane_index).get_table_export_settings()
        from lazyflow.utility import PathComponents, make_absolute, format_known_keys

        if settings:
            self.dataExportTrackingApplet.progressSignal.emit(-1)
            raw_dataset_info = self.dataSelectionApplet.topLevelOperator.DatasetGroup[
                lane_index][0].value

            project_path = self.shell.projectManager.currentProjectPath
            project_dir = os.path.dirname(project_path)
            dataset_dir = PathComponents(
                raw_dataset_info.filePath).externalDirectory
            abs_dataset_dir = make_absolute(dataset_dir, cwd=project_dir)

            known_keys = {}
            known_keys['dataset_dir'] = abs_dataset_dir
            nickname = raw_dataset_info.nickname.replace('*', '')
            if os.path.pathsep in nickname:
                nickname = PathComponents(nickname.split(
                    os.path.pathsep)[0]).fileNameBase
            known_keys['nickname'] = nickname

            # use partial formatting to fill in non-coordinate name fields
            name_format = settings['file path']
            partially_formatted_name = format_known_keys(
                name_format, known_keys)
            settings['file path'] = partially_formatted_name

            req = self.trackingApplet.topLevelOperator.getLane(
                lane_index
            ).export_object_data(
                lane_index,
                # FIXME: Even in non-headless mode, we can't show the gui because we're running in a non-main thread.
                #        That's not a huge deal, because there's still a progress bar for the overall export.
                show_gui=False)

            req.wait()
            self.dataExportTrackingApplet.progressSignal.emit(100)