Example #1
    def original_non_grouped_processing(self, selected_files: List[FileDescriptor],
                                        data_model: DataModel,
                                        output_file: str, console: Console):
        console.push_level()
        console.message("Using single-file processing", +1)
        # We'll use the first file in the list as a sample for things like image size
        assert len(selected_files) > 0
        # Confirm that these are all dark frames, and can be combined (same binning and dimensions)
        if FileCombiner.all_compatible_sizes(selected_files):
            self.check_cancellation()
            if data_model.get_ignore_file_type() or FileCombiner.all_of_type(
                    selected_files, FileDescriptor.FILE_TYPE_DARK):
                # Get (most common) filter name in the set
                # Since these are darks, the filter is meaningless, but we need the value
                # for the shared "create file" routine
                filter_name = SharedUtils.most_common_filter_name(
                    selected_files)

                # Do the combination
                self.combine_files(selected_files, data_model, filter_name,
                                   output_file, console)
                self.check_cancellation()
                # Files are combined.  Put away the inputs?
                # Return list of any that were moved, in case the UI needs to be adjusted
                substituted_folder_name = SharedUtils.substitute_date_time_filter_in_string(
                    data_model.get_disposition_subfolder_name())
                self.handle_input_files_disposition(
                    data_model.get_input_file_disposition(),
                    substituted_folder_name, selected_files, console)
            else:
                raise MasterMakerExceptions.NotAllDarkFrames
        else:
            raise MasterMakerExceptions.IncompatibleSizes
        console.message("Combining complete", 0)
        console.pop_level()
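
A minimal sketch of how this dark-frame path might be called, assuming combiner is an instance of the class these methods belong to (apparently FileCombiner) and that selected_files, data_model, and console were prepared elsewhere; the output path and messages below are illustrative only:

# Hypothetical caller code; only the method name and the two exception classes
# are taken from the example above.
try:
    combiner.original_non_grouped_processing(selected_files, data_model,
                                             "masterDark.fits", console)
except MasterMakerExceptions.NotAllDarkFrames:
    console.message("Selected files are not all dark frames", 0)
except MasterMakerExceptions.IncompatibleSizes:
    console.message("Selected files differ in dimensions or binning", 0)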
Example #2
    def process_one_group(self, data_model: DataModel,
                          descriptor_list: List[FileDescriptor],
                          output_directory: str, combine_method: int,
                          disposition_folder_name, console: Console):
        """
        Process one group of files, output to the given directory
        Exceptions thrown:
            NotAllFlatFrames        The given files are not all flat frames
            IncompatibleSizes       The given files are not all the same dimensions

        :param data_model:                  Data model giving options for current run
        :param descriptor_list:             List of all the files in one group, for processing
        :param output_directory:            Path to directory to receive the output file
        :param combine_method:              Code saying how these files should be combined
        :param disposition_folder_name:     If files to be moved after processing, name of receiving folder
        :param console:                     Re-directable console output object
        """
        assert len(descriptor_list) > 0
        sample_file: FileDescriptor = descriptor_list[0]
        console.push_level()
        self.describe_group(data_model, len(descriptor_list), sample_file,
                            console)

        # Make up a file name for this group's output, into the given directory
        file_name = SharedUtils.get_file_name_portion(
            combine_method, sample_file, data_model.get_sigma_clip_threshold(),
            data_model.get_min_max_number_clipped_per_end())
        output_file = f"{output_directory}/{file_name}"

        # Confirm that these are all flat frames, and can be combined (same binning and dimensions)
        if self.all_compatible_sizes(descriptor_list):
            if data_model.get_ignore_file_type() \
                    or FileCombiner.all_of_type(descriptor_list, FileDescriptor.FILE_TYPE_FLAT):
                # Get the filter name to go in the output FITS metadata.
                # All the files should be the same filter, but in case there are stragglers,
                # get the most common filter from the set
                filter_name = SharedUtils.most_common_filter_name(
                    descriptor_list)

                # Do the combination
                self.combine_files(descriptor_list, data_model, filter_name,
                                   output_file, console)
                self.check_cancellation()
                # Files are combined.  Put away the inputs?
                # Return list of any that were moved, in case the UI needs to be adjusted
                self.handle_input_files_disposition(
                    data_model.get_input_file_disposition(),
                    disposition_folder_name, descriptor_list, console)
                self.check_cancellation()
            else:
                raise MasterMakerExceptions.NotAllFlatFrames
        else:
            raise MasterMakerExceptions.IncompatibleSizes

        console.pop_level()
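
A sketch of a driver loop that would feed one group at a time into process_one_group; the groups dictionary and its keys are assumptions made for illustration, while the arguments mirror the signature above (the combine-method code comes from the data model, as in Example #5):

# Hypothetical driver: groups is assumed to map a grouping key to the list of
# FileDescriptor objects belonging to that group.
for group_key, descriptors in groups.items():
    try:
        combiner.process_one_group(data_model, descriptors, output_directory,
                                   data_model.get_master_combine_method(),
                                   disposition_folder_name, console)
    except MasterMakerExceptions.NotAllFlatFrames:
        console.message(f"Skipping group {group_key}: not all flat frames", 0)
    except MasterMakerExceptions.IncompatibleSizes:
        console.message(f"Skipping group {group_key}: mixed dimensions or binning", 0)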
Example #3
    def original_non_grouped_processing(self, selected_files: List[FileDescriptor],
                                        data_model: DataModel,
                                        output_file: str, console: Console):
        """
        Process one set of files to a single output file.
        Output to the given path, if provided.  If not provided, prompt the user for it.
        :param selected_files:      List of descriptions of files to be combined
        :param data_model:          Data model that gives combination method and other options
        :param output_file:         Path for the combined output file
        :param console:             Re-directable console output object
        """
        console.push_level()
        console.message("Using single-file processing", +1)
        # We'll use the first file in the list as a sample for things like image size
        assert len(selected_files) > 0
        # Confirm that these are all flat frames, and can be combined (same binning and dimensions)
        if FileCombiner.all_compatible_sizes(selected_files):
            self.check_cancellation()
            if data_model.get_ignore_file_type() or FileCombiner.all_of_type(
                    selected_files, FileDescriptor.FILE_TYPE_FLAT):
                # Get (most common) filter name in the set
                # What filter should we put in the metadata for the output file?
                filter_name = SharedUtils.most_common_filter_name(
                    selected_files)

                # Do the combination
                self.combine_files(selected_files, data_model, filter_name,
                                   output_file, console)
                self.check_cancellation()
                # Files are combined.  Put away the inputs?
                # Return list of any that were moved, in case the UI needs to be adjusted
                substituted_folder_name = SharedUtils.substitute_date_time_filter_in_string(
                    data_model.get_disposition_subfolder_name())
                self.handle_input_files_disposition(
                    data_model.get_input_file_disposition(),
                    substituted_folder_name, selected_files, console)
            else:
                raise MasterMakerExceptions.NotAllFlatFrames
        else:
            raise MasterMakerExceptions.IncompatibleSizes
        console.message("Combining complete", 0)
        console.pop_level()
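
The repeated self.check_cancellation() calls in these examples suggest a cooperative cancellation scheme: the worker polls a flag and aborts by raising. The project's actual implementation is not shown in these excerpts; a purely hypothetical version, with an invented flag and exception name, might look like this:

    # Hypothetical sketch only: _cancelled and SessionCancelled are illustrative
    # names, not taken from the project.
    def check_cancellation(self):
        if self._cancelled:
            raise MasterMakerExceptions.SessionCancelled()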
Example #4
    def process_one_group(self, data_model: DataModel,
                          descriptor_list: List[FileDescriptor],
                          output_directory: str, combine_method: int,
                          disposition_folder_name, console: Console):
        assert len(descriptor_list) > 0
        sample_file: FileDescriptor = descriptor_list[0]
        console.push_level()
        self.describe_group(data_model, len(descriptor_list), sample_file,
                            console)

        # Make up a file name for this group's output, into the given directory
        file_name = SharedUtils.get_file_name_portion(
            combine_method, sample_file, data_model.get_sigma_clip_threshold(),
            data_model.get_min_max_number_clipped_per_end())
        output_file = f"{output_directory}/{file_name}"

        # Confirm that these are all dark frames, and can be combined (same binning and dimensions)
        if self.all_compatible_sizes(descriptor_list):
            if data_model.get_ignore_file_type() \
                    or FileCombiner.all_of_type(descriptor_list, FileDescriptor.FILE_TYPE_DARK):
                # Get (most common) filter name in the set
                # Since these are darks, the filter is meaningless, but we need the value
                # for the shared "create file" routine
                filter_name = SharedUtils.most_common_filter_name(
                    descriptor_list)

                # Do the combination
                self.combine_files(descriptor_list, data_model, filter_name,
                                   output_file, console)
                self.check_cancellation()
                # Files are combined.  Put away the inputs?
                # Return list of any that were moved, in case the UI needs to be adjusted
                self.handle_input_files_disposition(
                    data_model.get_input_file_disposition(),
                    disposition_folder_name, descriptor_list, console)
                self.check_cancellation()
            else:
                raise MasterMakerExceptions.NotAllDarkFrames
        else:
            raise MasterMakerExceptions.IncompatibleSizes
        console.pop_level()
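
Examples #2 and #4 differ essentially only in the frame type they verify (flat versus dark) and the exception raised; one way that check could be parameterized is sketched below, purely as an illustration rather than the project's own design:

    # Illustrative helper, not part of the project: frame_type would be
    # FileDescriptor.FILE_TYPE_DARK or FileDescriptor.FILE_TYPE_FLAT.
    # Returns True if the type check is bypassed or all files match the type.
    def frame_types_acceptable(self, descriptors, data_model, frame_type) -> bool:
        return data_model.get_ignore_file_type() \
            or FileCombiner.all_of_type(descriptors, frame_type)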
Example #5
    def __init__(self, preferences: Preferences, data_model: DataModel):
        """Initialize MainWindow class"""
        self._preferences = preferences
        self._data_model = data_model
        QMainWindow.__init__(self)
        self.ui = uic.loadUi(
            MultiOsUtil.path_for_file_in_program_directory("MainWindow.ui"))
        self._field_validity: Dict[object, bool] = {}
        self._table_model: FitsFileTableModel
        self._indent_level = 0

        # Load algorithm from preferences

        algorithm = data_model.get_master_combine_method()
        if algorithm == Constants.COMBINE_MEAN:
            self.ui.combineMeanRB.setChecked(True)
        elif algorithm == Constants.COMBINE_MEDIAN:
            self.ui.combineMedianRB.setChecked(True)
        elif algorithm == Constants.COMBINE_MINMAX:
            self.ui.combineMinMaxRB.setChecked(True)
        else:
            assert (algorithm == Constants.COMBINE_SIGMA_CLIP)
            self.ui.combineSigmaRB.setChecked(True)

        self.ui.minMaxNumDropped.setText(
            str(data_model.get_min_max_number_clipped_per_end()))
        self.ui.sigmaThreshold.setText(
            str(data_model.get_sigma_clip_threshold()))

        # Load disposition from preferences

        disposition = data_model.get_input_file_disposition()
        if disposition == Constants.INPUT_DISPOSITION_SUBFOLDER:
            self.ui.dispositionSubFolderRB.setChecked(True)
        else:
            assert (disposition == Constants.INPUT_DISPOSITION_NOTHING)
            self.ui.dispositionNothingRB.setChecked(True)
        self.ui.subFolderName.setText(
            data_model.get_disposition_subfolder_name())

        # Pre-calibration options

        precalibration_option = data_model.get_precalibration_type()
        if precalibration_option == Constants.CALIBRATION_FIXED_FILE:
            self.ui.fixedPreCalFileRB.setChecked(True)
        elif precalibration_option == Constants.CALIBRATION_NONE:
            self.ui.noPreClalibrationRB.setChecked(True)
        elif precalibration_option == Constants.CALIBRATION_AUTO_DIRECTORY:
            self.ui.autoPreCalibrationRB.setChecked(True)
        else:
            assert precalibration_option == Constants.CALIBRATION_PEDESTAL
            self.ui.fixedPedestalRB.setChecked(True)
        self.ui.fixedPedestalAmount.setText(
            str(data_model.get_precalibration_pedestal()))
        self.ui.precalibrationPathDisplay.setText(
            os.path.basename(data_model.get_precalibration_fixed_path()))
        self.ui.autoDirectoryName.setText(
            os.path.basename(data_model.get_precalibration_auto_directory()))

        self.ui.autoRecursive.setChecked(
            data_model.get_auto_directory_recursive())
        self.ui.autoBiasOnly.setChecked(
            data_model.get_auto_directory_bias_only())

        # Grouping boxes and parameters

        self.ui.groupBySizeCB.setChecked(data_model.get_group_by_size())
        self.ui.groupByExposureCB.setChecked(
            data_model.get_group_by_exposure())
        self.ui.groupByTemperatureCB.setChecked(
            data_model.get_group_by_temperature())
        self.ui.ignoreSmallGroupsCB.setChecked(
            data_model.get_ignore_groups_fewer_than())

        self.ui.exposureGroupBandwidth.setText(
            f"{data_model.get_exposure_group_bandwidth()}")
        self.ui.temperatureGroupBandwidth.setText(
            f"{data_model.get_temperature_group_bandwidth()}")
        self.ui.minimumGroupSize.setText(
            str(data_model.get_minimum_group_size()))

        # Set up the file table
        self._table_model = FitsFileTableModel(
            self.ui.filesTable, data_model.get_ignore_file_type())
        self.ui.filesTable.setModel(self._table_model)
        # Columns should resize to best fit their contents
        self.ui.filesTable.horizontalHeader().setSectionResizeMode(
            QHeaderView.ResizeToContents)

        # Write a summary, in the main tab, of the settings from the options tab (and data model)
        self.fill_options_readout()

        self.connect_responders()

        # If a window size is saved, set the window size
        window_size = self._preferences.get_main_window_size()
        if window_size is not None:
            self.ui.resize(window_size)

        self.enable_fields()
        self.enable_buttons()
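
For completeness, a hypothetical entry point showing how this window might be constructed and shown; it assumes PyQt5 and glosses over how Preferences and DataModel are actually built, since these excerpts do not show that. Because the widgets are loaded with uic.loadUi into self.ui, it is the loaded ui object that gets shown in this sketch:

import sys
from PyQt5.QtWidgets import QApplication

# Hypothetical launcher; preferences and data_model are assumed to exist and
# to be constructed elsewhere in the application.
app = QApplication(sys.argv)
window = MainWindow(preferences, data_model)
window.ui.show()
sys.exit(app.exec_())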