Example #1
    def original_non_grouped_processing(self, selected_files: [FileDescriptor],
                                        data_model: DataModel,
                                        output_file: str, console: Console):
        console.push_level()
        console.message("Using single-file processing", +1)
        # We'll use the first file in the list as a sample for things like image size
        assert len(selected_files) > 0
        # Confirm that these are all dark frames, and can be combined (same binning and dimensions)
        if FileCombiner.all_compatible_sizes(selected_files):
            self.check_cancellation()
            if data_model.get_ignore_file_type() or FileCombiner.all_of_type(
                    selected_files, FileDescriptor.FILE_TYPE_DARK):
                # Get (most common) filter name in the set
                # Since these are darks, the filter is meaningless, but we need the value
                # for the shared "create file" routine
                filter_name = SharedUtils.most_common_filter_name(
                    selected_files)

                # Do the combination
                self.combine_files(selected_files, data_model, filter_name,
                                   output_file, console)
                self.check_cancellation()
                # Files are combined.  Put away the inputs?
                # Return list of any that were moved, in case the UI needs to be adjusted
                substituted_folder_name = SharedUtils.substitute_date_time_filter_in_string(
                    data_model.get_disposition_subfolder_name())
                self.handle_input_files_disposition(
                    data_model.get_input_file_disposition(),
                    substituted_folder_name, selected_files, console)
            else:
                raise MasterMakerExceptions.NotAllDarkFrames
        else:
            raise MasterMakerExceptions.IncompatibleSizes
        console.message("Combining complete", 0)
        console.pop_level()
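
The helper SharedUtils.most_common_filter_name is not shown in these excerpts. Below is a minimal sketch of what such a routine could look like, assuming it only needs to return the most frequent name; the function name and the use of plain strings instead of FileDescriptor objects are illustrative stand-ins, not the project's actual implementation.

from collections import Counter
from typing import List


def most_common_filter_name_sketch(filter_names: List[str]) -> str:
    """Return the filter name that occurs most often in the given list.

    Hypothetical stand-in for SharedUtils.most_common_filter_name, which
    operates on FileDescriptor objects rather than plain strings.
    """
    assert len(filter_names) > 0
    counts = Counter(filter_names)
    # most_common(1) yields a single (name, count) pair; ties fall back to
    # insertion order on Python 3.7+
    return counts.most_common(1)[0][0]


# For darks the filter is meaningless, but the shared "create file" routine
# still needs some value for the output metadata.
print(most_common_filter_name_sketch(["Ha", "Lum", "Lum"]))  # -> "Lum"
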
Example #2
    def original_non_grouped_processing(self, selected_files: [FileDescriptor],
                                        data_model: DataModel,
                                        output_file: str, console: Console):
        """
        Process one set of files to a single output file.
        Output to the given path, if provided.  If not provided, prompt the user for it.
        :param selected_files:      List of descriptions of files to be combined
        :param data_model:          Data model that gives combination method and other options
        :param output_file:         Path for the combined output file
        :param console:             Re-directable console output object
        """
        console.push_level()
        console.message("Using single-file processing", +1)
        # We'll use the first file in the list as a sample for things like image size
        assert len(selected_files) > 0
        # Confirm that these are all flat frames, and can be combined (same binning and dimensions)
        if FileCombiner.all_compatible_sizes(selected_files):
            self.check_cancellation()
            if data_model.get_ignore_file_type() or FileCombiner.all_of_type(
                    selected_files, FileDescriptor.FILE_TYPE_FLAT):
                # Get (most common) filter name in the set
                # What filter should we put in the metadata for the output file?
                filter_name = SharedUtils.most_common_filter_name(
                    selected_files)

                # Do the combination
                self.combine_files(selected_files, data_model, filter_name,
                                   output_file, console)
                self.check_cancellation()
                # Files are combined.  Put away the inputs?
                # Return list of any that were moved, in case the UI needs to be adjusted
                substituted_folder_name = SharedUtils.substitute_date_time_filter_in_string(
                    data_model.get_disposition_subfolder_name())
                self.handle_input_files_disposition(
                    data_model.get_input_file_disposition(),
                    substituted_folder_name, selected_files, console)
            else:
                raise MasterMakerExceptions.NotAllFlatFrames
        else:
            raise MasterMakerExceptions.IncompatibleSizes
        console.message("Combining complete", 0)
        console.pop_level()
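
The guards FileCombiner.all_compatible_sizes and FileCombiner.all_of_type are also not shown. Here is a hedged sketch of how such checks might work, assuming "compatible" means identical dimensions and binning; FrameInfo and FILE_TYPE_FLAT below are stand-ins for the real FileDescriptor fields and constants.

from dataclasses import dataclass
from typing import List

FILE_TYPE_FLAT = 3  # stand-in constant; the real values live in FileDescriptor


@dataclass
class FrameInfo:
    """Minimal stand-in for FileDescriptor, carrying only what the checks need."""
    x_size: int
    y_size: int
    binning: int
    file_type: int


def all_compatible_sizes(frames: List[FrameInfo]) -> bool:
    # Frames can be combined only if every one has the same dimensions and binning
    first = frames[0]
    return all(f.x_size == first.x_size
               and f.y_size == first.y_size
               and f.binning == first.binning for f in frames)


def all_of_type(frames: List[FrameInfo], wanted_type: int) -> bool:
    # True only if every frame reports the required frame type
    return all(f.file_type == wanted_type for f in frames)


frames = [FrameInfo(4656, 3520, 1, FILE_TYPE_FLAT),
          FrameInfo(4656, 3520, 1, FILE_TYPE_FLAT)]
print(all_compatible_sizes(frames) and all_of_type(frames, FILE_TYPE_FLAT))  # -> True
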
Example #3
    def process_groups(self, data_model: DataModel,
                       selected_files: [FileDescriptor], output_directory: str,
                       console: Console):
        """
        Process the given selected files in groups by size, exposure, or temperature (or any combination)
        Exceptions thrown:
            NoGroupOutputDirectory      Output directory does not exist and unable to create it
        :param data_model:          Data model specifying options for the current run
        :param selected_files:      List of descriptions of files to be grouped then processed
        :param output_directory:    Directory to contain output files from processed groups
        :param console:             Re-directable console output object
        """
        console.push_level()
        temperature_bandwidth = data_model.get_temperature_group_bandwidth()
        disposition_folder = data_model.get_disposition_subfolder_name()
        substituted_folder_name = SharedUtils.substitute_date_time_filter_in_string(
            disposition_folder)
        console.message(
            "Process groups into output directory: " + output_directory, +1)
        if not SharedUtils.ensure_directory_exists(output_directory):
            raise MasterMakerExceptions.NoGroupOutputDirectory(
                output_directory)
        minimum_group_size = data_model.get_minimum_group_size() \
            if data_model.get_ignore_groups_fewer_than() else 0

        #  Process size groups, or all sizes if not grouping
        groups_by_size = self.get_groups_by_size(
            selected_files, data_model.get_group_by_size())
        group_by_size = data_model.get_group_by_size()
        group_by_temperature = data_model.get_group_by_temperature()
        group_by_filter = data_model.get_group_by_filter()
        for size_group in groups_by_size:
            self.check_cancellation()
            console.push_level()
            # Message about this group only if this grouping was requested
            if len(size_group) < minimum_group_size:
                if group_by_size:
                    console.message(
                        f"Ignoring one size group: {len(size_group)} "
                        f"files {size_group[0].get_size_key()}", +1)
            else:
                if group_by_size:
                    console.message(
                        f"Processing one size group: {len(size_group)} "
                        f"files {size_group[0].get_size_key()}", +1)
                # Within this size group, process temperature groups, or all temperatures if not grouping
                groups_by_temperature = \
                    self.get_groups_by_temperature(size_group,
                                                   data_model.get_group_by_temperature(),
                                                   temperature_bandwidth)
                for temperature_group in groups_by_temperature:
                    self.check_cancellation()
                    console.push_level()
                    (_, mean_temperature
                     ) = ImageMath.mean_exposure_and_temperature(
                         temperature_group)
                    if len(temperature_group) < minimum_group_size:
                        if group_by_temperature:
                            console.message(
                                f"Ignoring one temperature group: {len(temperature_group)} "
                                f"files with mean temperature {mean_temperature:.1f}",
                                +1)
                    else:
                        if group_by_temperature:
                            console.message(
                                f"Processing one temperature group: {len(temperature_group)} "
                                f"files with mean temperature {mean_temperature:.1f}",
                                +1)
                        # Within this temperature group, process filter groups, or all filters if not grouping
                        groups_by_filter = \
                            self.get_groups_by_filter(temperature_group,
                                                      data_model.get_group_by_filter())
                        for filter_group in groups_by_filter:
                            self.check_cancellation()
                            console.push_level()
                            filter_name = filter_group[0].get_filter_name()
                            if len(filter_group) < minimum_group_size:
                                if group_by_filter:
                                    console.message(
                                        f"Ignoring one filter group: {len(filter_group)} "
                                        f"files with {filter_name} filter ",
                                        +1)
                            else:
                                if group_by_filter:
                                    console.message(
                                        f"Processing one filter group: {len(filter_group)} "
                                        f"files with {filter_name} filter ",
                                        +1)
                                self.process_one_group(
                                    data_model, filter_group, output_directory,
                                    data_model.get_master_combine_method(),
                                    substituted_folder_name, console)
                            console.pop_level()
                        self.check_cancellation()
                    console.pop_level()
            console.pop_level()
        console.message("Group combining complete", 0)
        console.pop_level()
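
This method relies on bandwidth-based clustering (get_groups_by_temperature here, and get_groups_by_exposure in the variant below), whose implementations are not shown. The sketch below shows one plausible bandwidth-grouping rule, operating on plain numbers and anchoring each group at its first (sorted) value; the real helpers work on FileDescriptor objects and may cluster differently.

from typing import List


def group_by_bandwidth(values: List[float], bandwidth: float,
                       grouping_on: bool) -> List[List[float]]:
    """Cluster numeric values (e.g. temperatures) so every member of a group
    lies within 'bandwidth' of the group's first value.  If grouping is
    turned off, everything lands in one group, mirroring the
    "or all temperatures if not grouping" behaviour above."""
    if not grouping_on:
        return [list(values)]
    groups: List[List[float]] = []
    for value in sorted(values):
        if groups and abs(value - groups[-1][0]) <= bandwidth:
            groups[-1].append(value)
        else:
            groups.append([value])
    return groups


# Temperatures grouped with a 2-degree bandwidth
print(group_by_bandwidth([-10.2, -10.0, -9.5, -5.1, -4.8], 2.0, True))
# -> [[-10.2, -10.0, -9.5], [-5.1, -4.8]]
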
Example #4
    def __init__(self, preferences: Preferences, data_model: DataModel):
        """Initialize MainWindow class"""
        self._preferences = preferences
        self._data_model = data_model
        QMainWindow.__init__(self)
        self.ui = uic.loadUi(
            MultiOsUtil.path_for_file_in_program_directory("MainWindow.ui"))
        self._field_validity: {object, bool} = {}
        self._table_model: FitsFileTableModel
        self._indent_level = 0

        # Load algorithm from preferences

        algorithm = data_model.get_master_combine_method()
        if algorithm == Constants.COMBINE_MEAN:
            self.ui.combineMeanRB.setChecked(True)
        elif algorithm == Constants.COMBINE_MEDIAN:
            self.ui.combineMedianRB.setChecked(True)
        elif algorithm == Constants.COMBINE_MINMAX:
            self.ui.combineMinMaxRB.setChecked(True)
        else:
            assert (algorithm == Constants.COMBINE_SIGMA_CLIP)
            self.ui.combineSigmaRB.setChecked(True)

        self.ui.minMaxNumDropped.setText(
            str(data_model.get_min_max_number_clipped_per_end()))
        self.ui.sigmaThreshold.setText(
            str(data_model.get_sigma_clip_threshold()))

        # Load disposition from preferences

        disposition = data_model.get_input_file_disposition()
        if disposition == Constants.INPUT_DISPOSITION_SUBFOLDER:
            self.ui.dispositionSubFolderRB.setChecked(True)
        else:
            assert (disposition == Constants.INPUT_DISPOSITION_NOTHING)
            self.ui.dispositionNothingRB.setChecked(True)
        self.ui.subFolderName.setText(
            data_model.get_disposition_subfolder_name())

        # Pre-calibration options

        precalibration_option = data_model.get_precalibration_type()
        if precalibration_option == Constants.CALIBRATION_FIXED_FILE:
            self.ui.fixedPreCalFileRB.setChecked(True)
        elif precalibration_option == Constants.CALIBRATION_NONE:
            self.ui.noPreClalibrationRB.setChecked(True)
        elif precalibration_option == Constants.CALIBRATION_AUTO_DIRECTORY:
            self.ui.autoPreCalibrationRB.setChecked(True)
        else:
            assert precalibration_option == Constants.CALIBRATION_PEDESTAL
            self.ui.fixedPedestalRB.setChecked(True)
        self.ui.fixedPedestalAmount.setText(
            str(data_model.get_precalibration_pedestal()))
        self.ui.precalibrationPathDisplay.setText(
            os.path.basename(data_model.get_precalibration_fixed_path()))
        self.ui.autoDirectoryName.setText(
            os.path.basename(data_model.get_precalibration_auto_directory()))

        self.ui.autoRecursive.setChecked(
            data_model.get_auto_directory_recursive())
        self.ui.autoBiasOnly.setChecked(
            data_model.get_auto_directory_bias_only())

        # Grouping boxes and parameters

        self.ui.groupBySizeCB.setChecked(data_model.get_group_by_size())
        self.ui.groupByExposureCB.setChecked(
            data_model.get_group_by_exposure())
        self.ui.groupByTemperatureCB.setChecked(
            data_model.get_group_by_temperature())
        self.ui.ignoreSmallGroupsCB.setChecked(
            data_model.get_ignore_groups_fewer_than())

        self.ui.exposureGroupBandwidth.setText(
            f"{data_model.get_exposure_group_bandwidth()}")
        self.ui.temperatureGroupBandwidth.setText(
            f"{data_model.get_temperature_group_bandwidth()}")
        self.ui.minimumGroupSize.setText(
            str(data_model.get_minimum_group_size()))

        # Set up the file table
        self._table_model = FitsFileTableModel(
            self.ui.filesTable, data_model.get_ignore_file_type())
        self.ui.filesTable.setModel(self._table_model)
        # Columns should resize to best fit their contents
        self.ui.filesTable.horizontalHeader().setSectionResizeMode(
            QHeaderView.ResizeToContents)

        # Write a summary, in the main tab, of the settings from the options tab (and data model)
        self.fill_options_readout()

        self.connect_responders()

        # If a window size is saved, set the window size
        window_size = self._preferences.get_main_window_size()
        if window_size is not None:
            self.ui.resize(window_size)

        self.enable_fields()
        self.enable_buttons()
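
The constructor selects one radio button per enumerated setting with an if/elif/assert chain. Below is a Qt-free sketch of a table-driven alternative that maps each constant to its button in a dict; FakeRadioButton and the integer constants are stand-ins for the real Qt widgets and Constants values, and this is an alternative pattern, not the project's code.

# Stand-in constants; the real values live in Constants
COMBINE_MEAN, COMBINE_MEDIAN, COMBINE_MINMAX, COMBINE_SIGMA_CLIP = range(4)


class FakeRadioButton:
    """Minimal stand-in for a Qt radio button, recording its checked state."""

    def __init__(self, name: str):
        self.name = name
        self.checked = False

    def setChecked(self, value: bool):
        self.checked = value


def select_combine_button(algorithm: int, buttons: dict) -> None:
    # Table-driven replacement for the if/elif/assert chain in __init__ above
    if algorithm not in buttons:
        raise ValueError(f"Unknown combine method: {algorithm}")
    buttons[algorithm].setChecked(True)


buttons = {COMBINE_MEAN: FakeRadioButton("combineMeanRB"),
           COMBINE_MEDIAN: FakeRadioButton("combineMedianRB"),
           COMBINE_MINMAX: FakeRadioButton("combineMinMaxRB"),
           COMBINE_SIGMA_CLIP: FakeRadioButton("combineSigmaRB")}
select_combine_button(COMBINE_MEDIAN, buttons)
print(buttons[COMBINE_MEDIAN].checked)  # -> True
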
Example #5
    def process_groups(self, data_model: DataModel,
                       selected_files: [FileDescriptor], output_directory: str,
                       console: Console):
        console.push_level()
        exposure_bandwidth = data_model.get_exposure_group_bandwidth()
        temperature_bandwidth = data_model.get_temperature_group_bandwidth()
        disposition_folder = data_model.get_disposition_subfolder_name()
        substituted_folder_name = SharedUtils.substitute_date_time_filter_in_string(
            disposition_folder)
        console.message(
            "Process groups into output directory: " + output_directory, +1)
        if not SharedUtils.ensure_directory_exists(output_directory):
            raise MasterMakerExceptions.NoGroupOutputDirectory(
                output_directory)
        minimum_group_size = data_model.get_minimum_group_size() \
            if data_model.get_ignore_groups_fewer_than() else 0

        #  Process size groups, or all sizes if not grouping
        groups_by_size = self.get_groups_by_size(
            selected_files, data_model.get_group_by_size())
        group_by_size = data_model.get_group_by_size()
        group_by_exposure = data_model.get_group_by_exposure()
        group_by_temperature = data_model.get_group_by_temperature()
        for size_group in groups_by_size:
            self.check_cancellation()
            console.push_level()
            # Message about this group only if this grouping was requested
            if len(size_group) < minimum_group_size:
                if group_by_size:
                    console.message(
                        f"Ignoring one size group: {len(size_group)} "
                        f"files {size_group[0].get_size_key()}", +1)
            else:
                if group_by_size:
                    console.message(
                        f"Processing one size group: {len(size_group)} "
                        f"files {size_group[0].get_size_key()}", +1)
                # Within this size group, process exposure groups, or all exposures if not grouping
                groups_by_exposure = self.get_groups_by_exposure(
                    size_group, data_model.get_group_by_exposure(),
                    exposure_bandwidth)
                for exposure_group in groups_by_exposure:
                    self.check_cancellation()
                    (mean_exposure,
                     _) = ImageMath.mean_exposure_and_temperature(
                         exposure_group)
                    console.push_level()
                    if len(exposure_group) < minimum_group_size:
                        if group_by_exposure:
                            console.message(
                                f"Ignoring one exposure group: {len(exposure_group)} "
                                f"files exposed at mean {mean_exposure:.2f} seconds",
                                +1)
                    else:
                        if group_by_exposure:
                            console.message(
                                f"Processing one exposure group: {len(exposure_group)} "
                                f"files exposed at mean {mean_exposure:.2f} seconds",
                                +1)
                        # Within this exposure group, process temperature groups, or all temperatures if not grouping
                        groups_by_temperature = \
                            self.get_groups_by_temperature(exposure_group,
                                                           data_model.get_group_by_temperature(),
                                                           temperature_bandwidth)
                        for temperature_group in groups_by_temperature:
                            self.check_cancellation()
                            console.push_level()
                            (_, mean_temperature
                             ) = ImageMath.mean_exposure_and_temperature(
                                 temperature_group)
                            if len(temperature_group) < minimum_group_size:
                                if group_by_temperature:
                                    console.message(
                                        f"Ignoring one temperature group: {len(temperature_group)} "
                                        f"files with mean temperature {mean_temperature:.1f}",
                                        +1)
                            else:
                                if group_by_temperature:
                                    console.message(
                                        f"Processing one temperature group: {len(temperature_group)} "
                                        f"files with mean temperature {mean_temperature:.1f}",
                                        +1)
                                # Now we have a list of descriptors, grouped as appropriate, to process
                                self.process_one_group(
                                    data_model, temperature_group,
                                    output_directory,
                                    data_model.get_master_combine_method(),
                                    substituted_folder_name, console)
                                self.check_cancellation()
                            console.pop_level()
                    console.pop_level()
            console.pop_level()
        console.message("Group combining complete", 0)
        console.pop_level()
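
Both process_groups variants call ImageMath.mean_exposure_and_temperature to label groups in their console messages, but its implementation is not shown. A minimal sketch follows, under the assumption that it simply averages the two values across the group, here over plain (exposure, temperature) tuples rather than FileDescriptor objects.

from statistics import mean
from typing import List, Tuple


def mean_exposure_and_temperature_sketch(
        frames: List[Tuple[float, float]]) -> Tuple[float, float]:
    """Hypothetical stand-in: average the exposure seconds and temperatures
    of a group of frames, returned as (mean_exposure, mean_temperature)."""
    exposures = [exposure for (exposure, _) in frames]
    temperatures = [temperature for (_, temperature) in frames]
    return mean(exposures), mean(temperatures)


mean_exposure, mean_temperature = mean_exposure_and_temperature_sketch(
    [(120.0, -10.1), (120.0, -9.9), (60.0, -10.0)])
print(f"exposed at mean {mean_exposure:.2f} seconds, "
      f"mean temperature {mean_temperature:.1f}")
# -> exposed at mean 100.00 seconds, mean temperature -10.0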