def process_groups(self, data_model: DataModel,
                   selected_files: [FileDescriptor],
                   output_directory: str,
                   console: Console):
    """
    Process the given selected files in groups by size, temperature, or filter
    (or any combination), as requested in the data model.  Each qualifying
    group is combined into one master frame in the output directory.
    Exceptions thrown:
        NoGroupOutputDirectory      Output directory does not exist and unable to create it
    :param data_model:          Data model specifying options for the current run
    :param selected_files:      List of descriptions of files to be grouped then processed
    :param output_directory:    Directory to contain output files from processed groups
    :param console:             Re-directable console output object
    """
    console.push_level()
    temperature_bandwidth = data_model.get_temperature_group_bandwidth()
    disposition_folder = data_model.get_disposition_subfolder_name()
    # The disposition sub-folder name may contain date/time/filter substitution symbols
    substituted_folder_name = SharedUtils.substitute_date_time_filter_in_string(
        disposition_folder)
    console.message(
        "Process groups into output directory: " + output_directory, +1)
    if not SharedUtils.ensure_directory_exists(output_directory):
        raise MasterMakerExceptions.NoGroupOutputDirectory(output_directory)
    # Groups with fewer files than this threshold are skipped (0 disables the check)
    minimum_group_size = data_model.get_minimum_group_size() \
        if data_model.get_ignore_groups_fewer_than() else 0

    # Process size groups, or all sizes if not grouping
    groups_by_size = self.get_groups_by_size(
        selected_files, data_model.get_group_by_size())
    group_by_size = data_model.get_group_by_size()
    group_by_temperature = data_model.get_group_by_temperature()
    group_by_filter = data_model.get_group_by_filter()
    for size_group in groups_by_size:
        self.check_cancellation()
        console.push_level()
        # Message about this group only if this grouping was requested
        if len(size_group) < minimum_group_size:
            if group_by_size:
                console.message(
                    f"Ignoring one size group: {len(size_group)} "
                    f"files {size_group[0].get_size_key()}", +1)
        else:
            if group_by_size:
                console.message(
                    f"Processing one size group: {len(size_group)} "
                    f"files {size_group[0].get_size_key()}", +1)
            # Within this size group, process temperature groups, or all temperatures if not grouping
            groups_by_temperature = \
                self.get_groups_by_temperature(size_group,
                                               data_model.get_group_by_temperature(),
                                               temperature_bandwidth)
            for temperature_group in groups_by_temperature:
                self.check_cancellation()
                console.push_level()
                (_, mean_temperature) = ImageMath.mean_exposure_and_temperature(
                    temperature_group)
                if len(temperature_group) < minimum_group_size:
                    if group_by_temperature:
                        console.message(
                            f"Ignoring one temperature group: {len(temperature_group)} "
                            f"files with mean temperature {mean_temperature:.1f}", +1)
                else:
                    if group_by_temperature:
                        console.message(
                            f"Processing one temperature group: {len(temperature_group)} "
                            f"files with mean temperature {mean_temperature:.1f}", +1)
                    # Within this temperature group, process filter groups, or all filters if not grouping
                    groups_by_filter = \
                        self.get_groups_by_filter(temperature_group,
                                                  data_model.get_group_by_filter())
                    for filter_group in groups_by_filter:
                        self.check_cancellation()
                        console.push_level()
                        filter_name = filter_group[0].get_filter_name()
                        if len(filter_group) < minimum_group_size:
                            if group_by_filter:
                                console.message(
                                    f"Ignoring one filter group: {len(filter_group)} "
                                    f"files with {filter_name} filter ", +1)
                        else:
                            if group_by_filter:
                                console.message(
                                    f"Processing one filter group: {len(filter_group)} "
                                    f"files with {filter_name} filter ", +1)
                            # Group passed all grouping criteria: combine it into one master frame
                            self.process_one_group(
                                data_model, filter_group,
                                output_directory,
                                data_model.get_master_combine_method(),
                                substituted_folder_name, console)
                        console.pop_level()
                        self.check_cancellation()
                console.pop_level()
        console.pop_level()
    console.message("Group combining complete", 0)
    console.pop_level()
def combine_files(self, input_files: [FileDescriptor], data_model: DataModel,
                  filter_name: str, output_path: str, console: Console):
    """
    Combine the given files into a single master flat frame, using the
    combination method selected in the data model, and write the result
    to a new FITS file at the given output path.
    :param input_files:     List of descriptors of the files to be combined
    :param data_model:      Data model with options for this run
    :param filter_name:     Human-readable filter name (for output file name and FITS comment)
    :param output_path:     Path for output file to be created
    :param console:         Redirectable console output object
    """
    # Stack the console indentation level so it can be restored on exit
    console.push_level()
    substituted_file_name = SharedUtils.substitute_date_time_filter_in_string(
        output_path)
    combine_method = data_model.get_master_combine_method()

    # Determine any precalibration to be applied, and the FITS-comment tag describing it
    calibrator = Calibrator(data_model)
    calibration_tag = calibrator.fits_comment_tag()

    assert len(input_files) > 0
    binning: int = input_files[0].get_binning()
    file_names = [descriptor.get_absolute_path() for descriptor in input_files]
    (mean_exposure, mean_temperature) = \
        ImageMath.mean_exposure_and_temperature(input_files)

    # Combine with the chosen method, remembering the FITS comment describing
    # what was done; then write one combined output file below.
    if combine_method == Constants.COMBINE_MEAN:
        combined_data = ImageMath.combine_mean(file_names, calibrator, console,
                                               self._session_controller)
        self.check_cancellation()
        fits_comment = f"Master Flat MEAN combined {calibration_tag}"
    elif combine_method == Constants.COMBINE_MEDIAN:
        combined_data = ImageMath.combine_median(file_names, calibrator, console,
                                                 self._session_controller)
        self.check_cancellation()
        fits_comment = f"Master Flat MEDIAN combined {calibration_tag}"
    elif combine_method == Constants.COMBINE_MINMAX:
        drop_count = data_model.get_min_max_number_clipped_per_end()
        combined_data = ImageMath.combine_min_max_clip(
            file_names, drop_count, calibrator, console,
            self._session_controller)
        self.check_cancellation()
        assert combined_data is not None
        fits_comment = (f"Master Flat Min/Max Clipped "
                        f"(drop {drop_count}) Mean combined"
                        f" {calibration_tag}")
    else:
        assert combine_method == Constants.COMBINE_SIGMA_CLIP
        clip_threshold = data_model.get_sigma_clip_threshold()
        combined_data = ImageMath.combine_sigma_clip(
            file_names, clip_threshold, calibrator, console,
            self._session_controller)
        self.check_cancellation()
        assert combined_data is not None
        fits_comment = (f"Master Flat Sigma Clipped "
                        f"(threshold {clip_threshold}) Mean combined"
                        f" {calibration_tag}")
    RmFitsUtil.create_combined_fits_file(
        substituted_file_name, combined_data,
        FileDescriptor.FILE_TYPE_FLAT, "Flat Frame",
        mean_exposure, mean_temperature, filter_name, binning,
        fits_comment)
    console.pop_level()
def process_groups(self, data_model: DataModel,
                   selected_files: [FileDescriptor],
                   output_directory: str,
                   console: Console):
    """
    Process the given selected files in groups by size, exposure, or temperature
    (or any combination), as requested in the data model.  Each qualifying
    group is combined into one master frame in the output directory.
    Raises MasterMakerExceptions.NoGroupOutputDirectory if the output
    directory does not exist and cannot be created.
    :param data_model:          Data model specifying options for the current run
    :param selected_files:      List of descriptions of files to be grouped then processed
    :param output_directory:    Directory to contain output files from processed groups
    :param console:             Re-directable console output object
    """
    console.push_level()
    exposure_bandwidth = data_model.get_exposure_group_bandwidth()
    temperature_bandwidth = data_model.get_temperature_group_bandwidth()
    disposition_folder = data_model.get_disposition_subfolder_name()
    # The disposition sub-folder name may contain date/time/filter substitution symbols
    substituted_folder_name = SharedUtils.substitute_date_time_filter_in_string(
        disposition_folder)
    console.message(
        "Process groups into output directory: " + output_directory, +1)
    if not SharedUtils.ensure_directory_exists(output_directory):
        raise MasterMakerExceptions.NoGroupOutputDirectory(output_directory)
    # Groups with fewer files than this threshold are skipped (0 disables the check)
    minimum_group_size = data_model.get_minimum_group_size() \
        if data_model.get_ignore_groups_fewer_than() else 0

    # Process size groups, or all sizes if not grouping
    groups_by_size = self.get_groups_by_size(
        selected_files, data_model.get_group_by_size())
    group_by_size = data_model.get_group_by_size()
    group_by_exposure = data_model.get_group_by_exposure()
    group_by_temperature = data_model.get_group_by_temperature()
    for size_group in groups_by_size:
        self.check_cancellation()
        console.push_level()
        # Message about this group only if this grouping was requested
        if len(size_group) < minimum_group_size:
            if group_by_size:
                console.message(
                    f"Ignoring one size group: {len(size_group)} "
                    f"files {size_group[0].get_size_key()}", +1)
        else:
            if group_by_size:
                console.message(
                    f"Processing one size group: {len(size_group)} "
                    f"files {size_group[0].get_size_key()}", +1)
            # Within this size group, process exposure groups, or all exposures if not grouping
            groups_by_exposure = self.get_groups_by_exposure(
                size_group, data_model.get_group_by_exposure(),
                exposure_bandwidth)
            for exposure_group in groups_by_exposure:
                self.check_cancellation()
                (mean_exposure, _) = ImageMath.mean_exposure_and_temperature(
                    exposure_group)
                console.push_level()
                if len(exposure_group) < minimum_group_size:
                    if group_by_exposure:
                        console.message(
                            f"Ignoring one exposure group: {len(exposure_group)} "
                            f"files exposed at mean {mean_exposure:.2f} seconds", +1)
                else:
                    if group_by_exposure:
                        console.message(
                            f"Processing one exposure group: {len(exposure_group)} "
                            f"files exposed at mean {mean_exposure:.2f} seconds", +1)
                    # Within this exposure group, process temperature groups, or all temperatures if not grouping
                    groups_by_temperature = \
                        self.get_groups_by_temperature(exposure_group,
                                                       data_model.get_group_by_temperature(),
                                                       temperature_bandwidth)
                    for temperature_group in groups_by_temperature:
                        self.check_cancellation()
                        console.push_level()
                        (_, mean_temperature) = ImageMath.mean_exposure_and_temperature(
                            temperature_group)
                        if len(temperature_group) < minimum_group_size:
                            if group_by_temperature:
                                console.message(
                                    f"Ignoring one temperature group: {len(temperature_group)} "
                                    f"files with mean temperature {mean_temperature:.1f}", +1)
                        else:
                            if group_by_temperature:
                                console.message(
                                    f"Processing one temperature group: {len(temperature_group)} "
                                    f"files with mean temperature {mean_temperature:.1f}", +1)
                            # Now we have a list of descriptors, grouped as appropriate, to process
                            self.process_one_group(
                                data_model, temperature_group,
                                output_directory,
                                data_model.get_master_combine_method(),
                                substituted_folder_name, console)
                            self.check_cancellation()
                        console.pop_level()
                console.pop_level()
        console.pop_level()
    console.message("Group combining complete", 0)
    console.pop_level()
def __init__(self, preferences: Preferences, data_model: DataModel):
    """Initialize MainWindow: load the Designer UI definition and populate
    every control from the given preferences and data model."""
    self._preferences = preferences
    self._data_model = data_model
    QMainWindow.__init__(self)
    self.ui = uic.loadUi(
        MultiOsUtil.path_for_file_in_program_directory("MainWindow.ui"))
    self._field_validity: {object, bool} = {}
    self._table_model: FitsFileTableModel
    self._indent_level = 0

    # Select the radio button for the combine algorithm from the data model
    combine_buttons = {
        Constants.COMBINE_MEAN: self.ui.combineMeanRB,
        Constants.COMBINE_MEDIAN: self.ui.combineMedianRB,
        Constants.COMBINE_MINMAX: self.ui.combineMinMaxRB,
        Constants.COMBINE_SIGMA_CLIP: self.ui.combineSigmaRB,
    }
    combine_method = data_model.get_master_combine_method()
    assert combine_method in combine_buttons
    combine_buttons[combine_method].setChecked(True)
    # Parameter fields are filled regardless of which algorithm is selected
    self.ui.minMaxNumDropped.setText(
        str(data_model.get_min_max_number_clipped_per_end()))
    self.ui.sigmaThreshold.setText(
        str(data_model.get_sigma_clip_threshold()))

    # Select the radio button for the input-file disposition
    input_disposition = data_model.get_input_file_disposition()
    if input_disposition == Constants.INPUT_DISPOSITION_SUBFOLDER:
        self.ui.dispositionSubFolderRB.setChecked(True)
    else:
        assert input_disposition == Constants.INPUT_DISPOSITION_NOTHING
        self.ui.dispositionNothingRB.setChecked(True)
    self.ui.subFolderName.setText(
        data_model.get_disposition_subfolder_name())

    # Pre-calibration option radio buttons and their associated fields
    precalibration_buttons = {
        Constants.CALIBRATION_FIXED_FILE: self.ui.fixedPreCalFileRB,
        Constants.CALIBRATION_NONE: self.ui.noPreClalibrationRB,
        Constants.CALIBRATION_AUTO_DIRECTORY: self.ui.autoPreCalibrationRB,
        Constants.CALIBRATION_PEDESTAL: self.ui.fixedPedestalRB,
    }
    precalibration_type = data_model.get_precalibration_type()
    assert precalibration_type in precalibration_buttons
    precalibration_buttons[precalibration_type].setChecked(True)
    self.ui.fixedPedestalAmount.setText(
        str(data_model.get_precalibration_pedestal()))
    self.ui.precalibrationPathDisplay.setText(
        os.path.basename(data_model.get_precalibration_fixed_path()))
    self.ui.autoDirectoryName.setText(
        os.path.basename(data_model.get_precalibration_auto_directory()))
    self.ui.autoRecursive.setChecked(
        data_model.get_auto_directory_recursive())
    self.ui.autoBiasOnly.setChecked(
        data_model.get_auto_directory_bias_only())

    # Grouping check boxes and their parameters
    self.ui.groupBySizeCB.setChecked(data_model.get_group_by_size())
    self.ui.groupByExposureCB.setChecked(
        data_model.get_group_by_exposure())
    self.ui.groupByTemperatureCB.setChecked(
        data_model.get_group_by_temperature())
    self.ui.ignoreSmallGroupsCB.setChecked(
        data_model.get_ignore_groups_fewer_than())
    self.ui.exposureGroupBandwidth.setText(
        f"{data_model.get_exposure_group_bandwidth()}")
    self.ui.temperatureGroupBandwidth.setText(
        f"{data_model.get_temperature_group_bandwidth()}")
    self.ui.minimumGroupSize.setText(
        str(data_model.get_minimum_group_size()))

    # File table and its model; columns resize to best fit their contents
    self._table_model = FitsFileTableModel(
        self.ui.filesTable, data_model.get_ignore_file_type())
    self.ui.filesTable.setModel(self._table_model)
    self.ui.filesTable.horizontalHeader().setSectionResizeMode(
        QHeaderView.ResizeToContents)

    # Summarize, on the main tab, the settings from the options tab
    # (and data model), then hook up the UI responders
    self.fill_options_readout()
    self.connect_responders()

    # Restore the saved window size, if one has been stored
    saved_window_size = self._preferences.get_main_window_size()
    if saved_window_size is not None:
        self.ui.resize(saved_window_size)

    self.enable_fields()
    self.enable_buttons()
def combine_files(self, input_files: [FileDescriptor], data_model: DataModel,
                  filter_name: str, output_path: str, console: Console):
    """
    Combine the given files into a single master dark frame, using the
    combination method selected in the data model, and write the result
    to a new FITS file at the given output path.
    :param input_files:     List of descriptors of the files to be combined
    :param data_model:      Data model with options for this run
    :param filter_name:     Human-readable filter name (for output file name and FITS comment)
    :param output_path:     Path for output file to be created
    :param console:         Redirectable console output object
    """
    # Stack the console indentation level so it can be restored on exit
    console.push_level()
    substituted_file_name = SharedUtils.substitute_date_time_filter_in_string(
        output_path)
    combine_method = data_model.get_master_combine_method()

    # Determine any precalibration to be applied, and the FITS-comment tag describing it
    calibrator = Calibrator(data_model)
    calibration_tag = calibrator.fits_comment_tag()

    assert len(input_files) > 0
    binning: int = input_files[0].get_binning()
    file_names = [descriptor.get_absolute_path() for descriptor in input_files]
    (mean_exposure, mean_temperature) = \
        ImageMath.mean_exposure_and_temperature(input_files)

    # Combine with the chosen method, remembering the FITS comment describing
    # what was done; then write one combined output file below.
    if combine_method == Constants.COMBINE_MEAN:
        combined_data = ImageMath.combine_mean(file_names, calibrator, console,
                                               self._session_controller)
        self.check_cancellation()
        fits_comment = f"Master Dark MEAN combined {calibration_tag}"
    elif combine_method == Constants.COMBINE_MEDIAN:
        combined_data = ImageMath.combine_median(file_names, calibrator, console,
                                                 self._session_controller)
        self.check_cancellation()
        fits_comment = f"Master Dark MEDIAN combined {calibration_tag}"
    elif combine_method == Constants.COMBINE_MINMAX:
        drop_count = data_model.get_min_max_number_clipped_per_end()
        combined_data = ImageMath.combine_min_max_clip(
            file_names, drop_count, calibrator, console,
            self._session_controller)
        self.check_cancellation()
        assert combined_data is not None
        fits_comment = (f"Master Dark Min/Max Clipped "
                        f"(drop {drop_count}) Mean combined"
                        f" {calibration_tag}")
    else:
        assert combine_method == Constants.COMBINE_SIGMA_CLIP
        clip_threshold = data_model.get_sigma_clip_threshold()
        combined_data = ImageMath.combine_sigma_clip(
            file_names, clip_threshold, calibrator, console,
            self._session_controller)
        self.check_cancellation()
        assert combined_data is not None
        fits_comment = (f"Master Dark Sigma Clipped "
                        f"(threshold {clip_threshold}) Mean combined"
                        f" {calibration_tag}")
    RmFitsUtil.create_combined_fits_file(
        substituted_file_name, combined_data,
        FileDescriptor.FILE_TYPE_DARK, "Dark Frame",
        mean_exposure, mean_temperature, filter_name, binning,
        fits_comment)
    console.pop_level()