Exemplo n.º 1
0
 def combine_median(cls, file_names: [str], calibrator: Calibrator,
                    console: Console,
                    session_controller: SessionController) -> ndarray:
     """
     Combine the named FITS files using a simple per-pixel median.
     :param file_names:          Names of files to be combined
     :param calibrator:          Calibration object, abstracting precalibration operations
     :param console:             Redirectable console output handler
     :param session_controller:  Controller for this subtask, checking for cancellation
     :return:                    2-dimensional ndarray of the resulting pixel values
     """
     # The UI disables the combine button when no files are selected
     assert file_names
     console.push_level()
     console.message("Combine by simple Median", +1)
     descriptors = RmFitsUtil.make_file_descriptions(file_names)
     all_data = RmFitsUtil.read_all_files_data(file_names)
     cls.check_cancellation(session_controller)
     calibrated = calibrator.calibrate_images(all_data, descriptors,
                                              console, session_controller)
     cls.check_cancellation(session_controller)
     combined = numpy.median(calibrated, axis=0)
     console.pop_level()
     return combined
Exemplo n.º 2
0
 def calibrate_with_file(
         self, file_data: [ndarray], calibration_file_path: str,
         console: Console,
         session_controller: SessionController) -> [ndarray]:
     """
     Calibrate each image by subtracting a single fixed calibration image,
     clipping at zero so no negative pixel values are produced.
     The calibration file must have the same dimensions as every image.
     :param file_data:               List of images' data. Each is 2d image matrix
     :param calibration_file_path:   Full path to calibration file
     :param console:                 Redirectable console output object
     :param session_controller:      Controller for this subtask
     :return:                        List of calibrated images
     :raises MasterMakerExceptions.SessionCancelled:  if the subtask is cancelled
     :raises MasterMakerExceptions.IncompatibleSizes: on a dimension mismatch
     """
     console.message(f"Calibrate with file: {calibration_file_path}", 0)
     calibration_image = RmFitsUtil.fits_data_from_path(
         calibration_file_path)
     calibration_shape = calibration_image.shape
     result = file_data.copy()
     for index, image in enumerate(result):
         if session_controller.thread_cancelled():
             raise MasterMakerExceptions.SessionCancelled
         if image.shape != calibration_shape:
             raise MasterMakerExceptions.IncompatibleSizes
         # Clip to the 16-bit unsigned range after subtraction
         result[index] = (image - calibration_image).clip(0, 0xFFFF)
     return result
Exemplo n.º 3
0
    def validate_file_dimensions(cls, descriptors: [FileDescriptor],
                                 data_model: DataModel) -> bool:
        """
        Determine whether all the given files share the same dimensions and
        binning.  If a fixed precalibration file is selected in the data model,
        it is included in the compatibility check.
        :param descriptors:     Descriptors of the files to be checked
        :param data_model:      Data model giving the precalibration type and file
        :return:                True if all files have the same size and binning
        """
        # An empty list is trivially compatible
        if len(descriptors) == 0:
            return True

        # Work on a copy: the original implementation appended the
        # precalibration descriptor directly to the caller's list,
        # mutating it as a side effect.
        files_to_check = list(descriptors)

        # If a precalibration file is in use, it must match the others too
        if data_model.get_precalibration_type() == Constants.CALIBRATION_FIXED_FILE:
            calibration_descriptor = \
                RmFitsUtil.make_file_descriptor(data_model.get_precalibration_fixed_path())
            files_to_check.append(calibration_descriptor)

        # Use the first file's binning and dimensions as the reference
        reference_file: FileDescriptor = files_to_check[0]
        reference_binning = reference_file.get_binning()
        reference_x_size = reference_file.get_x_dimension()
        reference_y_size = reference_file.get_y_dimension()

        # Every file must match the reference binning and both dimensions
        for descriptor in files_to_check:
            if descriptor.get_binning() != reference_binning:
                return False
            if descriptor.get_x_dimension() != reference_x_size:
                return False
            if descriptor.get_y_dimension() != reference_y_size:
                return False

        return True
Exemplo n.º 4
0
 def pick_files_button_clicked(self):
     """'Pick Files' button or 'Open' menu item are selected.  Get the input files from the user."""
     dialog = QFileDialog()
     file_names, _ = QFileDialog.getOpenFileNames(
         dialog,
         "Pick Files",
         "",
         "FITS files(*.fit)",  # plain string: the original f-string had no placeholders
         # options=QFileDialog.ReadOnly | QFileDialog.DontUseNativeDialog)
         options=QFileDialog.ReadOnly)
     # An empty list means the user clicked "cancel"; leave the table unchanged
     if file_names:
         try:
             file_descriptions = RmFitsUtil.make_file_descriptions(
                 file_names)
             self._table_model.set_file_descriptors(file_descriptions)
             # Column 0, ascending order
             self._table_model.sort(0, PyQt5.QtCore.Qt.AscendingOrder)
         except FileNotFoundError as exception:
             self.error_dialog(
                 "File Not Found",
                 f"File \"{exception.filename}\" was not found or not readable"
             )
     self.enable_buttons()
Exemplo n.º 5
0
 def combine_median(cls, file_names: [str], calibrator: Calibrator,
                    console: Console,
                    session_controller: SessionController) -> ndarray:
     """
     Combine the named FITS files with a simple per-pixel median and
     return the resulting 2-dimensional ndarray.  Calibration parameters
     are taken from a descriptor of the first (sample) file.
     """
     # The combine button is disabled when no files are selected
     assert file_names
     console.push_level()
     console.message("Combine by simple Median", +1)
     all_data = RmFitsUtil.read_all_files_data(file_names)
     cls.check_cancellation(session_controller)
     sample_file = RmFitsUtil.make_file_descriptor(file_names[0])
     calibrated = calibrator.calibrate_images(all_data, sample_file,
                                              console, session_controller)
     cls.check_cancellation(session_controller)
     combined = numpy.median(calibrated, axis=0)
     console.pop_level()
     return combined
Exemplo n.º 6
0
 def get_adu_values_for_descriptors(self, descriptors: [FileDescriptor]):
     """
     Compute the average ADU value of each listed file and store it
     on that file's descriptor.
     :param descriptors:     List of descriptors for files to be processed
     """
     for file_descriptor in descriptors:
         average_adus = RmFitsUtil.get_average_adus(
             file_descriptor.get_absolute_path())
         file_descriptor.set_average_adus(average_adus)
Exemplo n.º 7
0
    def calibrate_with_auto_directory(
            self, file_data: [ndarray], auto_directory_path: str,
            descriptors: [FileDescriptor], console: Console,
            session_controller: SessionController) -> [ndarray]:
        """
        Calibrate the given files' contents, each with the best-matching calibration file
        from a directory.  "Best" is measured by trying to match both the exposure time
        and temperature, with more weight to the exposure time.  A separate file is chosen
        for each input image, since the exposure times of collected flats often vary
        during the collection session, to keep the ADU level constant as the light changes.
        :param file_data:               List of images' data (list of 2-d matrix of pixel values)
        :param auto_directory_path:     Path to folder of calibration images
        :param descriptors:             Descs of files corresponding to the given images
        :param console:                 Redirectable console output object
        :param session_controller:      Controller for this subtask
        :return:                        List of calibrated images
        :raises MasterMakerExceptions.SessionCancelled:              if the subtask is cancelled
        :raises MasterMakerExceptions.AutoCalibrationDirectoryEmpty: if the directory holds no files
        :raises MasterMakerExceptions.IncompatibleSizes:             on an image-size mismatch
        """
        assert len(file_data) > 0
        assert len(file_data) == len(descriptors)

        # Get all calibration files from directory so we only have to read it once
        directory_files = self.all_descriptors_from_directory(
            auto_directory_path,
            self._data_model.get_auto_directory_recursive())
        if session_controller.thread_cancelled():
            raise MasterMakerExceptions.SessionCancelled
        if len(directory_files) == 0:
            # No files in that directory, raise exception
            raise MasterMakerExceptions.AutoCalibrationDirectoryEmpty(
                auto_directory_path)

        console.push_level()
        console.message(
            f"Calibrating from directory containing {len(directory_files)} files.",
            +1)
        # Shallow copy: slots are replaced below, so the caller's list is untouched
        result = file_data.copy()
        for input_index in range(len(descriptors)):
            if session_controller.thread_cancelled():
                raise MasterMakerExceptions.SessionCancelled
            this_file: FileDescriptor = descriptors[input_index]
            # Choose the calibration file that best matches this image's
            # exposure and temperature
            calibration_file = self.get_best_calibration_file(
                directory_files, this_file, session_controller, console)
            if session_controller.thread_cancelled():
                raise MasterMakerExceptions.SessionCancelled
            calibration_image = RmFitsUtil.fits_data_from_path(
                calibration_file)
            (calibration_x, calibration_y) = calibration_image.shape
            (layer_x, layer_y) = result[input_index].shape
            if (layer_x != calibration_x) or (layer_y != calibration_y):
                raise MasterMakerExceptions.IncompatibleSizes
            # Subtract, clipping to 0..0xFFFF so no negative pixels are produced
            difference = result[input_index] - calibration_image
            result[input_index] = difference.clip(0, 0xFFFF)
        console.pop_level()
        return result
Exemplo n.º 8
0
 def all_descriptors_from_directory(self, directory_path: str,
                                    recursive: bool) -> [FileDescriptor]:
     """
     Build file descriptors for every file in the given directory.
     :param directory_path:  Absolute path to directory to be scanned
     :param recursive:       If True, also recursively scan all sub-directories
     :return:                List of file descriptors for the directory contents
     """
     found_paths: [str] = SharedUtils.files_in_directory(directory_path,
                                                         recursive)
     return RmFitsUtil.make_file_descriptions(found_paths)
 def process_files(self, file_names: [str], output_path: str, groups_output_directory: str) -> bool:
     """Process all the files listed in the command line, with the given combination settings"""
     descriptors = RmFitsUtil.make_file_descriptions(file_names)
     # Unless type checking is suppressed, every file must be a Dark frame
     types_ok = self._data_model.get_ignore_file_type() \
         or FileCombiner.all_of_type(descriptors, FileDescriptor.FILE_TYPE_DARK)
     if not types_ok:
         print("Files are not all Dark files.  (Use -t option to suppress this check.)")
         return False
     combined_output_path = self.make_output_path(output_path, descriptors)
     self.run_combination_session(descriptors, combined_output_path, groups_output_directory)
     return True
Exemplo n.º 10
0
    def combine_mean(cls, file_names: [str], calibrator: Calibrator,
                     console: Console,
                     session_controller: SessionController) -> ndarray:
        """
        Combine the named FITS files using a simple per-pixel mean and
        return the combined data as an ndarray.  Calibration parameters
        are taken from a descriptor of the first (sample) file.
        """
        # The combine button is disabled when the selection is empty
        assert file_names
        console.push_level()
        console.message("Combining by simple mean", +1)
        sample_file = RmFitsUtil.make_file_descriptor(file_names[0])
        raw_data: [ndarray] = RmFitsUtil.read_all_files_data(file_names)

        cls.check_cancellation(session_controller)
        calibrated_data = calibrator.calibrate_images(raw_data, sample_file,
                                                      console,
                                                      session_controller)

        cls.check_cancellation(session_controller)
        combined = numpy.mean(calibrated_data, axis=0)
        console.pop_level()
        return combined
Exemplo n.º 11
0
 def calibrate_with_file(self, file_data: [ndarray], calibration_file_path: str, console: Console,
                         session_controller: SessionController) -> [ndarray]:
     """
     Calibrate each image by subtracting the given fixed calibration file,
     clipping results to the range 0..0xFFFF so no negative pixels are produced.
     :param file_data:               List of images' data; each is a 2-d pixel matrix
     :param calibration_file_path:   Full path to the calibration file
     :param console:                 Redirectable console output object
     :param session_controller:      Controller for this subtask
     :return:                        List of calibrated images
     :raises MasterMakerExceptions.SessionCancelled:  if the subtask is cancelled
     :raises MasterMakerExceptions.IncompatibleSizes: on a dimension mismatch
     """
     console.message(f"Calibrate with file: {calibration_file_path}", 0)
     result = file_data.copy()
     calibration_image = RmFitsUtil.fits_data_from_path(calibration_file_path)
     (calibration_x, calibration_y) = calibration_image.shape
     for index in range(len(result)):
         if session_controller.thread_cancelled():
             # Raise rather than "break": breaking out of the loop silently
             # returned a partially-calibrated list, which callers could
             # mistake for a fully calibrated result.  Raising matches the
             # behaviour of the sibling calibration methods.
             raise MasterMakerExceptions.SessionCancelled
         (layer_x, layer_y) = result[index].shape
         if (layer_x != calibration_x) or (layer_y != calibration_y):
             raise MasterMakerExceptions.IncompatibleSizes
         difference = result[index] - calibration_image
         result[index] = difference.clip(0, 0xFFFF)
     return result
Exemplo n.º 12
0
    def validate_file_dimensions(cls, descriptors: [FileDescriptor],
                                 data_model: DataModel) -> bool:
        """
        Determine if the dimensions of all the supplied files are the same.
        All selected files must be the same size and the same binning.
        Include the precalibration bias or dark file in this test if that method is selected.

        :param descriptors:     Files to be checked for compatibility
        :param data_model:      Data model gives precalibration type and file if needed
        :return:                True if all files are the same size and binning, so compatible
        """
        # An empty list is trivially compatible
        if len(descriptors) == 0:
            return True

        # Copy the list first: the original code appended the precalibration
        # descriptor to the caller's list, mutating it as a side effect.
        files_to_check = list(descriptors)

        # If precalibration file is in use, include it in the compatibility check
        if data_model.get_precalibration_type() == Constants.CALIBRATION_FIXED_FILE:
            calibration_descriptor = \
                RmFitsUtil.make_file_descriptor(data_model.get_precalibration_fixed_path())
            files_to_check.append(calibration_descriptor)

        # Get binning and dimensions of the first file to use as a reference
        reference_file: FileDescriptor = files_to_check[0]
        reference_binning = reference_file.get_binning()
        reference_x_size = reference_file.get_x_dimension()
        reference_y_size = reference_file.get_y_dimension()

        # Check all files in the list against these specifications
        for descriptor in files_to_check:
            if descriptor.get_binning() != reference_binning:
                return False
            if descriptor.get_x_dimension() != reference_x_size:
                return False
            if descriptor.get_y_dimension() != reference_y_size:
                return False

        return True
 def process_files(self, file_names: [str], output_path: str,
                   groups_output_directory: str) -> bool:
     """
     Process all the files listed in the command line, with the given combination settings
     :param file_names:                  List of file path names to be processed
     :param output_path:                 Path where output is to be placed
     :param groups_output_directory:     Path for output directory if grouping option is used
     :return:                            Success indicator
     """
     descriptors = RmFitsUtil.make_file_descriptions(file_names)
     # Unless type checking is suppressed, every file must be a Flat frame
     types_ok = self._data_model.get_ignore_file_type() \
         or FileCombiner.all_of_type(descriptors, FileDescriptor.FILE_TYPE_FLAT)
     if not types_ok:
         print(
             "Files are not all Flat files.  (Use -t option to suppress this check.)"
         )
         return False
     combined_path = self.make_output_path(output_path, descriptors)
     self.run_combination_session(descriptors, combined_path,
                                  groups_output_directory)
     return True
Exemplo n.º 14
0
 def all_descriptors_from_directory(self, directory_path: str,
                                    recursive: bool) -> [FileDescriptor]:
     """Return file descriptors for every file found in the given directory,
     recursing into sub-directories when requested."""
     found: [str] = SharedUtils.files_in_directory(directory_path, recursive)
     return RmFitsUtil.make_file_descriptions(found)
Exemplo n.º 15
0
    def combine_files(self, input_files: [FileDescriptor],
                      data_model: DataModel, filter_name: str,
                      output_path: str, console: Console):
        """
        Combine the given files, output to the given output file using the combination
        method defined in the data model.

        :param input_files:     List of files to be combined
        :param data_model:      Data model with options for this run
        :param filter_name:     Human-readable filter name (for output file name and FITS comment)
        :param output_path:     Path for output file to be created
        :param console:         Redirectable console output object
        """
        console.push_level(
        )  # Stack console indentation level to easily restore when done
        # Expand any date/time/filter substitution tokens in the output name
        substituted_file_name = SharedUtils.substitute_date_time_filter_in_string(
            output_path)
        file_names = [d.get_absolute_path() for d in input_files]
        combine_method = data_model.get_master_combine_method()
        # Get info about any precalibration that is to be done
        calibrator = Calibrator(data_model)
        calibration_tag = calibrator.fits_comment_tag()
        assert len(input_files) > 0
        # Binning, exposure, and temperature for the output FITS header come
        # from the inputs (binning from the first file, exposure/temperature averaged)
        binning: int = input_files[0].get_binning()
        (mean_exposure, mean_temperature
         ) = ImageMath.mean_exposure_and_temperature(input_files)
        # Dispatch on the chosen combination method; each branch computes the
        # combined data and writes a master Flat FITS file with a comment
        # recording the method and calibration used
        if combine_method == Constants.COMBINE_MEAN:
            mean_data = ImageMath.combine_mean(file_names, calibrator, console,
                                               self._session_controller)
            self.check_cancellation()
            RmFitsUtil.create_combined_fits_file(
                substituted_file_name, mean_data,
                FileDescriptor.FILE_TYPE_FLAT, "Flat Frame", mean_exposure,
                mean_temperature, filter_name, binning,
                f"Master Flat MEAN combined {calibration_tag}")
        elif combine_method == Constants.COMBINE_MEDIAN:
            median_data = ImageMath.combine_median(file_names, calibrator,
                                                   console,
                                                   self._session_controller)
            self.check_cancellation()
            RmFitsUtil.create_combined_fits_file(
                substituted_file_name, median_data,
                FileDescriptor.FILE_TYPE_FLAT, "Flat Frame", mean_exposure,
                mean_temperature, filter_name, binning,
                f"Master Flat MEDIAN combined {calibration_tag}")
        elif combine_method == Constants.COMBINE_MINMAX:
            number_dropped_points = data_model.get_min_max_number_clipped_per_end(
            )
            min_max_clipped_mean = ImageMath.combine_min_max_clip(
                file_names, number_dropped_points, calibrator, console,
                self._session_controller)
            self.check_cancellation()
            assert min_max_clipped_mean is not None
            RmFitsUtil.create_combined_fits_file(
                substituted_file_name, min_max_clipped_mean,
                FileDescriptor.FILE_TYPE_FLAT, "Flat Frame", mean_exposure,
                mean_temperature, filter_name, binning,
                f"Master Flat Min/Max Clipped "
                f"(drop {number_dropped_points}) Mean combined"
                f" {calibration_tag}")
        else:
            # Sigma-clip is the only remaining method the data model can report
            assert combine_method == Constants.COMBINE_SIGMA_CLIP
            sigma_threshold = data_model.get_sigma_clip_threshold()
            sigma_clipped_mean = ImageMath.combine_sigma_clip(
                file_names, sigma_threshold, calibrator, console,
                self._session_controller)
            self.check_cancellation()
            assert sigma_clipped_mean is not None
            RmFitsUtil.create_combined_fits_file(
                substituted_file_name, sigma_clipped_mean,
                FileDescriptor.FILE_TYPE_FLAT, "Flat Frame", mean_exposure,
                mean_temperature, filter_name, binning,
                f"Master Flat Sigma Clipped "
                f"(threshold {sigma_threshold}) Mean combined"
                f" {calibration_tag}")
        console.pop_level()
Exemplo n.º 16
0
    def combine_min_max_clip(
            cls, file_names: [str], number_dropped_values: int,
            calibrator: Calibrator, console: Console,
            session_controller: SessionController) -> Optional[ndarray]:
        """
        Combine the named FITS files using a min/max-clipped mean: at each
        pixel position the given number of minimum and maximum values are
        dropped before the remaining values are averaged.
        :param number_dropped_values:   Number of min and max values to drop per pixel column
        :param calibrator:              Calibration object, abstracting precalibration operations
        :param console:                 Redirectable console output handler
        :param session_controller:      Controller for this subtask, checking for cancellation
        :return:                        2-dimensional ndarray of the combined pixel values
        """
        # The combine button would have been disabled with no files selected
        assert len(file_names) > 0
        # Get the data to be processed
        file_data_list: [ndarray] = RmFitsUtil.read_all_files_data(file_names)
        cls.check_cancellation(session_controller)
        file_data = numpy.asarray(file_data_list)
        # Calibration parameters are taken from a descriptor of the first (sample) file
        sample_file = RmFitsUtil.make_file_descriptor(file_names[0])
        file_data = calibrator.calibrate_images(file_data, sample_file,
                                                console, session_controller)
        cls.check_cancellation(session_controller)
        # "Version 5" was the algorithm variant kept after development-time
        # benchmarking of several implementations (the others have been removed).
        result5 = cls.min_max_clip_version_5(file_data, number_dropped_values,
                                             console, session_controller)
        cls.check_cancellation(session_controller)
        # The algorithm returns a masked array; fill it to a plain ndarray
        return result5.filled()
Exemplo n.º 17
0
 def combine_files(self, input_files: [FileDescriptor],
                   data_model: DataModel, filter_name: str,
                   output_path: str, console: Console):
     """
     Combine the given files into a master Dark FITS file at the given output
     path, using the combination method selected in the data model.
     :param input_files:     List of files to be combined
     :param data_model:      Data model with options for this run
     :param filter_name:     Human-readable filter name (for output file name and FITS comment)
     :param output_path:     Path for the output file to be created
     :param console:         Redirectable console output object
     """
     console.push_level()
     # Expand any date/time/filter substitution tokens in the output name
     substituted_file_name = SharedUtils.substitute_date_time_filter_in_string(
         output_path)
     file_names = [d.get_absolute_path() for d in input_files]
     combine_method = data_model.get_master_combine_method()
     # Get info about any precalibration that is to be done
     calibrator = Calibrator(data_model)
     calibration_tag = calibrator.fits_comment_tag()
     assert len(input_files) > 0
     # Header values for the output file: binning from the first input,
     # exposure and temperature averaged over all inputs
     binning: int = input_files[0].get_binning()
     (mean_exposure, mean_temperature
      ) = ImageMath.mean_exposure_and_temperature(input_files)
     # Dispatch on the chosen combination method; each branch computes the
     # combined data and writes a master Dark FITS file with a comment
     # recording the method and calibration used
     if combine_method == Constants.COMBINE_MEAN:
         mean_data = ImageMath.combine_mean(file_names, calibrator, console,
                                            self._session_controller)
         self.check_cancellation()
         RmFitsUtil.create_combined_fits_file(
             substituted_file_name, mean_data,
             FileDescriptor.FILE_TYPE_DARK, "Dark Frame", mean_exposure,
             mean_temperature, filter_name, binning,
             f"Master Dark MEAN combined {calibration_tag}")
     elif combine_method == Constants.COMBINE_MEDIAN:
         median_data = ImageMath.combine_median(file_names, calibrator,
                                                console,
                                                self._session_controller)
         self.check_cancellation()
         RmFitsUtil.create_combined_fits_file(
             substituted_file_name, median_data,
             FileDescriptor.FILE_TYPE_DARK, "Dark Frame", mean_exposure,
             mean_temperature, filter_name, binning,
             f"Master Dark MEDIAN combined {calibration_tag}")
     elif combine_method == Constants.COMBINE_MINMAX:
         number_dropped_points = data_model.get_min_max_number_clipped_per_end(
         )
         min_max_clipped_mean = ImageMath.combine_min_max_clip(
             file_names, number_dropped_points, calibrator, console,
             self._session_controller)
         self.check_cancellation()
         assert min_max_clipped_mean is not None
         RmFitsUtil.create_combined_fits_file(
             substituted_file_name, min_max_clipped_mean,
             FileDescriptor.FILE_TYPE_DARK, "Dark Frame", mean_exposure,
             mean_temperature, filter_name, binning,
             f"Master Dark Min/Max Clipped "
             f"(drop {number_dropped_points}) Mean combined"
             f" {calibration_tag}")
     else:
         # Sigma-clip is the only remaining method the data model can report
         assert combine_method == Constants.COMBINE_SIGMA_CLIP
         sigma_threshold = data_model.get_sigma_clip_threshold()
         sigma_clipped_mean = ImageMath.combine_sigma_clip(
             file_names, sigma_threshold, calibrator, console,
             self._session_controller)
         self.check_cancellation()
         assert sigma_clipped_mean is not None
         RmFitsUtil.create_combined_fits_file(
             substituted_file_name, sigma_clipped_mean,
             FileDescriptor.FILE_TYPE_DARK, "Dark Frame", mean_exposure,
             mean_temperature, filter_name, binning,
             f"Master Dark Sigma Clipped "
             f"(threshold {sigma_threshold}) Mean combined"
             f" {calibration_tag}")
     console.pop_level()
Exemplo n.º 18
0
    def combine_sigma_clip(
            cls, file_names: [str], sigma_threshold: float,
            calibrator: Calibrator, console: Console,
            session_controller: SessionController) -> Optional[ndarray]:
        """
        Combine the named FITS files using a sigma-clipped mean: pixel values
        whose z-score exceeds the threshold are masked out before averaging.
        Pixel columns that lose all their values to clipping are recomputed
        with a min/max-clipped mean instead.
        :param file_names:          Names of files to be combined
        :param sigma_threshold:     Z-score above which a value is discarded
        :param calibrator:          Calibration object, abstracting precalibration operations
        :param console:             Redirectable console output handler
        :param session_controller:  Controller for this subtask, checking for cancellation
        :return:                    2-dimensional ndarray of the combined pixel values
        """
        console.push_level()
        console.message(
            f"Combine by sigma-clipped mean, z-score threshold {sigma_threshold}",
            +1)
        # Calibration parameters come from a descriptor of the first (sample) file
        sample_file = RmFitsUtil.make_file_descriptor(file_names[0])

        file_data = numpy.asarray(RmFitsUtil.read_all_files_data(file_names))
        cls.check_cancellation(session_controller)

        file_data = calibrator.calibrate_images(file_data, sample_file,
                                                console, session_controller)
        cls.check_cancellation(session_controller)

        console.message("Calculating unclipped means", +1)
        column_means = numpy.mean(file_data, axis=0)
        cls.check_cancellation(session_controller)

        console.message("Calculating standard deviations", 0)
        column_stdevs = numpy.std(file_data, axis=0)
        cls.check_cancellation(session_controller)
        console.message("Calculating z-scores", 0)
        # Now what we'd like to do is just:
        #    z_scores = abs(file_data - column_means) / column_stdevs
        # Unfortunately, standard deviations can be zero, so that simplistic
        # statement would generate division-by-zero errors.
        # Std for a column would be zero if all the values in the column were identical.
        # In that case we wouldn't want to eliminate any anyway, so we'll set the
        # zero stdevs to a large number, which causes the z-scores to be small, which
        # causes no values to be eliminated.
        column_stdevs[column_stdevs == 0.0] = sys.float_info.max
        z_scores = abs(file_data - column_means) / column_stdevs
        cls.check_cancellation(session_controller)

        console.message("Eliminated data outside threshold", 0)
        # Boolean mask, True where a value will be discarded
        exceeds_threshold = z_scores > sigma_threshold
        cls.check_cancellation(session_controller)

        # Calculate and display how much data we are ignoring
        dimensions = exceeds_threshold.shape
        total_pixels = dimensions[0] * dimensions[1] * dimensions[2]
        number_masked = numpy.count_nonzero(exceeds_threshold)
        percentage_masked = 100.0 * number_masked / total_pixels
        console.message(
            f"Discarded {number_masked:,} pixels of {total_pixels:,} "
            f"({percentage_masked:.3f}% of data)", +1)

        masked_array = ma.masked_array(file_data, exceeds_threshold)
        cls.check_cancellation(session_controller)
        console.message("Calculating adjusted means", -1)
        masked_means = ma.mean(masked_array, axis=0)
        cls.check_cancellation(session_controller)

        # If the means matrix contains any masked values, that means that in that column the clipping
        # eliminated *all* the data.  We will find the offending columns and re-calculate those using
        # simple min-max clipping.
        if ma.is_masked(masked_means):
            console.message(
                "Some columns lost all their values; min-max clipping those columns.",
                0)
            #  Get the mask, and get a 2D matrix showing which columns were entirely masked
            # (ndarray.all is applied as an unbound method over the file axis)
            eliminated_columns_map = ndarray.all(exceeds_threshold, axis=0)
            masked_coordinates = numpy.where(eliminated_columns_map)
            x_coordinates = masked_coordinates[0]
            y_coordinates = masked_coordinates[1]
            assert len(x_coordinates) == len(y_coordinates)
            for index in range(len(x_coordinates)):
                cls.check_cancellation(session_controller)
                column_x = x_coordinates[index]
                column_y = y_coordinates[index]
                column = file_data[:, column_x, column_y]
                min_max_clipped_mean: int = round(
                    cls.calc_mm_clipped_mean(column, 2, console,
                                             session_controller))
                masked_means[column_x, column_y] = min_max_clipped_mean
            # We've replaced the problematic columns, now the mean should calculate cleanly
            assert not ma.is_masked(masked_means)
        cls.check_cancellation(session_controller)
        console.pop_level()
        # Round to integer pixel values and fill the mask to a plain ndarray
        result = masked_means.round().filled()
        return result
Exemplo n.º 19
0
    def combine_min_max_clip(
            cls, file_names: [str], number_dropped_values: int,
            calibrator: Calibrator, console: Console,
            session_controller: SessionController) -> Optional[ndarray]:
        """
        Combine the files in the given list using the min-max clip algorithm:
        at each pixel position, the given number of minimum and maximum values
        are dropped and the remaining values averaged.
        Check, as reading, that they all have the same dimensions
        :param file_names:              Names of files to be combined
        :param number_dropped_values:   Number of min and max values to drop from each column
        :param calibrator:              Calibration object, abstracting precalibration operations
        :param console:                 Redirectable console output handler
        :param session_controller:      Controller for this subtask, checking for cancellation
        :return:                        ndarray giving the 2-dimensional matrix of resulting pixel values
        """
        assert len(
            file_names
        ) > 0  # Otherwise the combine button would have been disabled
        # Read all the image files into a 3-D array: (file index, x, y)
        file_data_list: [ndarray] = RmFitsUtil.read_all_files_data(file_names)
        cls.check_cancellation(session_controller)
        descriptors = RmFitsUtil.make_file_descriptions(file_names)
        file_data = numpy.asarray(file_data_list)
        # Apply any requested pre-calibration (e.g. bias/dark subtraction)
        # before the pixel columns are combined
        file_data = calibrator.calibrate_images(file_data, descriptors,
                                                console, session_controller)
        cls.check_cancellation(session_controller)
        # "Version 5" is the fastest of several experimental implementations
        # of the clipping algorithm.  It returns a masked array; fill it to
        # produce a plain ndarray for the caller.
        clipped_means = cls.min_max_clip_version_5(
            file_data, number_dropped_values, console, session_controller)
        cls.check_cancellation(session_controller)
        return clipped_means.filled()