# Exemplo n.º 1 (Example no. 1 — scrape artifact, commented out so the file parses)
def workflow(input_name,
             input_type='video',
             roi=None,
             automatic_ap_creation=True):
    """
    Execute the whole stacking workflow for a test case. This can either use a video file (.avi .mov .mp4 .ser)
    or still images stored in a single directory.

    :param input_name: Video file (.avi .mov .mp4 .ser) or name of a directory containing still images
    :param input_type: Either "video" or "image" (see "input_name")
    :param roi: If specified, tuple (y_low, y_high, x_low, x_high) with pixel bounds for "region
                of interest"
    :param automatic_ap_creation: If True, alignment points are created automatically on a
                                  regular grid. If False, the interactive alignment point
                                  editor (Qt GUI) is opened instead.
    :return: average, [average_roi,] color_image_with_aps, stacked_image
             with: - average: global mean frame
                   - average_roi: mean frame restricted to ROI (only if roi is specified)
                   - color_image_with_aps: mean frame overlaid with alignment points and their
                                           boxes (white) and patches (green)
                   - stacked_image: the final stacked image
    """

    # Initialize the timer object used to measure execution times of program sections.
    my_timer = timer()
    # Images can either be extracted from a video file or a batch of single photographs. Select
    # the example for the test run.

    # For video file input, the Frames constructor expects the video file name for "names".
    if input_type == 'video':
        names = input_name
    # For single image input, the Frames constructor expects a list of image file names for "names".
    else:
        names = [
            os.path.join(input_name, name) for name in os.listdir(input_name)
        ]
    # The output file name is derived by appending a suffix to the input name (for video
    # input this keeps the original extension, e.g. "movie.avi.stacked.tiff").
    stacked_image_name = input_name + '.stacked.tiff'

    # The name of the alignment point visualization file is derived from the input video name or
    # the input directory name.
    ap_image_name = input_name + ".aps.tiff"

    print(
        "\n" +
        "*************************************************************************************\n"
        + "Start processing " + str(input_name) +
        "\n*************************************************************************************"
    )
    my_timer.create('Execution over all')

    # Get configuration parameters.
    configuration = Configuration()
    configuration.initialize_configuration()

    # Read the frames. On failure the whole process is terminated (this is a test driver,
    # there is no next job to continue with).
    print("+++ Start reading frames")
    my_timer.create('Read all frames')
    try:
        frames = Frames(configuration, names, type=input_type)
        print("Number of images read: " + str(frames.number))
        print("Image shape: " + str(frames.shape))
    except Error as e:
        print("Error: " + str(e))
        exit()
    my_timer.stop('Read all frames')

    # Rank the frames by their overall local contrast.
    print("+++ Start ranking images")
    my_timer.create('Ranking images')
    rank_frames = RankFrames(frames, configuration)
    rank_frames.frame_score()
    my_timer.stop('Ranking images')
    print("Index of best frame: " + str(rank_frames.frame_ranks_max_index))

    # Initialize the frame alignment object.
    align_frames = AlignFrames(frames, rank_frames, configuration)

    if configuration.align_frames_mode == "Surface":
        my_timer.create('Select optimal alignment patch')
        # Select the local rectangular patch in the image where the L gradient is highest in both x
        # and y direction. The scale factor specifies how much smaller the patch is compared to the
        # whole image frame.
        (y_low_opt, y_high_opt, x_low_opt,
         x_high_opt) = align_frames.compute_alignment_rect(
             configuration.align_frames_rectangle_scale_factor)
        my_timer.stop('Select optimal alignment patch')

        print("optimal alignment rectangle, y_low: " + str(y_low_opt) +
              ", y_high: " + str(y_high_opt) + ", x_low: " + str(x_low_opt) +
              ", x_high: " + str(x_high_opt))

    # Align all frames globally relative to the frame with the highest score.
    # NotSupportedError is fatal; InternalError is only reported as a warning.
    print("+++ Start aligning all frames")
    my_timer.create('Global frame alignment')
    try:
        align_frames.align_frames()
    except NotSupportedError as e:
        print("Error: " + e.message)
        exit()
    except InternalError as e:
        print("Warning: " + e.message)
    my_timer.stop('Global frame alignment')

    print("Intersection, y_low: " + str(align_frames.intersection_shape[0][0]) + ", y_high: "
          + str(align_frames.intersection_shape[0][1]) + ", x_low: " \
          + str(align_frames.intersection_shape[1][0]) + ", x_high: " \
          + str(align_frames.intersection_shape[1][1]))

    # Compute the average frame.
    print("+++ Start computing reference frame")
    my_timer.create('Compute reference frame')
    average = align_frames.average_frame()
    my_timer.stop('Compute reference frame')
    print("Reference frame computed from the best " +
          str(align_frames.average_frame_number) + " frames.")

    # If the ROI is to be set to a smaller size than the whole intersection, do so.
    if roi:
        print("+++ Start setting ROI and computing new reference frame")
        my_timer.create('Setting ROI and new reference')
        average_roi = align_frames.set_roi(roi[0], roi[1], roi[2], roi[3])
        my_timer.stop('Setting ROI and new reference')

    # Initialize the AlignmentPoints object.
    my_timer.create('Initialize alignment point object')
    alignment_points = AlignmentPoints(configuration, frames, rank_frames,
                                       align_frames)
    my_timer.stop('Initialize alignment point object')

    if automatic_ap_creation:
        # Create alignment points, and create an image with all alignment point boxes and patches.
        print("+++ Start creating alignment points")
        my_timer.create('Create alignment points')

        # If a ROI is selected, alignment points are created in the ROI window only.
        alignment_points.create_ap_grid()

        my_timer.stop('Create alignment points')
        print("Number of alignment points selected: " +
              str(len(alignment_points.alignment_points)) +
              ", aps dropped because too dim: " +
              str(alignment_points.alignment_points_dropped_dim) +
              ", aps dropped because too little structure: " +
              str(alignment_points.alignment_points_dropped_structure))
    else:
        # Open the alignment point editor (blocks until the user closes the window).
        app = QtWidgets.QApplication(sys.argv)
        alignment_point_editor = AlignmentPointEditorWidget(
            None, configuration, align_frames, alignment_points, None)
        alignment_point_editor.setMinimumSize(800, 600)
        alignment_point_editor.showMaximized()
        app.exec_()

        print("After AP editing, number of APs: " +
              str(len(alignment_points.alignment_points)))
        # Allocate reference boxes for alignment points added in the editor (points created
        # automatically already have one and are skipped).
        count_updates = 0
        for ap in alignment_points.alignment_points:
            if ap['reference_box'] is not None:
                continue
            count_updates += 1
            AlignmentPoints.set_reference_box(ap, alignment_points.mean_frame)
        print("Buffers allocated for " + str(count_updates) +
              " alignment points.")

    # Produce an overview image showing all alignment points.
    if roi:
        color_image_with_aps = alignment_points.show_alignment_points(
            average_roi)
    else:
        color_image_with_aps = alignment_points.show_alignment_points(average)

    # For each alignment point rank frames by their quality.
    my_timer.create('Rank frames at alignment points')
    print("+++ Start ranking frames at alignment points")
    alignment_points.compute_frame_qualities()
    my_timer.stop('Rank frames at alignment points')

    # Allocate StackFrames object.
    stack_frames = StackFrames(configuration, frames, rank_frames,
                               align_frames, alignment_points, my_timer)

    # Stack all frames.
    print("+++ Start stacking frames")
    stack_frames.stack_frames()

    # Merge the stacked alignment point buffers into a single image.
    print("+++ Start merging alignment patches")
    stacked_image = stack_frames.merge_alignment_point_buffers()

    # If the drizzle factor is 1.5, reduce the pixel resolution of the stacked image buffer
    # to half the size used in stacking.
    if configuration.drizzle_factor_is_1_5:
        print("+++ Start reducing image buffer size")
        stack_frames.half_stacked_image_buffer_resolution()

    # Save the stacked image as 16bit int (color or mono).
    my_timer.create('Saving the final image')
    Frames.save_image(stacked_image_name,
                      stacked_image,
                      color=frames.color,
                      header=configuration.global_parameters_version)
    my_timer.stop('Saving the final image')

    # Print out timer results.
    my_timer.stop('Execution over all')
    my_timer.print()

    # Write the image with alignment points.
    Frames.save_image(ap_image_name,
                      color_image_with_aps,
                      color=True,
                      header=configuration.global_parameters_version)

    # If a ROI is selected, return both the original and the reduced-size average frame.
    if roi:
        return average, average_roi, color_image_with_aps, stacked_image
    else:
        return average, color_image_with_aps, stacked_image
# Exemplo n.º 2 (Example no. 2 — scrape artifact, commented out so the file parses)
class Workflow(QtCore.QObject):
    """
    Execute the stacking / postprocessing jobs step by step, presumably in a separate
    worker thread (the code at L384-style comments refers to "the workflow thread" —
    TODO confirm threading model against the instantiating code). All communication
    with the main GUI goes through the Qt signals declared below.
    """

    # Report success (True) / failure (False) of master dark / flat creation.
    master_dark_created_signal = QtCore.pyqtSignal(bool)
    master_flat_created_signal = QtCore.pyqtSignal(bool)
    # Tell the main GUI which activity to execute next (e.g. "Rank frames", "Next job").
    work_next_task_signal = QtCore.pyqtSignal(str)
    # Report progress (phase name, percentage) of the current activity.
    work_current_progress_signal = QtCore.pyqtSignal(str, int)
    # Toggle the main GUI busy state.
    set_main_gui_busy_signal = QtCore.pyqtSignal(bool)
    # Update the status bar text (message, style).
    set_status_bar_signal = QtCore.pyqtSignal(str, str)
    # Manage the auxiliary image window.
    create_image_window_signal = QtCore.pyqtSignal()
    update_image_window_signal = QtCore.pyqtSignal(object)
    terminate_image_window_signal = QtCore.pyqtSignal()

    def __init__(self, main_gui):
        """
        Initialize the workflow object: keep references to the main GUI and its
        configuration, reset all per-job state, optionally configure the Intel Math
        Kernel Library thread count, and create the calibration object.

        :param main_gui: Main GUI object this workflow works for.
        """

        super(Workflow, self).__init__()
        self.main_gui = main_gui
        self.configuration = main_gui.configuration

        # Per-job state; the objects are created later during job execution.
        for attribute in ('my_timer', 'frames', 'rank_frames', 'align_frames',
                          'alignment_points', 'stack_frames',
                          'stacked_image_name', 'postprocessed_image_name',
                          'postprocessed_image', 'postproc_input_image',
                          'postproc_input_name', 'job_type',
                          'attached_log_name', 'attached_log_file',
                          'stdout_saved', 'protocol_file'):
            setattr(self, attribute, None)
        self.output_redirected = False

        # Switch alignment point debugging on / off.
        self.debug_AP = False

        # Set the MKL thread count to the maximum available. This only works on
        # Windows and Linux systems with the Intel Math Kernel Library installed;
        # it is an optional optimization, so any failure is reported and ignored.
        try:
            library_name = ('mkl_rt.dll' if platform.system() == 'Windows'
                            else 'libmkl_rt.so')
            mkl_runtime = CDLL(library_name)
            get_max_threads = mkl_runtime.mkl_get_max_threads

            mkl_runtime.mkl_set_num_threads(byref(c_int(get_max_threads())))
            if self.configuration.global_parameters_protocol_level > 1:
                Miscellaneous.protocol("Number of threads used by mkl: " +
                                       str(get_max_threads()),
                                       self.attached_log_file,
                                       precede_with_timestamp=True)
        except Exception as e:
            Miscellaneous.protocol(
                "Warning: mkl_rt.dll / libmkl_rt.so does not work (not a Windows or Linux system, "
                "or Intel Math Kernel Library not installed?). " + str(e),
                self.attached_log_file,
                precede_with_timestamp=True)

        # Create the calibration object, used for potential flat / dark corrections.
        self.calibration = Calibration(self.configuration)

    @QtCore.pyqtSlot(list)
    def execute_create_master_dark(self, dark_names):
        """
        Create a new master dark frame from the input given as the first entry of
        "dark_names", and signal success or failure to the main GUI.

        :param dark_names: List whose first entry is the path to the dark frame
                           input (video file or image directory).
        :return: -
        """

        if self.configuration.global_parameters_protocol_level > 0:
            Miscellaneous.protocol("+++ Creating a new master dark frame +++",
                                   self.attached_log_file,
                                   precede_with_timestamp=True)
        if self.configuration.global_parameters_protocol_level > 1:
            Miscellaneous.protocol("           Input frames: " + dark_names[0],
                                   self.attached_log_file,
                                   precede_with_timestamp=False)

        try:
            self.set_main_gui_busy_signal.emit(True)
            self.calibration.create_master_dark(dark_names[0])
            self.master_dark_created_signal.emit(True)
        except Exception as e:
            if self.configuration.global_parameters_protocol_level > 0:
                Miscellaneous.protocol(
                    "           Error in creating master dark frame: " +
                    str(e),
                    self.attached_log_file,
                    precede_with_timestamp=False)
            # Bug fix: the failure signal must be emitted regardless of the
            # protocol level. Previously it was nested inside the protocol-level
            # check, so with protocol level 0 the GUI was never notified.
            self.master_dark_created_signal.emit(False)

    @QtCore.pyqtSlot(list)
    def execute_create_master_flat(self, flat_names):
        """
        Create a new master flat frame from the input given as the first entry of
        "flat_names", and signal success or failure to the main GUI.

        :param flat_names: List whose first entry is the path to the flat frame
                           input (video file or image directory).
        :return: -
        """

        if self.configuration.global_parameters_protocol_level > 0:
            Miscellaneous.protocol("+++ Creating a new master flat frame +++",
                                   self.attached_log_file,
                                   precede_with_timestamp=True)
        if self.configuration.global_parameters_protocol_level > 1:
            Miscellaneous.protocol("           Input frames: " + flat_names[0],
                                   self.attached_log_file,
                                   precede_with_timestamp=False)

        try:
            self.set_main_gui_busy_signal.emit(True)
            self.calibration.create_master_flat(flat_names[0])
            self.master_flat_created_signal.emit(True)
        # Catch all exceptions (consistent with "execute_create_master_dark"), so
        # that an unexpected error cannot escape the Qt slot without a failure
        # signal being sent. Previously only "Error" was caught here.
        except Exception as e:
            if self.configuration.global_parameters_protocol_level > 0:
                Miscellaneous.protocol(
                    "           Error in creating master flat frame: " +
                    str(e) + ", flat frame calibration de-activated",
                    self.attached_log_file,
                    precede_with_timestamp=False)
            # Bug fix: the failure signal must be emitted regardless of the
            # protocol level. Previously it was nested inside the protocol-level
            # check, so with protocol level 0 the GUI was never notified.
            self.master_flat_created_signal.emit(False)

    @QtCore.pyqtSlot()
    def execute_reset_masters(self):
        """
        De-activate the master dark / flat frames, so that no calibration is applied
        to the frames of subsequent jobs.

        :return: -
        """

        # Report the de-activation if the protocol level asks for it.
        protocol_level = self.configuration.global_parameters_protocol_level
        if protocol_level > 0:
            Miscellaneous.protocol("+++ De-activating master frames +++",
                                   self.attached_log_file,
                                   precede_with_timestamp=True)

        # Reset the master frames on the calibration object.
        self.calibration.reset_masters()

    @QtCore.pyqtSlot(str, str, bool)
    def execute_frames(self, input_name, input_type, convert_to_grayscale):
        """
        Start a new job: read the input frames (for a stacking job) or the single
        input image (for a postprocessing job), set up output and log file names,
        optionally redirect stdout to a protocol file, and check that the available
        RAM suffices for the chosen buffering level. On completion, emit the name of
        the next activity via "work_next_task_signal".

        :param input_name: Path to a video file, an image directory, or (for
                           'postproc') a single image file.
        :param input_type: One of 'video', 'image' or 'postproc'.
        :param convert_to_grayscale: If True, convert color frames to grayscale.
        :return: -
        """

        # Drop references to objects left over from a previous run, so the garbage
        # collector can release their memory.
        #
        # Bug fix: the original code iterated "for obj in [self.frames, ...]" and
        # executed "del obj", which only deletes the local loop variable. The
        # instance attributes kept referencing the objects, so gc.collect() could
        # not free anything. Resetting the attributes themselves achieves the
        # documented intent.
        self.frames = None
        self.rank_frames = None
        self.align_frames = None
        self.alignment_points = None
        self.stack_frames = None

        # Force the garbage collector to release unreferenced objects.
        gc.collect()

        # Update the status bar in the main GUI.
        self.input_name = input_name
        self.set_status_bar_processing_phase("reading frames")

        # A job can either "stack" images or "postprocess" a single image. In the latter case,
        # input is a single image file.
        #
        # Images for stacking can either be extracted from a video file or a batch of single
        # photographs. In the first case, input_type is set to 'video', in the second case to
        # 'image'.

        if input_type == 'postproc':
            self.job_type = 'postproc'
            self.postproc_input_name = input_name

            # Reset the postprocessed image to None. This way, in saving the postprocessing result,
            # it can be checked if an image was computed in the workflow thread.
            self.postprocessed_image = None
            self.postprocessed_image_name = PostprocDataObject.set_file_name_processed(
                input_name, self.configuration.postproc_suffix,
                self.configuration.global_parameters_image_format)
            self.attached_log_name = splitext(
                input_name)[0] + '_postproc-log.txt'

        # For video file input, the Frames constructor expects the video file name for "names".
        elif input_type == 'video':
            self.job_type = 'stacking'
            names = input_name
            self.stacked_image_name = splitext(input_name)[0] + \
                                      self.configuration.stack_frames_suffix + '.' + \
                                      self.configuration.global_parameters_image_format
            self.attached_log_name = splitext(
                input_name)[0] + '_stacking-log.txt'

        # For single image input, the Frames constructor expects a list of image file names for
        # "names".
        else:  # input_type = 'image'
            self.job_type = 'stacking'
            names = [join(input_name, name) for name in listdir(input_name)]
            self.stacked_image_name = input_name + self.configuration.stack_frames_suffix + '.' + \
                                      self.configuration.global_parameters_image_format
            self.attached_log_name = input_name + '_stacking-log.txt'

        # Redirect stdout to a file if requested, or restore it if redirection was
        # switched off since the last job.
        if self.configuration.global_parameters_write_protocol_to_file != self.output_redirected:
            # Output currently redirected. Reset to stdout.
            # NOTE(review): the previously opened protocol file object is not closed
            # here; its handle is released only when the object is garbage-collected.
            if self.output_redirected:
                sys.stdout = self.stdout_saved
                self.output_redirected = False
            # Currently set to stdout, redirect to file now.
            else:
                try:
                    self.stdout_saved = sys.stdout
                    sys.stdout = open(self.configuration.protocol_filename,
                                      'a+')
                    self.output_redirected = True
                except IOError:
                    pass

        # Create logfile if requested to store the log with the stacked file.
        if self.attached_log_file:
            self.attached_log_file.close()
        if self.configuration.global_parameters_store_protocol_with_result:
            self.attached_log_file = open(self.attached_log_name, "w+")
        else:
            self.attached_log_file = None

        # Write a header to stdout and optionally to the logfile.
        if self.configuration.global_parameters_protocol_level > 0:
            decorator_line = (len(input_name) + 28) * "*"
            Miscellaneous.protocol(decorator_line,
                                   self.attached_log_file,
                                   precede_with_timestamp=False)
            Miscellaneous.protocol("Start processing " + input_name,
                                   self.attached_log_file)
            Miscellaneous.protocol(decorator_line,
                                   self.attached_log_file,
                                   precede_with_timestamp=False)

        # Initialize the timer object used to measure execution times of program sections.
        self.my_timer = timer()
        self.my_timer.create('Execution over all')

        if self.job_type == 'stacking':
            # Write parameters to the protocol.
            if self.configuration.global_parameters_protocol_level > 1:
                Miscellaneous.print_stacking_parameters(
                    self.configuration, self.attached_log_file)
            # Decide on the objects to be buffered, depending on configuration parameter.
            buffer_original, buffer_monochrome, buffer_gaussian, buffer_laplacian = \
                Frames.set_buffering(self.configuration.global_parameters_buffering_level)

            if self.configuration.global_parameters_protocol_level > 1:
                Miscellaneous.protocol(
                    "+++ Buffering level is " +
                    str(self.configuration.global_parameters_buffering_level) +
                    " +++", self.attached_log_file)
            if buffer_original:
                if self.configuration.global_parameters_protocol_level > 0:
                    Miscellaneous.protocol("+++ Start reading frames +++",
                                           self.attached_log_file)
            # Read the frames; on any error, report and continue with the next job.
            try:
                self.frames = Frames(
                    self.configuration,
                    names,
                    type=input_type,
                    calibration=self.calibration,
                    convert_to_grayscale=convert_to_grayscale,
                    progress_signal=self.work_current_progress_signal,
                    buffer_original=buffer_original,
                    buffer_monochrome=buffer_monochrome,
                    buffer_gaussian=buffer_gaussian,
                    buffer_laplacian=buffer_laplacian)
                if self.configuration.global_parameters_protocol_level > 1:
                    Miscellaneous.protocol("           Number of images: " +
                                           str(self.frames.number) +
                                           ", image shape: " +
                                           str(self.frames.shape),
                                           self.attached_log_file,
                                           precede_with_timestamp=False)
                    # Report which dark / flat calibrations are active for this job.
                    if self.frames.calibration_matches:
                        if self.calibration.master_dark_frame_adapted is not None and \
                                self.calibration.inverse_master_flat_frame is not None:
                            Miscellaneous.protocol(
                                "           Dark / flat frame calibration is active",
                                self.attached_log_file,
                                precede_with_timestamp=False)
                        elif self.calibration.master_dark_frame_adapted is not None:
                            Miscellaneous.protocol(
                                "           Dark frame calibration is active",
                                self.attached_log_file,
                                precede_with_timestamp=False)
                        elif self.calibration.inverse_master_flat_frame is not None:
                            Miscellaneous.protocol(
                                "           Flat frame calibration is active",
                                self.attached_log_file,
                                precede_with_timestamp=False)
                    else:
                        Miscellaneous.protocol(
                            "           No matching master dark / flat frames found, "
                            "calibration de-activated",
                            self.attached_log_file,
                            precede_with_timestamp=False)
            except Error as e:
                if self.configuration.global_parameters_protocol_level > 0:
                    Miscellaneous.protocol(
                        "Error: " + e.message + ", continue with next job\n",
                        self.attached_log_file)
                self.work_next_task_signal.emit("Next job")
                return
            except Exception as e:
                if self.configuration.global_parameters_protocol_level > 0:
                    Miscellaneous.protocol(
                        "Error: " + str(e) + ", continue with next job\n",
                        self.attached_log_file)
                self.work_next_task_signal.emit("Next job")
                return

            # Look up the available RAM (without paging)
            virtual_memory = dict(psutil.virtual_memory()._asdict())
            available_ram = virtual_memory['available'] / 1e9

            # Compute the approximate RAM usage of this job at the selected buffering level.
            needed_ram = self.frames.compute_required_buffer_size(
                self.configuration.global_parameters_buffering_level)
            if self.configuration.global_parameters_protocol_level > 1:
                Miscellaneous.protocol("           RAM required (Gbytes): " +
                                       str(needed_ram) + ", available: " +
                                       str(available_ram),
                                       self.attached_log_file,
                                       precede_with_timestamp=False)

            # If the required RAM is not available, test if lowering the buffering level would help.
            if needed_ram > available_ram:
                recommended_level = None
                for level in range(
                        self.configuration.global_parameters_buffering_level -
                        1, -1, -1):
                    alternative_ram = self.frames.compute_required_buffer_size(
                        level)
                    if alternative_ram < available_ram:
                        recommended_level = level
                        break

                # If an appropriate level was found, write it as a recommendation to the protocol.
                if self.configuration.global_parameters_protocol_level > 0:
                    if recommended_level is not None:
                        Miscellaneous.protocol(
                            "Error: Too little RAM for chosen buffering level,"
                            " recommended level: " + str(recommended_level) +
                            ", continuing with next job\n",
                            self.attached_log_file)
                    else:
                        Miscellaneous.protocol(
                            "Error: Too little RAM for this job, "
                            "continuing with the next one\n",
                            self.attached_log_file)

                # Continue with the next job.
                self.work_next_task_signal.emit("Next job")
                return

            # The RAM seems to be sufficient, continue with ranking frames.
            self.work_next_task_signal.emit("Rank frames")

        # Job type is 'postproc'.
        else:
            # Read the input image; on any error, report and continue with the next job.
            try:
                self.postproc_input_image = Frames.read_image(
                    self.postproc_input_name)
            except Error as e:
                if self.configuration.global_parameters_protocol_level > 0:
                    Miscellaneous.protocol(
                        "Error: " + e.message + ", continue with next job\n",
                        self.attached_log_file)
                self.work_next_task_signal.emit("Next job")
                return
            except Exception as e:
                if self.configuration.global_parameters_protocol_level > 0:
                    Miscellaneous.protocol(
                        "Error: " + str(e) + ", continue with next job\n",
                        self.attached_log_file)
                self.work_next_task_signal.emit("Next job")
                return

            # Convert 8 bit to 16 bit.
            if self.postproc_input_image.dtype == uint8:
                self.postproc_input_image = self.postproc_input_image.astype(
                    uint16) * 256
            self.work_next_task_signal.emit("Postprocessing")

    @QtCore.pyqtSlot()
    def execute_rank_frames(self):
        """
        Rank the frames of the current stacking job by their overall local contrast
        using the RankFrames object. On success, emit "Align frames" as the next
        activity; on failure, report the error and emit "Next job" instead.

        :return: -
        """

        self.set_status_bar_processing_phase("ranking frames")
        # Rank the frames by their overall local contrast.
        if self.configuration.global_parameters_protocol_level > 0:
            Miscellaneous.protocol("+++ Start ranking images +++",
                                   self.attached_log_file)
        self.my_timer.create_no_check('Ranking images')

        try:
            self.rank_frames = RankFrames(self.frames, self.configuration,
                                          self.work_current_progress_signal)
            self.rank_frames.frame_score()
            self.my_timer.stop('Ranking images')
        # Project "Error" exceptions carry a ".message" attribute; any other
        # exception is formatted with str().
        except Error as e:
            if self.configuration.global_parameters_protocol_level > 0:
                Miscellaneous.protocol(
                    "Error: " + e.message + ", continue with next job\n",
                    self.attached_log_file)
            self.my_timer.stop('Ranking images')
            self.work_next_task_signal.emit("Next job")
            return
        except Exception as e:
            if self.configuration.global_parameters_protocol_level > 0:
                Miscellaneous.protocol(
                    "Error: " + str(e) + ", continue with next job\n",
                    self.attached_log_file)
            self.my_timer.stop('Ranking images')
            self.work_next_task_signal.emit("Next job")
            return

        if self.configuration.global_parameters_protocol_level > 1:
            Miscellaneous.protocol("           Index of best frame: " +
                                   str(self.rank_frames.frame_ranks_max_index),
                                   self.attached_log_file,
                                   precede_with_timestamp=False)

        self.work_next_task_signal.emit("Align frames")

    @QtCore.pyqtSlot(int, int, int, int)
    def execute_align_frames(self, y_low_opt, y_high_opt, x_low_opt,
                             x_high_opt):
        """
        Align all frames relative to the frame with the highest quality
        score, then compute the average (reference) frame. In "Surface"
        mode a rectangular stabilization patch is used; it is either taken
        from the user input (the four bound arguments) or computed
        automatically. In "Planet" mode the whole frame is used.

        On success, signal "Select stack size" as the next workflow task.
        If no valid alignment can be computed, signal "Next job" instead.

        :param y_low_opt: Lower y pixel bound of the stabilization patch
                          selected by the user (all four bounds are 0 if no
                          patch was selected, e.g. in batch mode).
        :param y_high_opt: Upper y pixel bound of the patch.
        :param x_low_opt: Lower x pixel bound of the patch.
        :param x_high_opt: Upper x pixel bound of the patch.
        :return: -
        """

        self.set_status_bar_processing_phase("aligning frames")
        # Initialize the frame alignment object.
        if self.configuration.global_parameters_protocol_level > 0:
            Miscellaneous.protocol("+++ Initializing frame alignment +++",
                                   self.attached_log_file)
        self.align_frames = AlignFrames(
            self.frames,
            self.rank_frames,
            self.configuration,
            progress_signal=self.work_current_progress_signal)

        if self.configuration.align_frames_mode == "Surface":

            # Decide whether the stabilization patch must be computed
            # automatically: either no patch was passed in (all bounds
            # zero), or the manually selected patch is too large or too
            # small relative to the frame size.
            auto_execution = False
            if y_low_opt == 0 and y_high_opt == 0 and x_low_opt == 0 and x_high_opt == 0:
                auto_execution = True
            elif (y_high_opt - y_low_opt) / self.frames.shape[0] > \
                    self.configuration.align_frames_max_stabilization_patch_fraction or \
                    (x_high_opt - x_low_opt) / self.frames.shape[1] > \
                    self.configuration.align_frames_max_stabilization_patch_fraction:
                # Bug fix: the protocol level test used to be "and"-ed into
                # the size condition, so due to operator precedence an
                # oversized patch in x direction was only detected when the
                # protocol level was > 0. The level check must only guard
                # the protocol message, not the mode switch.
                if self.configuration.global_parameters_protocol_level > 0:
                    Miscellaneous.protocol(
                        "           Stabilization patch selected manually is "
                        "too large, switch to automatic mode",
                        self.attached_log_file,
                        precede_with_timestamp=False)
                auto_execution = True
            elif (y_high_opt - y_low_opt) / self.frames.shape[0] < \
                    self.configuration.align_frames_min_stabilization_patch_fraction or \
                    (x_high_opt - x_low_opt) / self.frames.shape[1] < \
                    self.configuration.align_frames_min_stabilization_patch_fraction:
                # Same precedence fix as above for the "too small" case.
                if self.configuration.global_parameters_protocol_level > 0:
                    Miscellaneous.protocol(
                        "           Stabilization patch selected manually is "
                        "too small, switch to automatic mode",
                        self.attached_log_file,
                        precede_with_timestamp=False)
                auto_execution = True

            # Compute the local rectangular patch in the image where the L gradient is highest
            # in both x and y direction. The scale factor specifies how much smaller the patch
            # is compared to the whole image frame. In batch mode, variable "auto_execution" is
            # set to "True", and the automatic patch computation is the only option.
            if auto_execution or self.configuration.align_frames_automation:

                self.my_timer.create_no_check('Select optimal alignment patch')
                (y_low_opt, y_high_opt, x_low_opt, x_high_opt) = \
                    self.align_frames.compute_alignment_rect(
                        self.configuration.align_frames_rectangle_scale_factor)
                self.my_timer.stop('Select optimal alignment patch')
                if self.configuration.global_parameters_protocol_level > 1:
                    Miscellaneous.protocol(
                        "           Alignment rectangle, computed automatically: "
                        + str(y_low_opt) + "<y<" + str(y_high_opt) + ", " +
                        str(x_low_opt) + "<x<" + str(x_high_opt),
                        self.attached_log_file,
                        precede_with_timestamp=False)

            # As an alternative, set the coordinates of the rectangular patch explicitly.
            else:
                # The image displayed in the stabilization patch editor was shrunk on all four
                # sides by a number of pixels given by the alignment search width parameter.
                # Therefore, the resulting coordinates of the stabilization patch have to be
                # corrected by this offset now.
                y_low_opt += self.configuration.align_frames_search_width
                y_high_opt += self.configuration.align_frames_search_width
                x_low_opt += self.configuration.align_frames_search_width
                x_high_opt += self.configuration.align_frames_search_width

                self.align_frames.set_alignment_rect(y_low_opt, y_high_opt,
                                                     x_low_opt, x_high_opt)
                # Bug fix: the message string below was corrupted (the
                # "str(y_low_opt)" term was mangled into "******", a syntax
                # error). Reconstructed from the parallel "computed
                # automatically" message above.
                if self.configuration.global_parameters_protocol_level > 1:
                    Miscellaneous.protocol(
                        "           Alignment rectangle, set by the user: " +
                        str(y_low_opt) + "<y<" + str(y_high_opt) + ", " +
                        str(x_low_opt) + "<x<" + str(x_high_opt),
                        self.attached_log_file,
                        precede_with_timestamp=False)

        # Align all frames globally relative to the frame with the highest score.
        if self.configuration.global_parameters_protocol_level > 0:
            Miscellaneous.protocol("+++ Start aligning all frames +++",
                                   self.attached_log_file)

        self.my_timer.create_no_check('Global frame alignment')

        # Align all frames in "Surface" mode.
        if self.configuration.align_frames_mode == "Surface":
            # Try the frame alignment using the alignment patch with the highest quality first. If
            # for at least one frame no valid shift can be found, try the next alignment patch. If
            # valid shifts cannot be computed with any patch, abort processing of this job and go to
            # the next one.
            number_patches = len(self.align_frames.alignment_rect_qualities)
            for patch_index in range(number_patches):
                self.align_frames.select_alignment_rect(patch_index)
                try:
                    self.align_frames.align_frames()
                    # Everything is fine, no need to try another stabilization patch.
                    break
                except (NotSupportedError, ArgumentError) as e:
                    if self.configuration.global_parameters_protocol_level > 0:
                        Miscellaneous.protocol(
                            "Error: " + e.message +
                            ", continue with next job\n",
                            self.attached_log_file)
                    self.my_timer.stop('Global frame alignment')
                    self.work_next_task_signal.emit("Next job")
                    return
                # For some frames no valid shift could be computed. This would create problems later
                # in the workflow. Therefore, try again with another stabilization patch.
                except InternalError as e:
                    if self.configuration.global_parameters_protocol_level > 0:
                        Miscellaneous.protocol(
                            "Warning: " + e.message + ", will try another"
                            " stabilization patch", self.attached_log_file)
                    # If there is no more patch available, skip this job.
                    if patch_index == number_patches - 1:
                        if self.configuration.global_parameters_protocol_level > 0:
                            Miscellaneous.protocol(
                                "Error: No alternative stabilization patch"
                                " available, continue with next job\n",
                                self.attached_log_file)
                        self.my_timer.stop('Global frame alignment')
                        self.work_next_task_signal.emit("Next job")
                        return
                    # Continue with the next best stabilization patch.
                    else:
                        y_low_opt, y_high_opt, x_low_opt, x_high_opt = \
                            self.align_frames.alignment_rect_bounds[patch_index + 1]
                        if self.configuration.global_parameters_protocol_level > 0:
                            Miscellaneous.protocol(
                                "           Next alignment rectangle tried: " +
                                str(y_low_opt) + "<y<" + str(y_high_opt) +
                                ", " + str(x_low_opt) + "<x<" +
                                str(x_high_opt),
                                self.attached_log_file,
                                precede_with_timestamp=False)

        # Align all frames in "Planet" mode.
        else:
            try:
                self.align_frames.align_frames()
            except Error as e:
                if self.configuration.global_parameters_protocol_level > 0:
                    Miscellaneous.protocol(
                        "Error: " + e.message + ", continue with next job\n",
                        self.attached_log_file)
                self.my_timer.stop('Global frame alignment')
                self.work_next_task_signal.emit("Next job")
                return
            except Exception as e:
                if self.configuration.global_parameters_protocol_level > 0:
                    Miscellaneous.protocol(
                        "Error: " + str(e) + ", continue with next job\n",
                        self.attached_log_file)
                self.my_timer.stop('Global frame alignment')
                self.work_next_task_signal.emit("Next job")
                return

        self.my_timer.stop('Global frame alignment')

        if self.configuration.global_parameters_protocol_level > 1:
            Miscellaneous.protocol(
                "           Pixel range common to all frames: " +
                str(self.align_frames.intersection_shape[0][0]) + "<y<" +
                str(self.align_frames.intersection_shape[0][1]) + ", " +
                str(self.align_frames.intersection_shape[1][0]) + "<x<" +
                str(self.align_frames.intersection_shape[1][1]),
                self.attached_log_file,
                precede_with_timestamp=False)

        # Compute the average frame.
        if self.configuration.global_parameters_protocol_level > 0:
            Miscellaneous.protocol("+++ Start computing the average frame +++",
                                   self.attached_log_file)
        self.my_timer.create_no_check('Compute reference frame')
        self.align_frames.average_frame()
        self.my_timer.stop('Compute reference frame')
        if self.configuration.global_parameters_protocol_level > 1:
            Miscellaneous.protocol(
                "           The average frame was computed using the best " +
                str(self.align_frames.average_frame_number) + " frames.",
                self.attached_log_file,
                precede_with_timestamp=False)

        self.work_next_task_signal.emit("Select stack size")

    @QtCore.pyqtSlot(int, int, int, int)
    def execute_set_roi(self, y_min, y_max, x_min, x_max):
        """
        Restrict further processing to a rectangular region of interest
        (ROI) and compute a new average frame for that region. If no ROI
        was selected (all bounds zero), nothing is reported.

        :param y_min: Lower y pixel bound of the ROI.
        :param y_max: Upper y pixel bound of the ROI.
        :param x_min: Lower x pixel bound of the ROI.
        :param x_max: Upper x pixel bound of the ROI.
        :return: -
        """

        self.set_status_bar_processing_phase("setting the ROI")
        # A ROI was actually selected if a y bound is non-zero. Bug fix:
        # "level > 0 and y_min != 0 or y_max != 0" parsed as
        # "(level > 0 and y_min != 0) or y_max != 0" due to operator
        # precedence, so messages were printed even at protocol level 0
        # whenever y_max was non-zero.
        roi_selected = y_min != 0 or y_max != 0
        if self.configuration.global_parameters_protocol_level > 0 and roi_selected:
            Miscellaneous.protocol(
                "+++ Start setting a ROI and computing a new average frame +++",
                self.attached_log_file)
        self.my_timer.create_no_check('Setting ROI and new reference')
        self.align_frames.set_roi(y_min, y_max, x_min, x_max)
        self.my_timer.stop('Setting ROI and new reference')

        # Bug fix: the message string below was corrupted (the
        # "str(y_min)" term was mangled into "******", a syntax error).
        # Reconstructed from the parallel bound messages in this class.
        if self.configuration.global_parameters_protocol_level > 1 and roi_selected:
            Miscellaneous.protocol("           ROI, set by the user: " +
                                   str(y_min) + "<y<" + str(y_max) + ", " +
                                   str(x_min) + "<x<" + str(x_max),
                                   self.attached_log_file,
                                   precede_with_timestamp=False)

        self.work_next_task_signal.emit("Set alignment points")

    @QtCore.pyqtSlot()
    def execute_set_alignment_points(self):
        """
        In automatic mode, create the grid of alignment points (APs) on
        this worker thread. In interactive mode the APs are created by the
        editor on the main_gui thread, so this slot only triggers the next
        workflow task.

        :return: -
        """

        if self.main_gui.automatic:
            self.set_status_bar_processing_phase("creating alignment points")

            # Set up the AlignmentPoints object for this job.
            self.my_timer.create_no_check('Initialize alignment point object')
            self.alignment_points = AlignmentPoints(
                self.configuration,
                self.frames,
                self.rank_frames,
                self.align_frames,
                progress_signal=self.work_current_progress_signal)
            self.my_timer.stop('Initialize alignment point object')

            if self.configuration.global_parameters_protocol_level > 0:
                Miscellaneous.protocol(
                    "+++ Start creating alignment points +++",
                    self.attached_log_file)

            # Lay out the AP grid with all alignment point boxes and
            # patches. If a ROI was selected earlier, the grid is confined
            # to the ROI window.
            self.my_timer.create_no_check('Create alignment points')
            self.alignment_points.create_ap_grid()
            self.my_timer.stop('Create alignment points')

        self.work_next_task_signal.emit("Compute frame qualities")

    @QtCore.pyqtSlot()
    def execute_compute_frame_qualities(self):
        """
        Rank all frames by their local quality at every alignment point,
        after reporting AP statistics from the preceding creation step.

        :return: -
        """

        # Report how many APs were selected and how many were dropped for
        # being too dim or lacking structure.
        if self.configuration.global_parameters_protocol_level > 1:
            Miscellaneous.protocol(
                "           Number of alignment points selected: " +
                str(len(self.alignment_points.alignment_points)) +
                ", aps dropped because too dim: " +
                str(self.alignment_points.alignment_points_dropped_dim) +
                ", aps dropped because too little structure: " +
                str(self.alignment_points.alignment_points_dropped_structure),
                self.attached_log_file,
                precede_with_timestamp=False)

        self.set_status_bar_processing_phase(
            "ranking all frames at all alignment points")

        # Time the per-AP frame ranking as one workflow section.
        self.my_timer.create_no_check('Rank frames at alignment points')
        if self.configuration.global_parameters_protocol_level > 0:
            Miscellaneous.protocol(
                "+++ Start ranking all frames at all alignment points +++",
                self.attached_log_file)
        self.alignment_points.compute_frame_qualities()
        self.my_timer.stop('Rank frames at alignment points')

        self.work_next_task_signal.emit("Stack frames")

    @QtCore.pyqtSlot()
    def execute_stack_frames(self):
        """
        Stack the best frames at all alignment points, then merge the
        stacked alignment patches with the background into a single image.

        :return: -
        """

        self.set_status_bar_processing_phase("stacking frames")

        # Create the StackFrames object which performs the actual stacking.
        self.stack_frames = StackFrames(
            self.configuration,
            self.frames,
            self.align_frames,
            self.alignment_points,
            self.my_timer,
            progress_signal=self.work_current_progress_signal,
            debug=self.debug_AP,
            create_image_window_signal=self.create_image_window_signal,
            update_image_window_signal=self.update_image_window_signal,
            terminate_image_window_signal=self.terminate_image_window_signal)

        protocol_level = self.configuration.global_parameters_protocol_level
        if protocol_level > 0:
            Miscellaneous.protocol(
                "+++ Start stacking " + str(self.alignment_points.stack_size) +
                " frames +++", self.attached_log_file)

        # Perform the stacking at all alignment points.
        self.stack_frames.stack_frames()

        # At high protocol level, document the distribution of local shifts
        # found at the alignment points (only if any APs exist).
        if protocol_level > 1 and len(self.alignment_points.alignment_points) > 0:
            Miscellaneous.protocol(
                "\n           Distribution of shifts at alignment points:",
                self.attached_log_file,
                precede_with_timestamp=False)
            Miscellaneous.protocol(
                self.stack_frames.print_shift_table() + "\n",
                self.attached_log_file,
                precede_with_timestamp=False)

        self.set_status_bar_processing_phase("merging AP patches")
        if protocol_level > 0:
            Miscellaneous.protocol(
                "+++ Start merging all alignment patches and the background +++",
                self.attached_log_file)

        # Combine the stacked alignment point buffers into one image.
        self.stack_frames.merge_alignment_point_buffers()

        self.work_next_task_signal.emit("Save stacked image")

    @QtCore.pyqtSlot()
    def execute_save_stacked_image(self):
        """
        Write the stacking result to disk as a 16bit int image (color or
        mono). If postprocessing is part of the workflow, hand the stacked
        image over as postprocessing input; otherwise finish this job and
        print its timing information.

        :return: -
        """

        self.set_status_bar_processing_phase("saving result")

        protocol_level = self.configuration.global_parameters_protocol_level
        if protocol_level > 0:
            Miscellaneous.protocol("+++ Start saving the stacked image +++",
                                   self.attached_log_file)

        # Store the stacked image, overwriting any existing file with the
        # same name.
        self.my_timer.create_no_check('Saving the stacked image')
        self.frames.save_image(self.stacked_image_name,
                               self.stack_frames.stacked_image,
                               color=self.frames.color,
                               avoid_overwriting=False)
        self.my_timer.stop('Saving the stacked image')

        if protocol_level > 1:
            Miscellaneous.protocol(
                "           The stacked image was written to: " +
                self.stacked_image_name,
                self.attached_log_file,
                precede_with_timestamp=False)

        if self.configuration.global_parameters_include_postprocessing:
            # The stacked image becomes the postprocessing input; derive
            # the file name for the postprocessed result.
            self.postproc_input_image = self.stack_frames.stacked_image
            self.postproc_input_name = self.stacked_image_name
            self.postprocessed_image_name = PostprocDataObject.set_file_name_processed(
                self.stacked_image_name, self.configuration.postproc_suffix,
                self.configuration.global_parameters_image_format)
            self.work_next_task_signal.emit("Postprocessing")
        else:
            # No postprocessing: this job is finished.
            self.work_next_task_signal.emit("Next job")

            # Print timing info for this job.
            self.my_timer.stop('Execution over all')
            if protocol_level > 0:
                self.my_timer.protocol(self.attached_log_file)

    @QtCore.pyqtSlot()
    def execute_postprocess_image(self):
        """
        Apply all sharpening layers of the most recently selected
        postprocessing version to the stacked input image. The result is
        stored in "self.postprocessed_image".

        :return: -
        """

        if self.configuration.global_parameters_protocol_level > 0:
            Miscellaneous.protocol("+++ Start postprocessing +++",
                                   self.attached_log_file)
        # Bug fix: the timer section name was misspelled as "Conputing
        # image postprocessing" (in both the create and stop calls), which
        # showed up verbatim in the timing protocol output.
        self.my_timer.create_no_check('Computing image postprocessing')

        # Initialize the new image with the original image.
        self.postprocessed_image = self.postproc_input_image

        # Apply all sharpening layers of the postprocessing version selected last time.
        version_index = self.configuration.postproc_data_object.version_selected
        postproc_layers = self.configuration.postproc_data_object.versions[
            version_index].layers
        for layer in postproc_layers:
            self.postprocessed_image = Miscellaneous.gaussian_sharpen(
                self.postprocessed_image,
                layer.amount,
                layer.radius,
                luminance_only=layer.luminance_only)
        self.my_timer.stop('Computing image postprocessing')

        self.work_next_task_signal.emit("Save postprocessed image")

    @QtCore.pyqtSlot(object)
    def execute_save_postprocessed_image(self, postprocessed_image):
        """
        Save the postprocessed image and finish the job. If the editor was
        left with "cancel" in interactive mode, the payload is None; in
        that case nothing is written and the workflow proceeds with the
        next job directly.

        :param postprocessed_image: Image data to be saved, or None.
        :return: -
        """

        if postprocessed_image is not None:
            self.set_status_bar_processing_phase("saving result")

            protocol_level = self.configuration.global_parameters_protocol_level
            if protocol_level > 0:
                Miscellaneous.protocol(
                    "+++ Start saving the postprocessed image +++",
                    self.attached_log_file)

            # Write the result as 16bit int. A three-dimensional image
            # array is treated as color, otherwise as mono.
            self.my_timer.create_no_check('Saving the postprocessed image')
            Frames.save_image(self.postprocessed_image_name,
                              postprocessed_image,
                              color=(len(postprocessed_image.shape) == 3),
                              avoid_overwriting=False)
            self.my_timer.stop('Saving the postprocessed image')

            if protocol_level > 1:
                Miscellaneous.protocol(
                    "           The postprocessed image was written to: " +
                    self.postprocessed_image_name,
                    self.attached_log_file,
                    precede_with_timestamp=False)
                Miscellaneous.print_postproc_parameters(
                    self.configuration.postproc_data_object.versions[
                        self.configuration.postproc_data_object.
                        version_selected].layers, self.attached_log_file)

        self.work_next_task_signal.emit("Next job")

        # Print timing info for this job.
        self.my_timer.stop('Execution over all')
        if self.configuration.global_parameters_protocol_level > 0:
            self.my_timer.protocol(self.attached_log_file)

    def set_status_bar_processing_phase(self, phase):
        """
        Put a text of the form
            "Processing < job name >, < processing step >."
        on the main window status bar.

        :param phase: Processing phase (string)
        :return: -
        """

        # Assemble the status line first, then send it via the GUI signal.
        message = "Processing " + self.input_name + ", " + phase + "."
        self.set_status_bar_signal.emit(message, "black")