Example #1
0
 def pre_process(self):
     """Prepare the mask and validate the method/percentile parameters.

     Builds a circular mask (optionally sized from the centre-of-rotation
     metadata) or an all-ones mask, validates the 'method' parameter and
     clips the percentile range to [0, 100].
     """
     in_meta_data = self.get_in_meta_data()[0]
     data = self.get_in_datasets()[0]
     data_shape = data.get_shape()
     width = data_shape[-1]
     if self.parameters['masking'] is True:
         ratio = self.parameters['ratio']
         if ratio is None:
             try:
                 cor = np.min(in_meta_data.get('centre_of_rotation'))
                 ratio = (min(cor, abs(width - cor))) / (width * 0.5)
             except KeyError:
                 # No centre-of-rotation metadata: use the full ratio.
                 ratio = 1.0
         self.mask = self.circle_mask(width, ratio)
     else:
         self.mask = np.ones((width, width), dtype=np.float32)
     self.method = self.parameters['method']
     # Membership test replaces the chained '==' comparisons.
     if self.method not in ('percentile', 'extrema'):
         msg = "\n***********************************************\n"\
             "!!! ERROR !!! -> Wrong method. Please use only one of "\
             "the provided options \n"\
             "***********************************************\n"
         # logging.warn is deprecated in favour of logging.warning
         logging.warning(msg)
         cu.user_message(msg)
         raise ValueError(msg)
     self.p_min, self.p_max = np.sort(
         np.clip(np.asarray(self.parameters['p_range'], dtype=np.float32),
                 0.0, 100.0))
Example #2
0
    def _transport_process(self, plugin):
        """ Organise required data and execute the main plugin processing.

        :param plugin plugin: The current plugin instance.
        :returns: 1 if a kill signal interrupted processing, else None.
        """
        pDict, result, nTrans = self._initialise(plugin)
        cp, sProc, sTrans = self.__get_checkpoint_params(plugin)

        # Process-frame indices to execute within every transfer chunk.
        prange = range(sProc, pDict['nProc'])
        for count in range(sTrans, nTrans):
            end = count == nTrans - 1
            self._log_completion_status(count, nTrans, plugin.name)

            # get the transfer data
            transfer_data = self._transfer_all_data(count)
            # loop over the process data
            result, kill = self._process_loop(plugin, prange, transfer_data,
                                              count, pDict, result, cp)

            self._return_all_data(count, result, end)

            if kill:
                # A kill signal was caught mid-run: abort with status 1.
                return 1

        # Falling out of the loop means no kill signal occurred, so the
        # previous 'if not kill' guard was redundant and has been removed.
        cu.user_message("%s - 100%% complete" % (plugin.name))
Example #3
0
 def pre_process(self):
     """Read and validate parameters, then map the preview indices.

     Caches the search parameters as numpy scalar types, validates the
     broadcast method (falling back to 'median'), and computes the slice
     indices of this plugin's preview relative to the original preview.
     """
     self.drop = np.int16(self.parameters['row_drop'])
     self.smin, self.smax = np.int16(self.parameters['search_area'])
     self.search_radius = np.float32(self.parameters['search_radius'])
     self.search_step = np.float32(self.parameters['step'])
     self.ratio = np.float32(self.parameters['ratio'])
     self.est_cor = self.parameters['start_pixel']
     self.broadcast_method = str(self.parameters['broadcast_method'])
     self.error_msg_1 = ""
     self.error_msg_2 = ""
     self.error_msg_3 = ""
     # Membership test replaces the chain of '==' comparisons.
     if self.broadcast_method not in ('mean', 'median', 'linear_fit',
                                      'nearest'):
         self.error_msg_3 = "!!! WARNING !!! Selected broadcasting "\
          "method is out of the list. Use the default option: 'median'"
         # logging.warn is deprecated in favour of logging.warning
         logging.warning(self.error_msg_3)
         cu.user_message(self.error_msg_3)
         self.broadcast_method = 'median'
     data = self.get_in_datasets()[0]
     starts, stops, steps = data.get_preview().get_starts_stops_steps()[0:3]
     start_ind = starts[1]
     stop_ind = stops[1]
     step_ind = steps[1]
     name = data.get_name()
     pre_start = self.exp.meta_data.get(name + '_preview_starts')[1]
     pre_stop = self.exp.meta_data.get(name + '_preview_stops')[1]
     pre_step = self.exp.meta_data.get(name + '_preview_steps')[1]
     # Indices of the originally previewed data ...
     self.origin_prev = np.arange(pre_start, pre_stop, pre_step)
     # ... restricted to the slice this plugin actually processes.
     self.plugin_prev = self.origin_prev[start_ind:stop_ind:step_ind]
Example #4
0
    def _transport_process(self, plugin):
        """ Organise required data and execute the main plugin processing.

        :param plugin plugin: The current plugin instance.
        """
        self.process_setup(plugin)
        pDict = self.pDict
        # One transfer-shaped output buffer per output dataset.
        result = [
            np.empty(d._get_plugin_data().get_shape_transfer(),
                     dtype=np.float32) for d in pDict['out_data']
        ]

        # loop over the transfer data
        nTrans = pDict['nTrans']
        for count in range(nTrans):
            end = True if count == nTrans - 1 else False
            percent_complete = count / (nTrans * 0.01)
            cu.user_message("%s - %3i%% complete" %
                            (plugin.name, percent_complete))
            # get the transfer data
            transfer_data = self._transfer_all_data(count)

            # loop over the process data
            for i in range(pDict['nProc']):
                data = self._get_input_data(plugin, transfer_data, i, count)
                res = self._get_output_data(plugin.plugin_process_frames(data),
                                            i)
                # BUG FIX: nOut is a count (cf. range(pDict['nProc']) above),
                # so iterate range(nOut) rather than the integer itself.
                for j in range(pDict['nOut']):
                    out_sl = pDict['out_sl']['process'][i][j]
                    result[j][out_sl] = res[j]

            self._return_all_data(count, result, end)

        cu.user_message("%s - 100%% complete" % (plugin.name))
        plugin._revert_preview(pDict['in_data'])
    def _transport_process(self, plugin):
        """ Organise required data and execute the main plugin processing.

        :param plugin plugin: The current plugin instance.
        :returns: 1 when interrupted by a kill signal, otherwise None.
        """
        pDict, result, nTrans = self._initialise(plugin)
        cp, sProc, sTrans = self.__get_checkpoint_params(plugin)

        # The range of process frames handled within each transfer.
        prange = range(sProc, pDict['nProc'])
        for count in range(sTrans, nTrans):
            end = count == nTrans - 1
            self._log_completion_status(count, nTrans, plugin.name)

            # get the transfer data
            transfer_data = self._transfer_all_data(count)
            # loop over the process data
            result, kill = self._process_loop(
                    plugin, prange, transfer_data, count, pDict, result, cp)

            self._return_all_data(count, result, end)

            if kill:
                # Interrupted: report failure status to the caller.
                return 1

        # Dead 'count = 0' initialiser and always-true 'if not kill'
        # guard removed: a True kill always returns inside the loop.
        cu.user_message("%s - 100%% complete" % (plugin.name))
Example #6
0
    def _process(self, plugin):
        """ Organise required data and execute the main plugin processing.

        :param plugin plugin: The current plugin instance.
        """
        self.process_checks()
        in_data, out_data = plugin.get_datasets()

        expInfo = plugin.exp.meta_data
        in_slice_list = self.__get_all_slice_lists(in_data, expInfo)
        out_slice_list = self.__get_all_slice_lists(out_data, expInfo)

        squeeze_dict = self.__set_functions(in_data, 'squeeze')
        expand_dict = self.__set_functions(out_data, 'expand')

        n_slices = len(in_slice_list[0])
        for idx in range(n_slices):
            # Report progress as a whole-number percentage.
            done = idx / (n_slices * 0.01)
            cu.user_message("%s - %3i%% complete" % (plugin.name, done))

            # Fetch the padded, squeezed input section for this slice.
            section, slice_list = self.__get_all_padded_data(
                in_data, in_slice_list, idx, squeeze_dict)
            # Run the plugin on the section and write the result out.
            frames = plugin.process_frames(section, slice_list)
            self.__set_out_data(out_data, out_slice_list, frames, idx,
                                expand_dict)

        cu.user_message("%s - 100%% complete" % (plugin.name))
        plugin._revert_preview(in_data)
Example #7
0
    def __real_plugin_run(self, plugin_list, out_data_objs, start, stop):
        """ Execute each plugin in plugin_list[start:stop].

        :param list plugin_list: full list of plugin dictionaries.
        :param list out_data_objs: per-plugin output data objects.
        :param int start: first plugin index to run.
        :param int stop: one past the last plugin index to run.
        """
        exp = self.exp
        for i in range(start, stop):
            # BUG FIX: compare integers with '==', not the identity
            # operator 'is' (int identity is an implementation detail).
            link_type = "final_result" if i == len(plugin_list)-2 else \
                "intermediate"

            exp._barrier()

            # Publish this plugin's output datasets to the experiment index.
            for key in out_data_objs[i - start]:
                exp.index["out_data"][key] = out_data_objs[i - start][key]

            exp._barrier()
            plugin = pu.plugin_loader(exp, plugin_list[i])

            exp._barrier()
            cu.user_message("*Running the %s plugin*" % (plugin_list[i]['id']))
            plugin._run_plugin(exp, self)

            exp._barrier()
            if self.mpi:
                cu.user_messages_from_all(plugin.name,
                                          plugin.executive_summary())
            else:
                for message in plugin.executive_summary():
                    cu.user_message("%s - %s" % (plugin.name, message))

            exp._barrier()
            out_datasets = plugin.parameters["out_datasets"]
            exp._reorganise_datasets(out_datasets, link_type)
Example #8
0
    def _run_plugin_list_setup(self, plugin_list):
        """ Run the plugin list through the framework without executing the
        main processing.
        """
        plugin_list._check_loaders()
        self.__check_gpu()

        n_loaders = self.exp.meta_data.plugin_list._get_n_loaders()
        n_plugins = plugin_list._get_n_processing_plugins()
        plist = plugin_list.plugin_list

        self.exp._setup(self, plugin_list)
        # set loaders
        for i in range(n_loaders):
            pu.plugin_loader(self.exp, plist[i])
            self.exp._set_initial_datasets()

        # run all plugin setup methods and store information in experiment
        # collection (the previous 'count' loop counter was unused and has
        # been removed)
        for plugin_dict in plist[n_loaders:n_loaders + n_plugins]:
            plugin = pu.plugin_loader(self.exp, plugin_dict, check=True)
            plugin._revert_preview(plugin.get_in_datasets())
            plugin_dict['cite'] = plugin.get_citation_information()
            plugin._clean_up()
            self.exp._update(plugin_dict)
            self.exp._merge_out_data_to_in()
        self.exp._reset_datasets()

        plugin_list._add_missing_savers(self.exp)
        cu.user_message("Plugin list check complete!")
        #  ********* transport function ***********
        self._transport_update_plugin_list()
Example #9
0
    def _run_plugin_list_check(self, plugin_list):
        """ Dry-run the plugin list through the framework; no main
        processing is executed.
        """
        plugin_list._check_loaders()
        n_loaders = plugin_list._get_n_loaders()

        self.__check_gpu()

        # First pass: every plugin, indices shifted past the loaders.
        all_plugins = np.arange(len(plugin_list.plugin_list)) - n_loaders
        self.__fake_plugin_list_run(plugin_list, all_plugins, setnxs=True)

        savers_idx_before = plugin_list._get_savers_index()
        plugin_list._add_missing_savers(self.exp.index['in_data'].keys())

        #  ********* transport function ***********
        self._transport_update_plugin_list()

        self.exp._clear_data_objects()

        # Second pass: only the savers that were added above.
        new_savers = set(plugin_list._get_savers_index())
        new_savers.difference_update(savers_idx_before)
        check_list = np.array(list(new_savers)) - n_loaders
        self.__fake_plugin_list_run(plugin_list, check_list)

        self.exp._clear_data_objects()
        cu.user_message("Plugin list check complete!")
Example #10
0
    def real_plugin_run(self, plugin_list, out_data_objs, start, stop):
        """ Execute each plugin in plugin_list[start:stop].

        :param list plugin_list: full list of plugin dictionaries.
        :param list out_data_objs: per-plugin output data objects.
        :param int start: first plugin index to run.
        :param int stop: one past the last plugin index to run.
        """
        exp = self.exp
        for i in range(start, stop):
            # BUG FIX: use '==' for integer comparison; 'is' checks object
            # identity and only works by accident for small cached ints.
            link_type = "final_result" if i == len(plugin_list)-2 else \
                "intermediate"

            exp.barrier()

            # Publish this plugin's output datasets to the experiment index.
            for key in out_data_objs[i - start]:
                exp.index["out_data"][key] = out_data_objs[i - start][key]

            exp.barrier()
            plugin = pu.plugin_loader(exp, plugin_list[i])

            exp.barrier()
            cu.user_message("*Running the %s plugin*" % (plugin_list[i]['id']))
            plugin.run_plugin(exp, self)

            exp.barrier()
            if self.mpi:
                cu.user_message_from_all(plugin.name,
                                         plugin.executive_summary())
            else:
                cu.user_message("%s - %s" % (plugin.name,
                                plugin.executive_summary()))

            exp.barrier()
            out_datasets = plugin.parameters["out_datasets"]
            exp.reorganise_datasets(out_datasets, link_type)
Example #11
0
    def __run_plugin(self, plugin_dict):
        """Load, execute and tear down a single plugin, then tidy datasets."""
        loaded = self._transport_load_plugin(self.exp, plugin_dict)

        #  ********* transport function ***********
        self._transport_pre_plugin()

        cu.user_message("*Running the %s plugin*" % loaded.name)

        #  ******** transport 'process' function is called inside here ********
        loaded._run_plugin(self.exp, self)  # plugin driver
        self.exp._barrier()

        cu._output_summary(self.exp.meta_data.get("mpi"), loaded)

        loaded._clean_up()

        finalise = self.exp._finalise_experiment_for_current_plugin()

        #  ********* transport function ***********
        self._transport_post_plugin()

        # Terminate every dataset scheduled for removal or replacement.
        for dataset in finalise['remove'] + finalise['replace']:
            #  ********* transport function ***********
            self._transport_terminate_dataset(dataset)

        self.exp._reorganise_datasets(finalise)
Example #12
0
    def _process(self, plugin):
        """ Organise required data and execute the main plugin processing.

        :param plugin plugin: The current plugin instance.
        """
        self.process_checks()
        in_data, out_data = plugin.get_datasets()

        meta = plugin.exp.meta_data
        in_slices = self.__get_all_slice_lists(in_data, meta)
        out_slices = self.__get_all_slice_lists(out_data, meta)

        squeeze_fns = self.__set_functions(in_data, 'squeeze')
        expand_fns = self.__set_functions(out_data, 'expand')

        total = len(in_slices[0])
        for i in range(total):
            # Emit a coarse progress percentage for this slice.
            cu.user_message("%s - %3i%% complete" %
                            (plugin.name, i / (total * 0.01)))

            section, slice_list = self.__get_all_padded_data(
                in_data, in_slices, i, squeeze_fns)
            self.__set_out_data(out_data, out_slices,
                                plugin.process_frames(section, slice_list),
                                i, expand_fns)

        cu.user_message("%s - 100%% complete" % (plugin.name))
        plugin._revert_preview(in_data)
Example #13
0
 def executive_summary(self):
     """Return a one-entry summary describing the centre of rotation."""
     cor_str = str(self.cor_for_executive_summary)
     if self.error_msg_1 == "" and self.error_msg_2 == "":
         return ["Centre of rotation is : %s" % cor_str]
     # Errors occurred: report them and flag the estimate as unreliable.
     msg = "\n" + self.error_msg_1 + "\n" + self.error_msg_2
     cu.user_message("(Not well) estimated centre of rotation is : %s"
                     % cor_str)
     return [msg]
Example #14
0
    def run_plugin_list_check(self, plugin_list):
        """Validate the plugin list without running the main processing."""
        exp = self.exp
        exp.barrier()
        self.check_loaders_and_savers(plugin_list)

        exp.barrier()
        # Execute every plugin in check-only mode.
        pu.run_plugins(exp, plugin_list, check=True)

        exp.barrier()
        exp.clear_data_objects()

        exp.barrier()
        cu.user_message("Plugin list check complete!")
Example #15
0
 def pre_process(self):
     """Validate the output format and bit depth before saving images.

     Falls back to 32-bit output when an unsupported bit depth is
     requested, then caches the data range used for rescaling.
     """
     super(ImageSaver, self).pre_process()
     self.file_format = self.parameters['format']
     num_bit = self.parameters['num_bit']
     self.pData = self.get_plugin_in_datasets()[0]
     # Only 8-, 16- and 32-bit outputs are supported.
     if num_bit not in (8, 16, 32):
         self.num_bit = 32
         msg = "\n***********************************************\n"\
             "This option %s is not available. Reset to 32 \n"\
             %str(num_bit)
         cu.user_message(msg)
     else:
         self.num_bit = num_bit
     self._data_range = self._get_min_and_max()
Example #16
0
    def _run_plugin_list_check(self, plugin_list):
        """ Run the plugin list through the framework without executing the
        main processing.
        """
        exp = self.exp
        exp._barrier()
        self.__check_loaders_and_savers()

        exp._barrier()
        # Run each plugin in check-only mode.
        pu.run_plugins(exp, plugin_list, check=True)

        exp._barrier()
        exp._clear_data_objects()

        exp._barrier()
        cu.user_message("Plugin list check complete!")
Example #17
0
    def _run_plugin_list_check(self, plugin_list):
        """Dry-run the plugin list; no main processing is performed."""
        barrier = self.exp._barrier

        barrier()
        self.__check_loaders_and_savers()

        barrier()
        # check=True runs every plugin's setup without processing data.
        pu.run_plugins(self.exp, plugin_list, check=True)

        barrier()
        self.exp._clear_data_objects()

        barrier()
        cu.user_message("Plugin list check complete!")
Example #18
0
 def check_file_path(self, file_path):
     """Validate an MTF file path and return its extension.

     :param str file_path: path to the MTF file (may be None).
     :returns: the file extension of the validated path, e.g. '.tif'.
     :raises ValueError: if the path is missing or not an existing file.
     """
     # Guard clauses replace the nested if/else; the dead initial
     # file_ext = ".tif" (always overwritten or unreachable) is removed.
     if file_path is None:
         msg = "!!! Please provide a file path to the MTF !!!"
         # logging.warn is deprecated in favour of logging.warning
         logging.warning(msg)
         cu.user_message(msg)
         raise ValueError(msg)
     if not os.path.isfile(file_path):
         msg = "!!! No such file: %s !!!"\
                 " Please check the file path" %str(file_path)
         logging.warning(msg)
         cu.user_message(msg)
         raise ValueError(msg)
     _, file_ext = os.path.splitext(file_path)
     return file_ext
Example #19
0
    def __get_outer_pad(self):
        """Return the outer-pad factor for reconstruction.

        The 'outer_pad' parameter may be a bool (True pads by the
        square-diagonal factor sqrt(2)-1) or a numeric factor clamped to
        MAX_OUTER_PAD. Returns 0 when padding is disabled or unsupported.
        """
        factor = math.sqrt(2)-1  # length of diagonal of square is side*sqrt(2)
        pad = self.parameters['outer_pad'] if 'outer_pad' in \
            self.parameters else False

        if pad is not False and not self.padding_alg:
            msg = 'This reconstruction algorithm cannot be padded.'
            cu.user_message(msg)
            return 0

        if isinstance(pad, bool):
            return factor if pad is True else 0
        factor = float(pad)
        if factor > MAX_OUTER_PAD:
            factor = MAX_OUTER_PAD
            msg = 'Maximum outer_pad value is 2.1, using this instead'
            cu.user_message(msg)
        # BUG FIX: return the clamped factor; previously the unclamped
        # 'float(pad)' was returned, making the MAX_OUTER_PAD clamp a no-op.
        return factor
Example #20
0
 def _coarse_search(self, sino, start_cor, stop_cor, ratio, drop):
     """
     Coarse search for finding the rotation center.

     :param sino: 2D [0; Pi] sinogram (rows = angles, cols = detector).
     :param start_cor: lower bound of the search range (clipped to image).
     :param stop_cor: upper bound of the search range (clipped to image).
     :param ratio: controls the radius of the evaluation mask.
     :param drop: number of rows dropped when building the mask.
     :returns: the candidate centre minimising the FFT-based metric.
     """
     (nrow, ncol) = sino.shape
     start_cor, stop_cor = np.sort((start_cor, stop_cor))
     start_cor = np.int16(np.clip(start_cor, 0, ncol - 1))
     stop_cor = np.int16(np.clip(stop_cor, 0, ncol - 1))
     cen_fliplr = (ncol - 1.0) / 2.0
     # Flip left-right the [0:Pi ] sinogram to make a full [0;2Pi] sinogram
     flip_sino = np.fliplr(sino)
     # Below image is used for compensating the shift of the [Pi;2Pi] sinogram
     # It helps to avoid local minima.
     comp_sino = np.flipud(sino)
     list_cor = np.arange(start_cor, stop_cor + 1.0)
     list_metric = np.zeros(len(list_cor), dtype=np.float32)
     mask = self._create_mask(2 * nrow, ncol, 0.5 * ratio * ncol, drop)
     sino_sino = np.vstack((sino, flip_sino))
     for i, cor in enumerate(list_cor):
         shift = np.int16(2.0 * (cor - cen_fliplr))
         # The lower half of sino_sino is rewritten in place per candidate.
         _sino = sino_sino[nrow:]
         _sino[...] = np.roll(flip_sino, shift, axis=1)
         if shift >= 0:
             _sino[:, :shift] = comp_sino[:, :shift]
         else:
             _sino[:, shift:] = comp_sino[:, shift:]
         list_metric[i] = np.mean(
             np.abs(np.fft.fftshift(fft.fft2(sino_sino))) * mask)
     minpos = np.argmin(list_metric)
     if minpos == 0:
         self.error_msg_1 = "!!! WARNING !!! Global minimum is out of "\
         "the searching range. Please extend smin"
         # logging.warn is deprecated in favour of logging.warning
         logging.warning(self.error_msg_1)
         cu.user_message(self.error_msg_1)
     if minpos == len(list_metric) - 1:
         self.error_msg_2 = "!!! WARNING !!! Global minimum is out of "\
          "the searching range. Please extend smax"
         logging.warning(self.error_msg_2)
         cu.user_message(self.error_msg_2)
     rot_centre = list_cor[minpos]
     return rot_centre
    def __find_dark_and_flat(self, data_obj):
        """Attach dark/flat handling to data_obj via the image key.

        Falls back to the conventional flat/dark field paths when no
        image key exists in the backing file.
        """
        # ignore_flats falsy (None/0/empty) means "ignore nothing".
        ignore = self.parameters['ignore_flats'] or None
        try:
            key_path = self.parameters['image_key_path']
            image_key = data_obj.backing_file[key_path][...]

            from savu.data.data_structures.data_type import ImageKey
            data_obj.data = ImageKey(data_obj, image_key, 0, ignore=ignore)
        except KeyError:
            cu.user_message("An image key was not found.")
            try:
                from savu.data.data_structures.data_type import NoImageKey
                data_obj.data = NoImageKey(data_obj, None, 0)
                entry = 'entry1/tomo_entry/instrument/detector/'
                data_obj.data._set_flat_path(entry + 'flatfield')
                data_obj.data._set_dark_path(entry + 'darkfield')
            except KeyError:
                cu.user_message("Dark/flat data was not found in input file.")
    def __find_dark_and_flat(self, data_obj):
        """Configure dark-/flat-field access for the given data object."""
        # A falsy ignore_flats parameter is normalised to None.
        flats_to_ignore = self.parameters['ignore_flats'] if \
            self.parameters['ignore_flats'] else None
        backing = data_obj.backing_file
        try:
            image_key = backing[self.parameters['image_key_path']][...]

            from savu.data.data_structures.data_type import ImageKey
            data_obj.data = \
                ImageKey(data_obj, image_key, 0, ignore=flats_to_ignore)
        except KeyError:
            # No image key: try the conventional flat/dark field entries.
            cu.user_message("An image key was not found.")
            try:
                from savu.data.data_structures.data_type import NoImageKey
                data_obj.data = NoImageKey(data_obj, None, 0)
                detector = 'entry1/tomo_entry/instrument/detector/'
                data_obj.data._set_flat_path(detector + 'flatfield')
                data_obj.data._set_dark_path(detector + 'darkfield')
            except KeyError:
                cu.user_message("Dark/flat data was not found in input file.")
Example #23
0
    def _run_plugin_list(self):
        """ Create an experiment and run the plugin list.

        :returns: the completed Experiment instance.
        """
        self.exp = Experiment(self.options)
        exp = self.exp
        plugin_list = exp.meta_data.plugin_list.plugin_list

        logging.info("run_plugin_list: 1")
        exp._barrier()
        self._run_plugin_list_check(plugin_list)

        logging.info("run_plugin_list: 2")
        exp._barrier()
        logging.debug("Running process List.save_list_to_file")
        # Persist the plugin list alongside the output nexus file.
        exp.meta_data.plugin_list._save_plugin_list(
            exp.meta_data.get_meta_data("nxs_filename"), exp=exp)

        logging.info("run_plugin_list: 3")
        exp._barrier()
        self._transport_run_plugin_list()

        logging.info("run_plugin_list: 4")
        exp._barrier()

        for line in ("***********************",
                     "* Processing Complete *",
                     "***********************"):
            cu.user_message(line)

        exp.nxs_file.close()
        return exp
Example #24
0
    def _run_plugin_list(self):
        """ Create an experiment and run the plugin list.

        :returns: the Experiment after processing completes.
        """
        self.exp = Experiment(self.options)
        plugin_list = self.exp.meta_data.plugin_list.plugin_list

        logging.info("run_plugin_list: 1")
        self.exp._barrier()
        self._run_plugin_list_check(plugin_list)

        logging.info("run_plugin_list: 2")
        self.exp._barrier()
        expInfo = self.exp.meta_data
        logging.debug("Running process List.save_list_to_file")
        nxs_name = expInfo.get_meta_data("nxs_filename")
        expInfo.plugin_list._save_plugin_list(nxs_name, exp=self.exp)

        logging.info("run_plugin_list: 3")
        self.exp._barrier()
        self._transport_run_plugin_list()

        logging.info("run_plugin_list: 4")
        self.exp._barrier()

        banner = "***********************"
        cu.user_message(banner)
        cu.user_message("* Processing Complete *")
        cu.user_message(banner)

        self.exp.nxs_file.close()
        return self.exp
Example #25
0
 def _get_min_and_max(self):
     """Fetch the global data range from metadata, or fall back to 'image'.

     :returns: (min, max) tuple when stats metadata exists, otherwise the
         string 'image' (meaning per-slice rescaling).
     """
     data = self.get_in_datasets()[0]
     pattern = self.parameters['pattern']
     try:
         self.the_min = np.min(data.meta_data.get(['stats', 'min', pattern]))
         self.the_max = np.max(data.meta_data.get(['stats', 'max', pattern]))
         self._data_range = (self.the_min, self.the_max)
     except KeyError:
         # No global stats available: fall back to per-image rescaling.
         self._data_range = 'image'
         # Membership test replaces the chained '==' comparisons.
         if self.file_format in ("tiff", "tif"):
             self.the_min = None
             self.the_max = None
             msg = "\n***********************************************\n"\
             "!!!Warning!!!-> No global maximum and global minimum found\n"\
             "in the metadata. Please run the MaxAndMin plugin before\n" \
             "ImageSaver or input manually. Otherwise, local minimum\n" \
             "and local maximum will be used for rescaling. This may\n"\
             "result the fluctuation of brightness between slices.\n"\
             "***********************************************\n"
             # The warning only matters for integer bit depths.
             if self.num_bit in (8, 16):
                 cu.user_message(msg)
     return self._data_range
Example #26
0
 def __find_dark_and_flat(self, data_obj, flat=None, dark=None):
     """Attach dark/flat field access to data_obj, then apply optional
     dark and flat overrides.
     """
     # A falsy ignore_flats is normalised to None.
     ignore = self.parameters['ignore_flats'] or None
     try:
         key_path = 'entry1/tomo_entry/instrument/detector/image_key'
         image_key = data_obj.backing_file[key_path][...]
         data_obj.data = ImageKey(data_obj, image_key, 0, ignore=ignore)
     except KeyError:
         cu.user_message("An image key was not found.")
         try:
             data_obj.data = NoImageKey(data_obj, None, 0)
             entry = 'entry1/tomo_entry/instrument/detector/'
             data_obj.data._set_flat_path(entry + 'flatfield')
             data_obj.data._set_dark_path(entry + 'darkfield')
         except KeyError:
             cu.user_message("Dark/flat data was not found in input file.")
     data_obj.data._set_dark_and_flat()
     # NOTE(review): truthiness test — assumes the dark/flat overrides are
     # None or dict-like rather than numpy arrays; confirm with callers.
     if dark:
         data_obj.data.update_dark(dark)
     if flat:
         data_obj.data.update_flat(flat)
Example #27
0
 def __output_final_message(self):
     """Print the closing banner, noting a killsignal interruption."""
     # Direct membership test replaces the redundant
     # 'True if ... else False' and the unnecessary .keys() call.
     kill = 'killsignal' in self.exp.meta_data.get_dictionary()
     msg = "interrupted by killsignal" if kill else "Complete"
     stars = 40 if kill else 23
     cu.user_message("*"*stars)
     cu.user_message("* Processing " + msg + " *")
     cu.user_message("*"*stars)
Example #28
0
 def __output_final_message(self):
     """Emit the final status banner for the run."""
     # 'in' on the dict itself already yields a bool and is equivalent to
     # checking .keys(), so the conditional expression was redundant.
     kill = 'killsignal' in self.exp.meta_data.get_dictionary()
     msg = "interrupted by killsignal" if kill else "Complete"
     stars = 40 if kill else 23
     cu.user_message("*"*stars)
     cu.user_message("* Processing " + msg + " *")
     cu.user_message("*"*stars)
Example #29
0
    def __run_plugin(self, plugin_dict):
        """Load, drive and finalise one plugin, then tidy its datasets."""
        plugin = self._transport_load_plugin(self.exp, plugin_dict)

        #  ********* transport function ***********
        self._transport_pre_plugin()
        cu.user_message("*Running the %s plugin*" % plugin.name)

        #  ******** transport 'process' function is called inside here ********
        plugin._run_plugin(self.exp, self)  # plugin driver

        self.exp._barrier(msg="Plugin returned from driver in Plugin Runner")
        cu._output_summary(self.exp.meta_data.get("mpi"), plugin)
        plugin._clean_up()
        finalise = self.exp._finalise_experiment_for_current_plugin()

        #  ********* transport function ***********
        self._transport_post_plugin()

        # Terminate every dataset the experiment has finished with.
        for stale in finalise['remove'] + finalise['replace']:
            #  ********* transport function ***********
            self._transport_terminate_dataset(stale)

        self.exp._reorganise_datasets(finalise)
Example #30
0
    def __hdf5_file_write_failed_check(self, nBytes, nProcs):
        """Raise when the data volume per process exceeds the 2GB limit.

        :param int nBytes: total number of bytes to write.
        :param int nProcs: number of MPI processes.
        :raises Exception: when nBytes/nProcs >= 2GB, with advice on how
            to proceed.
        """
        _2GB = 2e9

        # BUG FIX: np.float was removed in NumPy 1.20; it was only an
        # alias for the builtin float anyway.
        if nBytes / float(nProcs) < _2GB:
            return

        msg = "The data is too big for the number of processes, please "
        if self.exp.meta_data.get('femail') == \
                '*****@*****.**':
            n_procs_big = 160  # number of processes for BIG data
            # BUG FIX: compare ints with '==', not identity ('is').
            savu_mpi_big = nProcs == n_procs_big
            if savu_mpi_big or (nBytes / float(n_procs_big) >= _2GB):
                if self.exp.meta_data.get('femail'):
                    msg += ("contact %s" % self.exp.meta_data.get('femail'))
                else:
                    msg += "increase the number of cores."
            else:
                msg += "use savu_mpi_big."
        else:
            msg += "increase the number of cores."

        cu.user_message(msg)
        raise Exception(msg)
Example #31
0
    def _run_plugin_list_check(self, plugin_list):
        """ Run the plugin list through the framework without executing the
        main processing.
        """
        plugin_list._check_loaders()
        n_loaders = plugin_list._get_n_loaders()
        self.__check_gpu()
        # Dry-run every plugin (indices shifted past the loaders).
        self.__fake_plugin_list_run(
            plugin_list,
            np.arange(len(plugin_list.plugin_list)) - n_loaders,
            setnxs=True)
        savers_idx_before = plugin_list._get_savers_index()
        plugin_list._add_missing_savers(self.exp.index['in_data'].keys())

        #  ********* transport function ***********
        self._transport_update_plugin_list()

        self.exp._clear_data_objects()

        # Dry-run only the savers that were just added.
        added = set(plugin_list._get_savers_index()) - set(savers_idx_before)
        self.__fake_plugin_list_run(plugin_list,
                                    np.array(list(added)) - n_loaders)

        self.exp._clear_data_objects()
        cu.user_message("Plugin list check complete!")
Example #32
0
    def _process(self, plugin):
        """ Organise required data and execute the main plugin processing.

        :param plugin plugin: The current plugin instance.
        """
        in_data, out_data = plugin.get_datasets()

        expInfo = plugin.exp.meta_data
        in_slice_list, in_global_frame_idx = \
            self.__get_all_slice_lists(in_data, expInfo)
        out_slice_list, _ = self.__get_all_slice_lists(out_data, expInfo)
        plugin.set_global_frame_index(in_global_frame_idx)

        squeeze_dict = self.__set_functions(in_data, 'squeeze')
        expand_dict = self.__set_functions(out_data, 'expand')

        total = len(in_slice_list[0])
        last_bucket = -1
        for idx in range(total):
            percent = idx / (total * 0.01)
            # Only print when progress crosses into a new 5% bucket.
            bucket = percent // 5
            if bucket != last_bucket:
                cu.user_message("%s - %3i%% complete" % (plugin.name, percent))
                last_bucket = bucket

            section, slice_list = self.__get_all_padded_data(
                in_data, in_slice_list, idx, squeeze_dict)
            plugin.set_current_slice_list(slice_list)
            self.__set_out_data(out_data, out_slice_list,
                                plugin.process_frames(section), idx,
                                expand_dict)

        cu.user_message("%s - 100%% complete" % (plugin.name))
        plugin._revert_preview(in_data)
Example #33
0
    def _stitch_data(self, dObj, path, data):
        """Stitch together all datasets matching a wildcard path.

        :param dObj: the data object that will hold the stitched result.
        :param str path: hdf5 path whose final component may contain a
            wildcard matching several sibling datasets.
        :param dict data: stitching description; read by
            _get_stitching_info and may contain a 'remove' entry.
        :returns: dObj, with its data set to a StitchData instance, or
            unchanged when no matching datasets are found.
        """
        stype, dim = self._get_stitching_info(data)
        remove = data['remove'] if 'remove' in list(data.keys()) else None

        group_name, data_name = os.path.split(path)
        # find all files with the given name
        group = dObj.backing_file.require_group(group_name)
        matches = fnmatch.filter(list(group.keys()), data_name)

        # Recover the numeric value each match substitutes for the
        # wildcard: characters present in the match but absent from the
        # pattern appear in the ndiff output prefixed with '- '.
        number = []
        for m in matches:
            diff_number = ''
            for diff in difflib.ndiff(m, data_name):
                split = diff.split('- ')
                if len(split) > 1:
                    diff_number += split[-1]
            number.append(int(diff_number))

        # Order the matches by their extracted wildcard number and record
        # the sorted numbers as metadata.
        matches = [matches[i] for i in np.argsort(number)]
        dObj.data_info.set('wildcard_values', sorted(number))

        data_obj_list = []
        for match in matches:
            match_path = os.path.join(group_name, match)
            # Each match becomes a temporary data object sharing the same
            # backing file; it is removed from the index once set up.
            sub_obj = self.exp.create_data_object('in_data', match)
            sub_obj.backing_file = dObj.backing_file
            data_obj_list.append(self._setup_data(sub_obj, match_path))
            del self.exp.index['in_data'][match]

        if data_obj_list:
            dObj.data = StitchData(data_obj_list, stype, dim, remove=remove)
            dObj.set_original_shape(dObj.data.get_shape())
        else:
            cu.user_message("The data set %s is empty." % data_name)

        return dObj
Example #34
0
    def _run_plugin_list_setup(self, plugin_list):
        """ Run the plugin list through the framework without executing the
        main processing.
        """
        plugin_list._check_loaders()
        self.__check_gpu()

        n_loaders = self.exp.meta_data.plugin_list._get_n_loaders()
        n_plugins = plugin_list._get_n_processing_plugins()
        plist = plugin_list.plugin_list

        # set loaders
        for loader_dict in plist[:n_loaders]:
            pu.plugin_loader(self.exp, loader_dict)
            self.exp._set_initial_datasets()

        # run all plugin setup methods and store information in experiment
        # collection
        idx = 0
        for plugin_dict in plist[n_loaders:n_loaders + n_plugins]:
            self.__plugin_setup(plugin_dict, idx)
            idx += 1

        plugin_list._add_missing_savers(self.exp)

        #  ********* transport function ***********
        self._transport_update_plugin_list()

        # check added savers
        for plugin_dict in plist[n_loaders + idx:]:
            self.__plugin_setup(plugin_dict, idx)
            idx += 1

        self.exp._reset_datasets()
        self.exp._finalise_setup(plugin_list)
        cu.user_message("Plugin list check complete!")
Example #35
0
    def process(self, plugin):
        """Run the plugin over every frame, reporting per-frame progress."""
        self.process_checks()
        in_data, out_data = plugin.get_datasets()
        expInfo = plugin.exp.meta_data
        in_slice_list = self.get_all_slice_lists(in_data, expInfo)
        out_slice_list = self.get_all_slice_lists(out_data, expInfo)
        squeeze_dict = self.set_functions(in_data, 'squeeze')
        expand_dict = self.set_functions(out_data, 'expand')

        n_frames = len(in_slice_list[0])
        for idx in range(n_frames):
            cu.user_message("%s - %3i%% complete" %
                            (plugin.name, idx / (n_frames * 0.01)))

            section, slice_list = \
                self.get_all_padded_data(in_data, in_slice_list, idx,
                                         squeeze_dict)
            result = plugin.process_frames(section, slice_list)
            self.set_out_data(out_data, out_slice_list, result, idx,
                              expand_dict)

        cu.user_message("%s - 100%% complete" % (plugin.name))
        plugin.revert_preview(in_data)
Example #36
0
    def pre_process(self):
        """Load and validate the MTF array, then pre-correct the flats.

        The MTF is loaded from a .npy file or an image, clipped to positive
        values and normalised to [0, 1]. Each flat-field frame is then
        PSF-corrected in place via update_flat().

        Raises:
            ValueError: if the MTF file cannot be opened, or its shape does
                not match the projection (flat) shape.
        """
        inData = self.get_in_datasets()[0]
        # dark is fetched alongside flat; only flat is corrected below.
        dark = inData.data.dark()
        flat = inData.data.flat()
        self.data_size = inData.get_shape()
        (self.depth, self.height, self.width) = flat.shape
        file_path = self.parameters["file_path"]
        file_ext = self.check_file_path(file_path)
        # Single error path for both loaders (previously two duplicated
        # try/except blocks with a malformed message string).
        try:
            if file_ext == ".npy":
                self.mtf_array = 1.0 * np.load(file_path)
            else:
                self.mtf_array = 1.0 * np.float32(self.load_image(file_path))
        except IOError:
            msg = "\n*****************************************\n"\
                "!!! ERROR !!! -> Can't open this file: %s \n"\
                "*****************************************\n" % str(file_path)
            logging.warning(msg)
            cu.user_message(msg)
            raise ValueError(msg)

        # Guard against non-positive values, then normalise to [0, 1].
        self.mtf_array[self.mtf_array <= 0.0] = 1.0
        self.mtf_array = self.mtf_array / np.max(self.mtf_array)
        (height_mtf, width_mtf) = self.mtf_array.shape
        if (self.height != height_mtf) or (self.width != width_mtf):
            msg = "\n*****************************************\n"\
            "!!! ERROR !!!-> Projection shape: ({0},{1}) is not the same as "\
            "the mtf shape: ({2},{3})".format(
                self.height, self.width, height_mtf, width_mtf)
            logging.warning(msg)
            cu.user_message(msg)
            raise ValueError(msg)

        # Padding must be non-negative; clip rather than reject.
        self.pad_width = np.clip(int(self.parameters["pad_width"]), 0, None)
        if flat.size:
            flat_updated = np.ones_like(flat, dtype=np.float32)
            for i in np.arange(self.depth):
                flat_updated[i] = self.psf_correction(flat[i], self.mtf_array,
                                                      self.pad_width)
            inData.data.update_flat(flat_updated)
Example #37
0
    def _run_plugin_list(self):
        """ Create an experiment and run the plugin list.
        """
        self.exp.checkpoint = None
        plugin_list = self.exp.meta_data.plugin_list
        plugin_list._check_loaders()

        self.exp._set_nxs_filename()
        if self.exp.meta_data.get('process') == 0:
            # Only the first process saves the plugin list to the nxs file.
            plugin_list._save_plugin_list(
                self.exp.meta_data.get('nxs_filename'))

        self.exp._set_loaders()

        #  ********* transport function ***********
        self._transport_pre_plugin_list_run()

        n_plugins = plugin_list._get_n_processing_plugins()
        n_loaders = self.exp.meta_data.plugin_list._get_n_loaders()

        processing = plugin_list.plugin_list[n_loaders:n_loaders + n_plugins]
        for idx, plugin_dict in enumerate(processing):
            self.exp.meta_data.set('nPlugin', idx)
            self.__run_plugin(plugin_dict)

        # terminate any remaining datasets
        for data in self.exp.index['in_data'].values():
            self._transport_terminate_dataset(data)

        for banner_line in ("***********************",
                            "* Processing Complete *",
                            "***********************"):
            cu.user_message(banner_line)

        logging.info('Processing complete')
        return self.exp
Example #38
0
    def _run_plugin_list(self):
        """ Create an experiment and run the plugin list.
        """
        plugin_list = self.exp.meta_data.plugin_list
        logging.info('Running the plugin list check')
        self._run_plugin_list_check(plugin_list)

        logging.info('Setting up the experiment')
        self.exp._experiment_setup()
        exp_coll = self.exp._get_experiment_collection()
        # Plugin count is fixed before the transport hook runs.
        n_plugins = plugin_list._get_n_processing_plugins()

        #  ********* transport function ***********
        logging.info('Running transport_pre_plugin_list_run()')
        self._transport_pre_plugin_list_run()

        for idx in range(n_plugins):
            self.exp._set_experiment_for_current_plugin(idx)
            self.__run_plugin(exp_coll['plugin_dict'][idx])

        #  ********* transport function ***********
        logging.info('Running transport_post_plugin_list_run')
        self._transport_post_plugin_list_run()

        # terminate any remaining datasets
        for data in self.exp.index['in_data'].values():
            self._transport_terminate_dataset(data)

        for banner_line in ("***********************",
                            "* Processing Complete *",
                            "***********************"):
            cu.user_message(banner_line)

        logging.info('Processing complete')
        email = self.exp.meta_data.get('email')
        if email:
            cu.send_email(email)
        return self.exp
Example #39
0
 def _log_completion_status(self, count, nTrans, name):
     """Emit a '<name> - NN% complete' progress message for transfer count."""
     cu.user_message(
         "%s - %3i%% complete" % (name, count / (nTrans * 0.01)))
Example #40
0
 def executive_summary(self):
     """Return a summary report, or raise if an error message was recorded."""
     if self.msg == "":
         return ["Nothing to Report"]
     cu.user_message(self.msg)
     raise ValueError(self.msg)
Example #41
0
    def pre_process(self):
        """Build the backward-mapping indices for distortion correction.

        Reads the distortion centre and polynomial coefficients either
        directly from the plugin parameters or from a metadata text file,
        shifts the centre by the preview offsets, then computes the clipped
        (y, x) resampling indices stored in self.indices.

        Raises:
            ValueError: if the preview step exceeds 1, the metadata file is
                missing or unreadable, or the preview is too small.
        """
        # NOTE(review): in_pData is fetched but unused here — kept in case
        # the getter has framework side effects; confirm before removing.
        in_pData = self.get_plugin_in_datasets()[0]
        data = self.get_in_datasets()[0]
        name = data.get_name()
        shift = self.exp.meta_data.get(name + '_preview_starts')
        step = self.exp.meta_data.get(name + '_preview_steps')
        x_dim = data.get_data_dimension_by_axis_label('detector_x')
        y_dim = data.get_data_dimension_by_axis_label('detector_y')
        # Downsampled previews (step > 1) would invalidate the pixel-exact
        # index maps built below.
        step_check = \
                True if max([step[i] for i in [x_dim, y_dim]]) > 1 else False
        if step_check:
            self.msg = "\n***********************************************\n"\
                "!!! ERROR !!! -> Plugin doesn't work with the step in the "\
                "preview larger than 1 \n"\
                "***********************************************\n"
            logging.warning(self.msg)
            cu.user_message(self.msg)
            raise ValueError(self.msg)

        x_offset = shift[x_dim]
        y_offset = shift[y_dim]
        file_path = self.parameters["file_path"]
        self.msg = ""
        x_center = 0.0
        y_center = 0.0
        if file_path is None:
            # Centre and coefficients supplied directly as parameters;
            # shift the centre into preview coordinates.
            x_center = np.asarray(self.parameters['center_from_left'],
                                  dtype=np.float32) - x_offset
            y_center = np.asarray(self.parameters['center_from_top'],
                                  dtype=np.float32) - y_offset
            list_fact = np.float32(self.parameters['polynomial_coeffs'])
        else:
            if not (os.path.isfile(file_path)):
                self.msg = "!!! No such file: %s !!!"\
                        " Please check the file path" %str(file_path)
                cu.user_message(self.msg)
                raise ValueError(self.msg)
            try:
                (x_center, y_center,
                 list_fact) = self.load_metadata_txt(file_path)
                x_center = x_center - x_offset
                y_center = y_center - y_offset
            except IOError:
                # Previously the '% str(file_path)' was swallowed into the
                # string literal by an in-string line continuation.
                self.msg = "\n*****************************************\n"\
                    "!!! ERROR !!! -> Can't open this file: %s \n"\
                    "*****************************************\n"\
                    % str(file_path)
                logging.warning(self.msg)
                cu.user_message(self.msg)
                raise ValueError(self.msg)

        data_shape = data.get_shape()
        self.height, self.width = data_shape[y_dim], data_shape[x_dim]
        # Radial polynomial factor and backward-mapped (distorted)
        # coordinates, clipped to the image bounds.
        xu_list = np.arange(self.width) - x_center
        yu_list = np.arange(self.height) - y_center
        xu_mat, yu_mat = np.meshgrid(xu_list, yu_list)
        ru_mat = np.sqrt(xu_mat**2 + yu_mat**2)
        fact_mat = np.sum(np.asarray(
            [factor * ru_mat**i for i, factor in enumerate(list_fact)]),
                          axis=0)
        xd_mat = np.float32(
            np.clip(x_center + fact_mat * xu_mat, 0, self.width - 1))
        yd_mat = np.float32(
            np.clip(y_center + fact_mat * yu_mat, 0, self.height - 1))

        # A degenerate y-range means the preview is too small to resample.
        diff_y = np.max(yd_mat) - np.min(yd_mat)
        if (diff_y < 1):
            self.msg = "\n*****************************************\n\n"\
                    "!!! ERROR !!! -> You need to increase the preview size"\
                    " for this plugin to work \n\n"\
                    "*****************************************\n"
            logging.warning(self.msg)
            cu.user_message(self.msg)

            raise ValueError(self.msg)
        self.indices = np.reshape(yd_mat, (-1, 1)),\
                        np.reshape(xd_mat, (-1, 1))
Example #42
0
 def _log_completion_status(self, count, nTrans, name):
     """Report how far through the transfers we are, as a percentage."""
     pct = count / (nTrans * 0.01)
     cu.user_message("%s - %3i%% complete" % (name, pct))