def _transport_run_plugin_list(self):
    """ Run the plugin list inside the transport layer. """
    exp = self.exp
    plugin_obj = exp.meta_data.plugin_list
    n_loaders = plugin_obj._get_n_loaders()
    plugin_list = exp.meta_data.plugin_list.plugin_list

    for i in range(n_loaders):
        pu.plugin_loader(exp, plugin_list[i])

    start = n_loaders
    stop = 0
    n_plugins = len(plugin_list) - 1  # minus 1 for saver
    while n_plugins != stop:
        # snapshot the input data so it can be restored after the dry run
        start_in_data = copy.deepcopy(self.exp.index['in_data'])
        in_data = exp.index["in_data"][list(exp.index["in_data"].keys())[0]]
        out_data_objs, stop = in_data._load_data(start)
        exp._clear_data_objects()
        self.exp.index['in_data'] = copy.deepcopy(start_in_data)
        self.__real_plugin_run(plugin_list, out_data_objs, start, stop)
        start = stop

    for key in exp.index["in_data"].keys():
        exp.index["in_data"][key]._close_file()
    return
def __fake_plugin_list_run(self, plugin_list, check_list, setnxs=False):
    """ Run through the plugin list without any processing (setup only)
    and fill in missing dataset names.
    """
    # plugin_list._reset_datasets_list()
    n_loaders = self.exp.meta_data.plugin_list._get_n_loaders()
    n_plugins = plugin_list._get_n_processing_plugins()
    plist = plugin_list.plugin_list

    for i in range(n_loaders):
        plugin = pu.plugin_loader(self.exp, plugin_list.plugin_list[i])

    if setnxs:
        self.exp._set_nxs_filename()

    check = [x in check_list for x in range(n_plugins)]
    count = 0
    for i in range(n_loaders, n_loaders + n_plugins):
        self.exp._barrier()
        plugin = pu.plugin_loader(self.exp, plist[i], check=check[count])
        plugin._revert_preview(plugin.get_in_datasets())
        plist[i]['cite'] = plugin.get_citation_information()
        plugin._clean_up()
        self.exp._merge_out_data_to_in()
        count += 1
def load_data(self, start):
    exp = self.exp
    plugin_list = exp.meta_data.plugin_list.plugin_list
    final_plugin = plugin_list[-1]
    saver_plugin = pu.plugin_loader(exp, final_plugin)

    logging.debug("generating all output files")
    out_data_objects = []
    count = start
    datasets_list = pu.datasets_list
    for plugin_dict in plugin_list[start:-1]:
        self.get_current_and_next_patterns(datasets_list[count - 1:])
        plugin_id = plugin_dict["id"]
        logging.info("Loading plugin %s", plugin_id)
        plugin = pu.plugin_loader(exp, plugin_dict)
        plugin.revert_preview(plugin.get_in_datasets())
        self.set_filenames(plugin, plugin_id, count)
        saver_plugin.setup()
        out_data_objects.append(exp.index["out_data"].copy())
        if self.variable_data_check(plugin):
            return out_data_objects, count
        exp.merge_out_data_to_in()
        count += 1

    del self.exp.meta_data.get_dictionary()["current_and_next"]
    return out_data_objects, count
def _load_data(self, start):
    exp = self.exp
    n_loaders = exp.meta_data.plugin_list._get_n_loaders()
    plugin_list = exp.meta_data.plugin_list.plugin_list
    final_plugin = plugin_list[-1]
    saver_plugin = pu.plugin_loader(exp, final_plugin)

    logging.debug("generating all output files")
    out_data_objects = []
    count = start
    datasets_list = exp.meta_data.plugin_list._get_datasets_list()
    for plugin_dict in plugin_list[start:-1]:
        self._get_current_and_next_patterns(datasets_list[count - n_loaders:])
        plugin_id = plugin_dict["id"]
        logging.info("Loading plugin %s", plugin_id)
        plugin = pu.plugin_loader(exp, plugin_dict)
        plugin._revert_preview(plugin.get_in_datasets())
        self.__set_filenames(plugin, plugin_id, count)
        saver_plugin.setup()
        out_data_objects.append(exp.index["out_data"].copy())
        exp._merge_out_data_to_in()
        count += 1

    self.exp.meta_data.delete('current_and_next')
    return out_data_objects, count
def transport_run_plugin_list(self):
    """ Runs a chain of plugins """
    exp = self.exp
    plugin_list = exp.meta_data.plugin_list.plugin_list
    pu.plugin_loader(exp, plugin_list[0])

    start = 1
    stop = start
    n_plugins = len(plugin_list[start:-1]) + 1
    while n_plugins != stop:
        start_in_data = copy.deepcopy(self.exp.index['in_data'])
        in_data = exp.index["in_data"][list(exp.index["in_data"].keys())[0]]
        out_data_objs, stop = in_data.load_data(start)
        exp.clear_data_objects()
        self.exp.index['in_data'] = copy.deepcopy(start_in_data)
        self.real_plugin_run(plugin_list, out_data_objs, start, stop)
        start = stop

    for key in exp.index["in_data"].keys():
        exp.index["in_data"][key].close_file()
    return
def __load_data(self):
    self._exp.meta_data.set('checkpoint_loader', True)
    # temporarily point 'data_file' at the output nexus file so the
    # savu_nexus_loader re-reads the checkpointed data, then restore it
    temp = self._exp.meta_data.get('data_file')
    nxsfile = self._exp.meta_data.get('nxs_filename')
    self._exp.meta_data.set('data_file', nxsfile)
    pid = 'savu.plugins.loaders.savu_nexus_loader'
    pu.plugin_loader(self._exp, {'id': pid, 'data': {}})
    self._exp.meta_data.delete('checkpoint_loader')
    self._exp.meta_data.set('data_file', temp)
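The checkpoint loader above follows a save/override/restore pattern on the 'data_file' metadata entry. A minimal sketch of the same pattern hardened with try/finally, so the original value is restored even if the loader raises (illustrative only, not Savu's implementation; only names from the snippet above are used):

def _load_data_safely(self):
    """Hypothetical variant of __load_data with guaranteed cleanup."""
    meta = self._exp.meta_data
    meta.set('checkpoint_loader', True)
    temp = meta.get('data_file')
    try:
        meta.set('data_file', meta.get('nxs_filename'))
        pid = 'savu.plugins.loaders.savu_nexus_loader'
        pu.plugin_loader(self._exp, {'id': pid, 'data': {}})
    finally:
        # always undo the temporary overrides
        meta.delete('checkpoint_loader')
        meta.set('data_file', temp)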
def plugin_runner_load_plugin(options):
    plugin_runner = PluginRunner(options)
    plugin_runner.exp = Experiment(options)
    plugin_list = plugin_runner.exp.meta_data.plugin_list.plugin_list
    exp = plugin_runner.exp

    pu.plugin_loader(exp, plugin_list[0])
    exp._set_nxs_filename()

    plugin_dict = plugin_list[1]
    plugin = pu.load_plugin(plugin_dict['id'])
    plugin.exp = exp
    return plugin
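plugin_runner_load_plugin builds a runner, executes the loader, and returns the first processing plugin without running it. A sketch of how a test might call it; the option keys 'data_file', 'process_file' and 'out_path' are assumptions, not confirmed by these snippets:

# hypothetical test options; key names are assumed
options = {
    'data_file': '/path/to/raw_data.nxs',
    'process_file': '/path/to/process_list.nxs',
    'out_path': '/tmp/savu_output',
}
plugin = plugin_runner_load_plugin(options)
print(plugin.name, plugin.parameters)  # inspect the loaded, un-run plugin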
def real_plugin_run(self, plugin_list, out_data_objs, start, stop):
    exp = self.exp
    for i in range(start, stop):
        link_type = "final_result" if i == len(plugin_list) - 2 else \
            "intermediate"

        exp.barrier()
        for key in out_data_objs[i - start]:
            exp.index["out_data"][key] = out_data_objs[i - start][key]

        exp.barrier()
        plugin = pu.plugin_loader(exp, plugin_list[i])

        exp.barrier()
        cu.user_message("*Running the %s plugin*" % (plugin_list[i]['id']))
        plugin.run_plugin(exp, self)

        exp.barrier()
        if self.mpi:
            cu.user_message_from_all(plugin.name,
                                     plugin.executive_summary())
        else:
            cu.user_message("%s - %s" % (plugin.name,
                                         plugin.executive_summary()))

        exp.barrier()
        out_datasets = plugin.parameters["out_datasets"]
        exp.reorganise_datasets(out_datasets, link_type)
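real_plugin_run brackets every stage with exp.barrier() so that all MPI ranks stay in step before shared data objects are swapped in and files are written. Assuming the barrier is backed by mpi4py (an assumption; its body is not shown in these snippets), the core synchronisation reduces to:

# sketch of the synchronisation exp.barrier() presumably provides,
# under the assumption of an mpi4py-backed implementation
from mpi4py import MPI

def barrier(communicator=MPI.COMM_WORLD):
    # block until every rank in the communicator reaches this call
    communicator.Barrier()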
def __plugin_setup(self, plugin_dict, count):
    """ Determine plugin specific information. """
    plugin_id = plugin_dict["id"]
    logging.debug("Loading plugin %s", plugin_id)
    # Run main_setup method
    plugin = pu.plugin_loader(self, plugin_dict)
    plugin._revert_preview(plugin.get_in_datasets())
    # Populate the metadata
    plugin._clean_up()
    data = self.index['out_data'].copy()
    return data
def plugin_runner_real_plugin_run(options):
    plugin_runner = PluginRunner(options)
    plugin_runner.exp = Experiment(options)
    plugin_list = plugin_runner.exp.meta_data.plugin_list.plugin_list
    plugin_runner._run_plugin_list_check(plugin_list)

    exp = plugin_runner.exp
    pu.plugin_loader(exp, plugin_list[0])

    start_in_data = copy.deepcopy(exp.index['in_data'])
    in_data = exp.index["in_data"][list(exp.index["in_data"].keys())[0]]
    out_data_objs, stop = in_data._load_data(1)
    exp._clear_data_objects()

    exp.index['in_data'] = copy.deepcopy(start_in_data)
    for key in out_data_objs[0]:
        exp.index["out_data"][key] = out_data_objs[0][key]

    plugin = pu.plugin_loader(exp, plugin_list[1])
    plugin._run_plugin(exp, plugin_runner)
def __fake_plugin_list_run(self, plugin_list, check_list):
    """ Run through the plugin list without any processing (setup only)
    and fill in missing dataset names.
    """
    # plugin_list._reset_datasets_list()
    n_loaders = self.exp.meta_data.plugin_list._get_n_loaders()
    n_plugins = plugin_list._get_n_processing_plugins()
    plist = plugin_list.plugin_list

    for i in range(n_loaders):
        plugin = pu.plugin_loader(self.exp, plugin_list.plugin_list[i])

    check = [x in check_list for x in range(n_plugins)]
    count = 0
    for i in range(n_loaders, n_loaders + n_plugins):
        self.exp._barrier()
        plugin = pu.plugin_loader(self.exp, plist[i], check=check[count])
        plugin._revert_preview(plugin.get_in_datasets())
        plist[i]['cite'] = plugin.get_citation_information()
        plugin._clean_up()
        self.exp._merge_out_data_to_in()
        count += 1
def _run_plugin_list_setup(self, plugin_list):
    """ Run the plugin list through the framework without executing the
    main processing.
    """
    plugin_list._check_loaders()
    self.__check_gpu()

    n_loaders = self.exp.meta_data.plugin_list._get_n_loaders()
    n_plugins = plugin_list._get_n_processing_plugins()
    plist = plugin_list.plugin_list

    # set loaders
    for i in range(n_loaders):
        pu.plugin_loader(self.exp, plist[i])
    self.exp._set_initial_datasets()

    # run all plugin setup methods and store information in experiment
    # collection
    count = 0
    for plugin_dict in plist[n_loaders:n_loaders + n_plugins]:
        self.__plugin_setup(plugin_dict, count)
        count += 1

    plugin_list._add_missing_savers(self.exp)
    # ********* transport function ***********
    self._transport_update_plugin_list()

    # check added savers
    for plugin_dict in plist[n_loaders + count:]:
        self.__plugin_setup(plugin_dict, count)
        count += 1

    self.exp._reset_datasets()
    self.exp._finalise_setup(plugin_list)
    cu.user_message("Plugin list check complete!")
def _set_loaders(self):
    n_loaders = self.meta_data.plugin_list._get_n_loaders()
    plugin_list = self.meta_data.plugin_list.plugin_list
    for i in range(n_loaders):
        pu.plugin_loader(self, plugin_list[i])
    self.initial_datasets = copy.deepcopy(self.index['in_data'])
def _transport_load_plugin(self, exp, plugin_dict):
    """ This method is called before each plugin is loaded """
    return pu.plugin_loader(exp, plugin_dict)
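All of the snippets above route plugin construction through pu.plugin_loader(exp, plugin_dict), whose body is not shown here. As a rough sketch of what such a helper has to do, assuming pu.load_plugin returns an instance (as in plugin_runner_load_plugin above) and with '_initialise' as a hypothetical name for the parameter step:

# Illustrative sketch only; the real savu.plugins.utils.plugin_loader may
# differ in signature and behaviour. 'check' mirrors the keyword seen in
# __fake_plugin_list_run above.
def plugin_loader(exp, plugin_dict, check=False):
    plugin = pu.load_plugin(plugin_dict['id'])  # resolve the class by id and instantiate
    plugin.exp = exp                            # attach the experiment context
    plugin._initialise(plugin_dict.get('data', {}), check)  # hypothetical parameter step
    plugin.setup()                              # create/connect the output datasets
    return plugin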