def run_plugin_list(self, input_file, plugin_list, processing_dir,
                    processes=["CPU0"], process=0):
    """Runs a chain of plugins over the raw data in *input_file*.

    :param input_file: The input file name.
    :type input_file: str.
    :param plugin_list: Plugin list.
    :type plugin_list: savu.data.structure.PluginList.
    :param processing_dir: Location of the processing directory.
    :type processing_dir: str.
    :param processes: Names of the available processes.
    :type processes: list of str.
    :param process: Index of this process within *processes*.
    :type process: int.
    """
    # NOTE(review): mutable default argument processes=["CPU0"] is shared
    # across calls; it appears to be read-only here, but a None sentinel
    # would be safer — confirm before changing the signature.
    data_file_handler = pu.load_raw_data(input_file)
    # *** temporary data file handler: Quick fix — drop projections whose
    # image_key is non-zero (i.e. keep only data frames).
    data_file_handler.rotation_angle = \
        data_file_handler.rotation_angle[data_file_handler.image_key == 0]
    # *** moved this from timeseries_field_correction (not currently done
    # for hdf5_transport)
    logging.debug("processing Plugins")

    # First context is opened only to discover the available engine targets.
    with closing(Context()) as context:
        targets = context.targets

    previous_plugin = None
    out_data = None
    with closing(Context(targets=targets)) as context:
        # The first plugin determines how the input data is loaded.
        plugin = pu.load_plugin(plugin_list.plugin_list[0]['id'])
        in_data = self.load_data(context, input_file, plugin)
        for plugin_dict in plugin_list.plugin_list:
            logging.debug("Loading plugin %s", plugin_dict['id'])
            plugin = pu.load_plugin(plugin_dict['id'])
            plugin.set_parameters(plugin_dict['data'])
            # Output of one plugin becomes the input of the next.
            [in_data, out_data] = self.create_data_object(
                context, in_data, out_data, previous_plugin, plugin)
            # TODO
            #out_data = plugin.get_output_data(in_data)
            logging.debug("Starting processing plugin %s", plugin_dict['id'])
            plugin.run_plugin(in_data, out_data, processes, process,
                              data_file_handler, self)
            logging.debug("Completed processing plugin %s", plugin_dict['id'])
            in_data = out_data
            previous_plugin = plugin
        group_name = "process_complete"
        self.output_data(data_file_handler, in_data, plugin_list,
                         processing_dir, group_name)
def check_loaders_and_savers(self, plugin_list):
    """Validate the ends of the plugin chain.

    The first entry must be a BaseLoader and the last a BaseSaver;
    otherwise the program exits with an explanatory message.
    """
    loader = pu.load_plugin(plugin_list[0]['id'])
    if not isinstance(loader, BaseLoader):
        sys.exit("The first plugin in the process must "
                 "inherit from BaseLoader")
    saver = pu.load_plugin(plugin_list[-1]['id'])
    if not isinstance(saver, BaseSaver):
        sys.exit("The final plugin in the process must "
                 "inherit from BaseSaver")
def testGetPlugin(self):
    """The base Plugin class loads, and its abstract hooks raise."""
    loaded = pu.load_plugin("savu.plugins.plugin")
    self.assertEqual(loaded.__class__, test_plugin.Plugin,
                     "Failed to load the correct class")
    self.assertRaises(NotImplementedError, loaded.process,
                      "test", "test", 1, 1)
    self.assertRaises(NotImplementedError, loaded.required_data_type)
def test_get_plugin(self):
    """The configured plugin should be loadable by name."""
    try:
        loaded = pu.load_plugin(self.plugin_name)
        self.assertIsNotNone(loaded)
    except ImportError as e:
        # Missing optional libraries must not fail the whole suite.
        print("Failed to run plugin test as libraries not available "
              "(%s), passing test" % (e))
def test_process(self):
    """Run the plugin under test end-to-end on generated data."""
    try:
        target = pu.load_plugin(self.plugin_name)
        if self.plugin_name == base_class_name:
            # The abstract base class only checks that process() raises.
            self.assertRaises(NotImplementedError, target.process,
                              "test", "test", 1, 1)
            return
        # load appropriate data
        data = tu.get_appropriate_input_data(target)
        self.assertGreater(len(data), 0, "Cannot find appropriate test data")
        # generate somewhere for the data to go
        output = tu.get_appropriate_output_data(target, data)
        self.assertGreater(len(output), 0,
                           "Cannot create appropriate output data")
        target.set_parameters(None)
        for in_set, out_set in zip(data, output):
            target.run_plugin(in_set, out_set, ["CPU0"], 0)
            print("Output from plugin under test ( %s ) is in %s"
                  % (target.name, out_set.backing_file.filename))
            in_set.complete()
            out_set.complete()
    except ImportError as e:
        # Missing optional libraries must not fail the whole suite.
        print("Failed to run plugin test as libraries not available "
              "(%s), passing test" % (e))
def test_process(self):
    """Run the plugin's process pass over generated input/output data."""
    try:
        target = pu.load_plugin(self.plugin_name)
        if self.plugin_name == base_class_name:
            # The abstract base class only checks that process() raises.
            self.assertRaises(NotImplementedError, target.process,
                              "test", "test", 1, 1)
            return
        # load appropriate data
        data = tu.get_appropriate_input_data(target)
        self.assertGreater(len(data), 0, "Cannot find appropriate test data")
        # generate somewhere for the data to go
        output = tu.get_appropriate_output_data(target, data)
        self.assertGreater(len(output), 0,
                           "Cannot create appropriate output data")
        target.set_parameters(None)
        for in_set, out_set in zip(data, output):
            target.run_process(in_set, out_set, ["CPU0"], 0)
            print("Output from plugin under test ( %s ) is in %s"
                  % (target.name, out_set.backing_file.filename))
            in_set.complete()
            out_set.complete()
    except ImportError as e:
        # Missing optional libraries must not fail the whole suite.
        print("Failed to run plugin test as libraries not available "
              "(%s), passing test" % (e))
def test_get_plugins_path_and_load(self):
    """A plugin on SAVU_PLUGINS_PATH is discoverable by bare name.

    FIX: the original blanked SAVU_PLUGINS_PATH to "" at the end and did
    so only on success, so a failing assertion leaked the modified
    environment into later tests.  The previous value is now captured and
    restored in a finally block.
    """
    savu_path = os.path.split(savu.__path__[0])[0]
    plugin_path = os.path.join(savu_path, "plugin_examples")
    previous = os.environ.get("SAVU_PLUGINS_PATH", "")
    os.environ["SAVU_PLUGINS_PATH"] = plugin_path
    try:
        pu.get_plugins_paths()
        plugin = pu.load_plugin("example_median_filter")
        self.assertEqual(plugin.name, "ExampleMedianFilter")
    finally:
        # Always restore the pre-test value, even if an assertion failed.
        os.environ["SAVU_PLUGINS_PATH"] = previous
def test_get_plugin(self):
    """Loading the configured plugin by name yields a real object."""
    try:
        result = pu.load_plugin(self.plugin_name)
    except ImportError as e:
        # Missing optional libraries must not fail the whole suite.
        print("Failed to run plugin test as libraries not available "
              "(%s), passing test" % (e))
    else:
        self.assertIsNotNone(result)
def plugin_runner_load_plugin(options):
    """Build a PluginRunner from *options* and return its second plugin,
    loaded and attached to the experiment (the first entry — the loader —
    is executed first to populate the experiment)."""
    plugin_runner = PluginRunner(options)
    plugin_runner.exp = Experiment(options)
    experiment = plugin_runner.exp
    chain = experiment.meta_data.plugin_list.plugin_list
    # Run the loader so the experiment has data to work with.
    pu.plugin_loader(experiment, chain[0])
    experiment._set_nxs_filename()
    loaded = pu.load_plugin(chain[1]['id'])
    loaded.exp = experiment
    return loaded
def test_pipeline(self):
    """Run the full processing chain for the configured plugin list.

    FIX: the base-class guard originally ran *after* pu.load_plugin had
    already been called on the base-class name; the guard is hoisted so
    the pointless load is skipped entirely.
    """
    logging.debug("Starting test_pipeline")
    if not hasattr(self, 'temp_dir'):
        self.temp_dir = tempfile.gettempdir()
    # The abstract base class cannot be run as a pipeline: bail out early.
    if self.plugin_list[0] == base_class_name:
        return
    first_plugin = pu.load_plugin(self.plugin_list[0])
    input_data = None
    if not hasattr(self, 'input_file'):
        # No file configured: synthesise appropriate input data.
        input_data = tu.get_appropriate_input_data(first_plugin)[0]
    else:
        input_data = RawTimeseriesData()
        input_data.populate_from_nexus(self.input_file)
    logging.debug("Starting to run the processing chain")
    process.run_plugin_chain(input_data, self.plugin_list, self.temp_dir)
def set_plugin_list(options, pnames, *args):
    """Populate options['plugin_list'] with loader, plugin and saver
    entries; per-plugin data dicts may be supplied via *args (first and
    last entries are the loader/saver dicts)."""
    args = args[0] if args else None
    plugin_names = pnames if isinstance(pnames, list) else [pnames]
    options['plugin_list'] = []
    ID = [options['loader'], options['saver']]
    data = [{}, {}] if not args else [args[0], args[-1]]
    # Splice each plugin (and its data dict) between loader and saver.
    for i, pname in enumerate(plugin_names):
        ID.insert(i+1, pname)
        loaded = pu.load_plugin(pname)
        entry = set_data_dict(['tomo'], get_output_datasets(loaded))
        if args:
            entry = args[i+1]
        data.insert(i+1, entry)
    for ident, ddict in zip(ID, data):
        name = pu.module2class(ident.split('.')[-1])
        options['plugin_list'].append(set_plugin_entry(name, ident, ddict))
def test_pipeline(self):
    """Run a full process list loaded from self.process_filename."""
    if self.process_filename is None:
        return
    work_dir = tempfile.gettempdir()
    chain = ProcessList()
    chain.populate_process_list(self.process_filename)
    loader = pu.load_plugin(chain.process_list[0]['id'])
    dataset = tu.get_appropriate_input_data(loader)[0]
    try:
        process.run_process_list(dataset, chain, work_dir)
    except ImportError as e:
        # Missing optional libraries must not fail the whole suite.
        print("Failed to run plugin test as libraries not available "
              "(%s), passing test" % (e))
def run_plugin_chain(input_data, plugin_list, processing_dir, mpi=False,
                     processes=["CPU0"], process=0):
    """Runs a chain of plugins

    :param input_data: The input data to give to the chain
    :type input_data: savu.data.structure.
    :param plugin_list: Names of all the plugins to process in order.
    :type plugin_list: list of str.
    :param processing_dir: Location of the processing directory.
    :type processing_dir: str.
    :param mpi: Whether this is running in mpi, default is false.
    :type mpi: bool.
    """
    logging.debug("Starting run_plugin_chain")
    current = input_data
    result = None
    for count, plugin_name in enumerate(plugin_list):
        logging.debug("Loading plugin %s", plugin_name)
        loaded = pu.load_plugin(plugin_name)
        # generate somewhere for the data to go
        file_name = os.path.join(processing_dir,
                                 "%02i_%s.h5" % (count, loaded.name))
        logging.debug("Creating output file : %s", file_name)
        result = pu.create_output_data(loaded, current, file_name,
                                       loaded.name, mpi)
        loaded.set_parameters(None)
        logging.debug("Starting processing plugin %s", plugin_name)
        loaded.run_plugin(current, result, processes, process)
        logging.debug("Completed processing plugin %s", plugin_name)
        # Close the previous step's data before moving on.
        if current is not result:
            current.complete()
        current = result
        if mpi:
            logging.debug("MPI awaiting barrier")
            MPI.COMM_WORLD.barrier()
    if result is not None:
        result.complete()
def process_init(path2plugin, inputs, parameters):
    """Load and fully initialise the plugin at *path2plugin*.

    Wires the plugin to an experiment built from *inputs*, applies
    *parameters*, runs setup and the pre-process hooks, and returns
    ``(plugin, axis_labels, axis_values)`` describing the first output
    dataset.

    FIXES:
    - ``path2plugin.strip('.py')`` strips any of the characters '.', 'p',
      'y' from BOTH ends (e.g. ``'happy.py'`` -> ``'ha'``); replaced with a
      proper suffix removal.
    - ``print "I went here"`` was a Python-2 print statement (a syntax
      error on Python 3); converted to a version-portable call.
    """
    parameters['in_datasets'] = [inputs['dataset_name']]
    parameters['out_datasets'] = [inputs['dataset_name']]
    # Remove the '.py' extension only when it is actually a suffix.
    module_path = path2plugin[:-3] if path2plugin.endswith('.py') \
        else path2plugin
    plugin = load_plugin(module_path)
    plugin.exp = setup_exp_and_data(inputs, inputs['data'], plugin)
    plugin._set_parameters(parameters)
    plugin._set_plugin_datasets()
    plugin.setup()
    axis_labels = plugin.get_out_datasets()[0].get_axis_label_keys()
    axis_labels.remove('idx')
    # get the labels
    axis_values = {}
    plugin._clean_up()  # this copies the metadata!
    for label in axis_labels:
        axis_values[label] = \
            plugin.get_out_datasets()[0].meta_data.get_meta_data(label)
    plugin.base_pre_process()
    plugin.pre_process()
    print("I went here")  # NOTE(review): debug leftover — consider removing
    return plugin, axis_labels, axis_values
def test_pipeline(self):
    """Execute the configured process list against generated input data."""
    if self.process_filename is None:
        return
    out_dir = tempfile.gettempdir()
    plist = ProcessList()
    plist.populate_process_list(self.process_filename)
    first = pu.load_plugin(plist.process_list[0]['id'])
    in_data = tu.get_appropriate_input_data(first)[0]
    try:
        process.run_process_list(in_data, plist, out_dir)
    except ImportError as e:
        # Missing optional libraries must not fail the whole suite.
        print("Failed to run plugin test as libraries not available "
              "(%s), passing test" % (e))
def run_plugin_list(self, input_file, plugin_list, processing_dir, mpi=False,
                    processes=["CPU0"], process=0):
    """Runs a chain of plugins

    :param input_file: The input file name.
    :type input_file: str.
    :param plugin_list: Plugin list.
    :type plugin_list: savu.data.structure.PluginList.
    :param processing_dir: Location of the processing directory.
    :type processing_dir: str.
    :param mpi: Whether this is running in mpi, default is false.
    :type mpi: bool.

    FIX: the citation/data-link section originally tested ``if plugin == 0``,
    which compares a plugin *instance* against 0 and is never true, so
    citations and intermediate data links were never written.  The intent
    (consistent with the ``process == 0`` guard used for saving the list
    above) is that only rank 0 writes them.
    """
    input_data = pu.load_raw_data(input_file)
    logging.debug("Running plugin list, just a check")
    filename = os.path.basename(input_data.backing_file.filename)
    filename = os.path.splitext(filename)[0]
    output_filename = \
        os.path.join(processing_dir,
                     "%s_processed_%s.nxs" % (filename,
                                              time.strftime("%Y%m%d%H%M%S")))
    if process == 0:
        logging.debug("Running process List.save_list_to_file")
        plugin_list.save_list_to_file(output_filename)

    in_data = input_data
    output = None
    # Pass 1: create every intermediate output file up front.
    logging.debug("generating all output files")
    files = []
    count = 0
    for plugin_dict in plugin_list.plugin_list:
        logging.debug("Loading plugin %s", plugin_dict['id'])
        plugin = pu.load_plugin(plugin_dict['id'])
        # generate somewhere for the data to go
        file_name = os.path.join(
            processing_dir, "%s%02i_%s.h5" % (plugin_list.name, count,
                                              plugin_dict['id']))
        group_name = "%i-%s" % (count, plugin.name)
        logging.debug("Creating output file %s", file_name)
        output = pu.create_output_data(plugin, in_data, file_name,
                                       group_name, mpi)
        files.append(output)
        in_data = output
        count += 1

    # Pass 2: run each plugin, chaining outputs to inputs.
    logging.debug("processing Plugins")
    in_data = input_data
    count = 0
    for plugin_dict in plugin_list.plugin_list:
        logging.debug("Loading plugin %s", plugin_dict['id'])
        plugin = pu.load_plugin(plugin_dict['id'])
        output = files[count]
        plugin.set_parameters(plugin_dict['data'])
        logging.debug("Starting processing plugin %s", plugin_dict['id'])
        plugin.run_plugin(in_data, output, processes, process, self)
        logging.debug("Completed processing plugin %s", plugin_dict['id'])
        if in_data is not output:
            in_data.complete()
        in_data = output
        if mpi:
            logging.debug("Blocking till all processes complete")
            MPI.COMM_WORLD.Barrier()
        # Only rank 0 records citations and intermediate data links
        # (was: `if plugin == 0`, which never fired).
        if process == 0:
            cite_info = plugin.get_citation_information()
            if cite_info is not None:
                plugin_list.add_plugin_citation(output_filename, count,
                                                cite_info)
            group_name = "%i-%s" % (count, plugin.name)
            plugin_list.add_intermediate_data_link(output_filename,
                                                   output, group_name)
        count += 1

    if output is not None:
        output.complete()
def plugin_setup(self):
    """Load and return the astra CPU reconstruction plugin."""
    module_path = 'savu.plugins.reconstructions.astra_recons.astra_recon_cpu'
    return pu.load_plugin(module_path)
# Rank 0 creates the shared output file and broadcasts its name so every
# MPI process writes to the same file.
global_data = None
if RANK == 0:
    temp_file = tempfile.NamedTemporaryFile(dir=options.directory,
                                            suffix='.h5', delete=False)
    logging.debug("Created output file name is : %s", temp_file.name)
    global_data = {'file_name': temp_file.name}
    logging.debug("Plugin List is:")
    logging.debug(plugin_list)

global_data = MPI.COMM_WORLD.bcast(global_data, root=0)
logging.debug("Output file name is : %s", global_data['file_name'])

logging.debug("Loading first plugin %s", plugin_list[0])
first_plugin = pu.load_plugin(None, plugin_list[0])
logging.debug("Getting input data")
input_data = tu.get_appropriate_input_data(first_plugin)[0]
logging.debug("Running plugin chain")
# Every rank participates; `process=RANK` identifies this rank.
process.run_plugin_chain(input_data, plugin_list, options.directory,
                         mpi=True, processes=ALL_PROCESSES, process=RANK)
MPI.COMM_WORLD.barrier()
def test_get_plugin_external_path(self):
    """A plugin can be loaded from an absolute filesystem path."""
    savu_root = os.path.split(savu.__path__[0])[0]
    external = os.path.join(savu_root, "plugin_examples",
                            "example_median_filter")
    loaded = pu.load_plugin(external)
    self.assertEqual(loaded.name, "ExampleMedianFilter")
logging.debug("ip address is : %s", IP)
MPI.COMM_WORLD.barrier()

# Rank 0 creates the shared output file and broadcasts its name so every
# MPI process writes to the same file.
global_data = None
if RANK == 0:
    temp_file = tempfile.NamedTemporaryFile(dir=options.directory,
                                            suffix='.h5', delete=False)
    logging.debug("Created output file name is : %s", temp_file.name)
    global_data = {'file_name': temp_file.name}
    logging.debug("Plugin List is:")
    logging.debug(plugin_list)

global_data = MPI.COMM_WORLD.bcast(global_data, root=0)
logging.debug("Output file name is : %s", global_data['file_name'])

logging.debug("Loading first plugin %s", plugin_list[0])
first_plugin = pu.load_plugin(None, plugin_list[0])
logging.debug("Getting input data")
input_data = tu.get_appropriate_input_data(first_plugin)[0]
logging.debug("Running plugin chain")
# Every rank participates; `process=RANK` identifies this rank.
process.run_plugin_chain(input_data, plugin_list, options.directory,
                         mpi=True, processes=ALL_PROCESSES, process=RANK)
MPI.COMM_WORLD.barrier()
def plugin_setup(self):
    """Load and return the scikit-image SART reconstruction plugin."""
    module_path = 'savu.plugins.reconstructions.scikitimage_sart'
    return pu.load_plugin(module_path)
def testGetPlugin(self):
    """The base Plugin class loads and process() is abstract."""
    loaded = pu.load_plugin("savu.plugins.plugin")
    self.assertEqual(loaded.__class__, test_plugin.Plugin,
                     "Failed to load the correct class")
    self.assertRaises(NotImplementedError, loaded.process, None, None, None)
logging.debug("Rank : %i - Size : %i", RANK, SIZE)
IP = socket.gethostbyname(socket.gethostname())
logging.debug("ip address is : %s", IP)
call_mpi_barrier()

# NOTE(review): mid-file import; left in place to keep this a
# documentation-only change — it belongs at the top of the file.
import os
logging.debug(os.getenv('LD_LIBRARY_PATH'))
call_mpi_barrier()

# Build the process list from the test-data file and run it across all
# MPI ranks; `process=RANK` identifies this rank.
process_filename = tu.get_test_data_path(options.process_filename)
process_list = ProcessList()
process_list.populate_process_list(process_filename)
first_plugin = pu.load_plugin(process_list.process_list[0]['id'])
input_data = tu.get_appropriate_input_data(first_plugin)[0]
process.run_process_list(input_data, process_list, options.directory,
                         mpi=True, processes=ALL_PROCESSES, process=RANK)
call_mpi_barrier()
def test_get_plugin(self):
    """Loading the configured plugin name yields a real object."""
    loaded = pu.load_plugin(self.plugin_name)
    self.assertIsNotNone(loaded)
# Rank 0 creates the shared output file and broadcasts its name so every
# MPI process writes to the same file.
global_data = None
if RANK == 0:
    temp_file = tempfile.NamedTemporaryFile(dir=options.directory,
                                            suffix='.h5', delete=False)
    logging.debug("Created output file name is : %s", temp_file.name)
    global_data = {'file_name': temp_file.name}

global_data = MPI.COMM_WORLD.bcast(global_data, root=0)
logging.debug("Output file name is : %s", global_data['file_name'])

# Only CPU-designated machines run the plugin under test.
if 'CPU' in MACHINE_RANK_NAME:
    logging.debug("Loading plugin %s", options.plugin)
    plugin = pu.load_plugin(None, options.plugin)
    logging.debug("Loaded plugin %s", options.plugin)
    # load appropriate data
    logging.debug("Loading test data")
    data = tu.get_appropriate_input_data(plugin)
    if data is None:
        logging.error("Cannot create appropriate input data")
        raise Exception("Cannot create appropriate input data")
    # generate somewhere for the data to go
    logging.debug("Sorting out output data")
    output = \
        tu.get_appropriate_output_data(plugin, data, mpi=True,
                                       file_name=global_data['file_name'])
logging.info("Starting the test process")
logging.debug("Rank : %i - Size : %i", RANK, SIZE)
IP = socket.gethostbyname(socket.gethostname())
logging.debug("ip address is : %s", IP)
call_mpi_barrier()

# NOTE(review): mid-file import; left in place to keep this a
# documentation-only change — it belongs at the top of the file.
import os
logging.debug(os.getenv("LD_LIBRARY_PATH"))
call_mpi_barrier()

# Build the process list from the test-data file and run it across all
# MPI ranks; `process=RANK` identifies this rank.
process_filename = tu.get_test_data_path(options.process_filename)
process_list = ProcessList()
process_list.populate_process_list(process_filename)
first_plugin = pu.load_plugin(process_list.process_list[0]["id"])
input_data = tu.get_appropriate_input_data(first_plugin)[0]
process.run_process_list(
    input_data, process_list, options.directory, mpi=True,
    processes=ALL_PROCESSES, process=RANK
)
call_mpi_barrier()
def testfind_args(self):
    """find_args reports five parameters for the denoise filter plugin."""
    loaded = pu.load_plugin("savu.plugins.filters.denoise_bregman_filter")
    discovered = pu.find_args(loaded)
    self.assertEqual(len(discovered), 5)
def test_get_plugin_external_path(self):
    """A plugin outside the savu package loads from a full path."""
    base = os.path.split(savu.__path__[0])[0]
    target = os.path.join(base, "plugin_examples", "example_median_filter")
    result = pu.load_plugin(target)
    self.assertEqual(result.name, "ExampleMedianFilter")
def testfind_args(self):
    """find_args reports four parameters for the denoise filter plugin."""
    loaded = pu.load_plugin("savu.plugins.denoise_bregman_filter")
    discovered = pu.find_args(loaded)
    self.assertEqual(len(discovered), 4)
MPI.COMM_WORLD.barrier()
logging.info("Starting the test process")
logging.debug("Rank : %i - Size : %i", RANK, SIZE)
IP = socket.gethostbyname(socket.gethostname())
logging.debug("ip address is : %s", IP)
MPI.COMM_WORLD.barrier()

# NOTE(review): mid-file import; left in place to keep this a
# documentation-only change — it belongs at the top of the file.
import os
logging.debug(os.getenv('LD_LIBRARY_PATH'))
MPI.COMM_WORLD.barrier()

# Build the process list from the test-data file and run it across all
# MPI ranks; `process=RANK` identifies this rank.
process_filename = tu.get_test_data_path(options.process_filename)
process_list = ProcessList()
process_list.populate_process_list(process_filename)
first_plugin = pu.load_plugin(process_list.process_list[0]['id'])
input_data = tu.get_appropriate_input_data(first_plugin)[0]
process.run_process_list(input_data, process_list, options.directory,
                         mpi=True, processes=ALL_PROCESSES, process=RANK)
MPI.COMM_WORLD.barrier()
def testGetPlugin(self):
    """The base Plugin loads and process_frames() is abstract."""
    loaded = pu.load_plugin("savu.plugins.plugin")
    self.assertEqual(loaded.__class__, test_plugin.Plugin,
                     "Failed to load the correct class")
    self.assertRaises(NotImplementedError, loaded.process_frames, None)
def test_create_smaller_data_block(self):
    """Downsample filter output data has the expected reduced shape."""
    tomo_data = tu.get_nx_tomo_test_data()
    downsampler = pu.load_plugin("savu.plugins.downsample_filter")
    result = tu.get_appropriate_output_data(downsampler, tomo_data)[0]
    self.assertEqual(result.get_data_shape(), (111, 68, 80))