Example 1
 def test_run_plugin(self):
     """Verify run_plugin convenience function correctly executes"""
     root, ext = os.path.splitext(os.path.basename(self.datafile))
     output_fname = os.path.join(pathfinder.batchoutput_path(),
                                 root + ".hdf5")
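     # Verify no output saved when save_data=False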
     batchui_ctrl.run_plugin(self.toolkit_class,
                             self.datafile,
                             save_data=False)
     self.assertFalse(os.path.exists(output_fname))
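     # Verify output saved when save_data=True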
     batchui_ctrl.run_plugin(self.toolkit_class,
                             self.datafile,
                             save_data=True)
     self.assertTrue(os.path.exists(output_fname))
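     # Verify stored output matches running the plugin directly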
     plugin_names, plugin_classes = self.get_available_plugins()
     for idx in range(len(plugin_names)):
         if plugin_names[idx] == self.toolkit_class:
             plugin_instance = plugin_classes[idx]()
             plugin_instance.data = dataio.get_data(self.datafile)
             plugin_instance.run()
             expected_data = plugin_instance.data
             stored_data = dataio.get_data(output_fname)
             self.assertTrue(np.array_equal(expected_data, stored_data))
             break
     if os.path.exists(output_fname):
         try:
             os.remove(output_fname)
         except OSError:  # covers WindowsError (file in use), which subclasses OSError
             pass
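
For context, a minimal sketch of what the run_plugin convenience function exercised above might look like, reconstructed from the test's assertions. The module-level get_available_plugins helper and the dataio.save_data writer are assumptions for illustration, not the documented batchui_ctrl API:

 import os

 def run_plugin(toolkit, input_file, save_data=True, **kwargs):
     """Sketch only: run the named plugin on input_file, optionally
     saving its output to the batch output folder as <root>.hdf5."""
     plugin_names, plugin_classes = get_available_plugins()  # assumed helper
     for name, cls in zip(plugin_names, plugin_classes):
         if name == toolkit:
             plugin = cls()
             plugin.data = dataio.get_data(input_file)
             plugin.run()
             if save_data:
                 root, _ = os.path.splitext(os.path.basename(input_file))
                 output_fname = os.path.join(pathfinder.batchoutput_path(),
                                             root + ".hdf5")
                 dataio.save_data(output_fname, plugin.data)  # assumed writer
             return plugin.data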
Example 2
 def test_run_plugin_multi_datasets(self):
     """Verify run_plugin convenience function correctly handles datafiles with
     multiple datasets"""
     sample_data_folder = os.path.join(pathfinder.app_path(), 'models',
                                       'tests', 'support_files')
     sample_utwin_file = os.path.join(sample_data_folder, 'CScanData.csc')
     utwin_data = dataio.get_utwin_data(sample_utwin_file)
     expected_utwin_data = {}
     for data_type in utwin_data.keys():
         for idx in range(len(utwin_data[data_type])):
             expected_utwin_data[data_type +
                                 str(idx)] = utwin_data[data_type][idx]
     output_fnames = []
     root, ext = os.path.splitext(os.path.basename(sample_utwin_file))
     for dataset in expected_utwin_data:
         output_fnames.append(
             os.path.join(pathfinder.batchoutput_path(),
                          root + "_" + dataset + ".hdf5"))
     # Verify no output saved
     batchui_ctrl.run_plugin(self.toolkit_class,
                             sample_utwin_file,
                             save_data=False)
     for fname in output_fnames:
         self.assertFalse(os.path.exists(fname))
     # Verify output saved
     batchui_ctrl.run_plugin(self.toolkit_class,
                             sample_utwin_file,
                             save_data=True)
     for dataset in expected_utwin_data:
         if expected_utwin_data[dataset] is not None:
             fname = os.path.join(pathfinder.batchoutput_path(),
                                  root + "_" + dataset + ".hdf5")
             self.assertTrue(os.path.exists(fname))
             plugin_names, plugin_classes = self.get_available_plugins()
             for idx in range(len(plugin_names)):
                 if plugin_names[idx] == self.toolkit_class:
                     plugin_instance = plugin_classes[idx]()
                     plugin_instance.data = expected_utwin_data[dataset]
                     plugin_instance.run()
                     expected_data = plugin_instance.data
                     returned_data = dataio.get_data(fname)
                     self.assertTrue(
                         np.array_equal(expected_data, returned_data))
                     break
     for fname in output_fnames:
         try:
             if os.path.exists(fname):
                 os.remove(fname)
          except OSError:  # covers WindowsError (file in use), which subclasses OSError
              pass
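
The filenames this test expects (root + "_" + dataset + ".hdf5") imply that run_plugin flattens a multi-dataset file into one output per dataset. Here is a sketch of that flattening, mirroring the test's own data_type + str(idx) keys; dataio.save_data is again an assumed writer:

 import os

 def flatten_utwin_data(utwin_data):
     """Flatten dataio.get_utwin_data's dict of lists into a flat dict
     with one entry per dataset, keyed data_type + str(idx)."""
     flattened = {}
     for data_type, arrays in utwin_data.items():
         for idx, arr in enumerate(arrays):
             flattened[data_type + str(idx)] = arr
     return flattened

 # Hypothetical save loop for a multi-dataset file:
 utwin_data = dataio.get_utwin_data(sample_utwin_file)
 root, _ = os.path.splitext(os.path.basename(sample_utwin_file))
 for dataset, data in flatten_utwin_data(utwin_data).items():
     if data is not None:  # empty slots produce no output file
         fname = os.path.join(pathfinder.batchoutput_path(),
                              root + "_" + dataset + ".hdf5")
         dataio.save_data(fname, data)  # assumed writer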
Example 3
     for plugin_name in available_plugins_names:
         print("\t{0}".format(plugin_name))
     sys.exit(1)
 workers = multiprocessing.Pool()
 if args.multiprocess:
     print("Using multiprocessing mode, {0} simultaneous processes".format(multiprocessing.cpu_count()))
 if args.input_files:
     for _f in args.input_files:
         paths = glob.glob(_f)
         for _p in paths:
             if not args.multiprocess:
                 print("\nProcessing {0}...".format(_p))
                 if args.toolkit:
                     batchui_ctrl.run_plugin(toolkit=args.toolkit,
                                             input_file=_p,
                                             toolkit_config=args.toolkit_config,
                                             file_type=args.filetype,
                                             save_data=args.save_output)
                 else:
                     batchui_ctrl.import_data(input_file=_p,
                                              file_type=args.filetype)
             else:
                 print("\nAdding {0} to job list...".format(_p))
                 if args.toolkit:
                     workers.apply_async(batchui_ctrl.run_plugin,
                                         kwds={'toolkit': args.toolkit,
                                               'input_file': _p,
                                               'toolkit_config': args.toolkit_config,
                                               'file_type': args.filetype,
                                               'save_data': args.save_output})
                 else:
                     workers.apply_async(batchui_ctrl.import_data,
                                         kwds={'input_file': _p,
                                               'file_type': args.filetype})
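
One detail the excerpt stops short of: jobs submitted with apply_async only run to completion if the pool is closed and joined before the script exits, presumably along these lines:

 if args.multiprocess:
     workers.close()  # no further jobs may be submitted
     workers.join()   # block until every queued job finishes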