def test_import_data(self):
    """Verify import_data successfully imports data.

    Reads every dataset from the sample UTWin .csc support file, runs the
    batch import, and checks that each non-None dataset was written to an
    HDF5 file in the data folder whose contents match the original array.
    All generated output files are removed afterwards.
    """
    sample_data_folder = os.path.join(pathfinder.app_path(), 'models', 'tests',
                                      'support_files')
    sample_utwin_file = os.path.join(sample_data_folder, 'CScanData.csc')
    utwin_data = dataio.get_utwin_data(sample_utwin_file)
    # Flatten {type: [arr0, arr1, ...]} into {"type0": arr0, "type1": arr1, ...}
    # to mirror the dataset naming import_data uses for its output files.
    expected_utwin_data = {}
    for data_type in utwin_data.keys():
        for idx in range(len(utwin_data[data_type])):
            expected_utwin_data[data_type + str(idx)] = utwin_data[data_type][idx]
    output_fnames = []
    root, ext = os.path.splitext(os.path.basename(sample_utwin_file))
    for dataset in expected_utwin_data:
        output_fnames.append(os.path.join(pathfinder.data_path(),
                                          root + "_" + dataset + ".hdf5"))
    batchui_ctrl.import_data(sample_utwin_file)
    for dataset in expected_utwin_data:
        if expected_utwin_data[dataset] is not None:
            fname = os.path.join(pathfinder.data_path(),
                                 root + "_" + dataset + ".hdf5")
            self.assertTrue(os.path.exists(fname))
            self.assertTrue(np.array_equal(expected_utwin_data[dataset],
                                           dataio.get_data(fname)))
    # Best-effort cleanup of the generated files.
    for fname in output_fnames:
        try:
            if os.path.exists(fname):
                os.remove(fname)
        except OSError:
            # Covers in-use files on Windows too: WindowsError is a subclass
            # of OSError, and the bare name WindowsError does not exist on
            # non-Windows Python 3 (catching it there raises NameError).
            pass
def test_import_data(self):
    """Verify import_data successfully imports data.

    Reads every dataset from the sample UTWin .csc support file, runs the
    batch import, and checks that each non-None dataset was written to an
    HDF5 file in the data folder whose contents match the original array.
    All generated output files are removed afterwards.
    """
    sample_data_folder = os.path.join(pathfinder.app_path(), 'models', 'tests',
                                      'support_files')
    sample_utwin_file = os.path.join(sample_data_folder, 'CScanData.csc')
    utwin_data = dataio.get_utwin_data(sample_utwin_file)
    # Flatten {type: [arr0, arr1, ...]} into {"type0": arr0, "type1": arr1, ...}
    # to mirror the dataset naming import_data uses for its output files.
    expected_utwin_data = {}
    for data_type in utwin_data.keys():
        for idx in range(len(utwin_data[data_type])):
            expected_utwin_data[data_type + str(idx)] = utwin_data[data_type][idx]
    output_fnames = []
    root, ext = os.path.splitext(os.path.basename(sample_utwin_file))
    for dataset in expected_utwin_data:
        output_fnames.append(os.path.join(pathfinder.data_path(),
                                          root + "_" + dataset + ".hdf5"))
    batchui_ctrl.import_data(sample_utwin_file)
    for dataset in expected_utwin_data:
        if expected_utwin_data[dataset] is not None:
            fname = os.path.join(pathfinder.data_path(),
                                 root + "_" + dataset + ".hdf5")
            self.assertTrue(os.path.exists(fname))
            self.assertTrue(np.array_equal(expected_utwin_data[dataset],
                                           dataio.get_data(fname)))
    # Best-effort cleanup of the generated files.
    for fname in output_fnames:
        try:
            if os.path.exists(fname):
                os.remove(fname)
        except OSError:
            # Covers in-use files on Windows too: WindowsError is a subclass
            # of OSError, and the bare name WindowsError does not exist on
            # non-Windows Python 3 (catching it there raises NameError).
            pass
# Batch-process every file matching the user-supplied glob patterns.
# Single-process mode handles each file inline; multiprocess mode queues
# each file as a job on the worker pool and joins the pool at the end.
if args.multiprocess:
    print("Using multiprocessing mode, {0} simultaneous processes".format(
        multiprocessing.cpu_count()))
if args.input_files:
    for pattern in args.input_files:
        for matched_file in glob.glob(pattern):
            if args.multiprocess:
                print("\nAdding {0} to job list...".format(matched_file))
                if args.toolkit:
                    job_kwds = {'toolkit': args.toolkit,
                                'input_file': matched_file,
                                'toolkit_config': args.toolkit_config,
                                'file_type': args.filetype,
                                'save_data': args.save_output}
                    workers.apply_async(batchui_ctrl.run_plugin, kwds=job_kwds)
                else:
                    workers.apply_async(batchui_ctrl.import_data,
                                        kwds={'input_file': matched_file,
                                              'file_type': args.filetype})
            else:
                print("\nProcessing {0}...".format(matched_file))
                if args.toolkit:
                    batchui_ctrl.run_plugin(toolkit=args.toolkit,
                                            input_file=matched_file,
                                            toolkit_config=args.toolkit_config,
                                            file_type=args.filetype,
                                            save_data=args.save_output)
                else:
                    batchui_ctrl.import_data(input_file=matched_file,
                                             file_type=args.filetype)
    # NOTE(review): close/join placement reconstructed from token order of
    # the collapsed source — assumed to sit inside `if args.input_files:`;
    # confirm against the original file's indentation.
    workers.close()
    workers.join()
format(multiprocessing.cpu_count())) if args.input_files: for _f in args.input_files: paths = glob.glob(_f) for _p in paths: if not args.multiprocess: print("\nProcessing {0}...".format(_p)) if args.toolkit: batchui_ctrl.run_plugin( toolkit=args.toolkit, input_file=_p, toolkit_config=args.toolkit_config, file_type=args.filetype, save_data=args.save_output) else: batchui_ctrl.import_data(input_file=_p, file_type=args.filetype) else: print("\nAdding {0} to job list...".format(_p)) if args.toolkit: workers.apply_async(batchui_ctrl.run_plugin, kwds={ 'toolkit': args.toolkit, 'input_file': _p, 'toolkit_config': args.toolkit_config, 'file_type': args.filetype, 'save_data': args.save_output }) else: workers.apply_async(batchui_ctrl.import_data,