def test_import_winspect(self):
    """Verify importing datasets.

    Imports the sample Winspect scan, then re-reads the raw file and
    checks each dataset was written to the data folder as HDF5 with
    matching contents.  Output files are removed afterwards.
    """
    output_basename, ext = os.path.splitext(self.sample_data_file)
    amp_dest_file = os.path.join(
        pathfinder.data_path(),
        os.path.basename(output_basename) + "_ampdata0" + ext + ".hdf5")
    waveform_dest_file = os.path.join(
        pathfinder.data_path(),
        os.path.basename(output_basename) + "_waveformdata0" + ext + ".hdf5")
    self.scan_reader.import_winspect()
    data_reader = dataio.WinspectDataFile(self.sample_data_file)
    data_reader.read_data()
    expected_data_list = data_reader.datasets
    for dataset in expected_data_list:
        if "amplitude" in dataset.data_type:
            dest_file = amp_dest_file
        elif "waveform" in dataset.data_type:
            dest_file = waveform_dest_file
        else:
            # Unknown dataset type - skip rather than re-check the previous
            # iteration's file (dest_file would be stale or unbound).
            continue
        with h5py.File(dest_file, "r") as fidin:
            root, ext = os.path.splitext(os.path.basename(dest_file))
            for key in fidin.keys():
                if key.startswith(root):
                    read_data = fidin[key][...]
                    self.assertTrue(np.array_equal(dataset.data, read_data))
        try:
            if os.path.exists(dest_file):
                os.remove(dest_file)
        except OSError:  # file in use; WindowsError is undefined off Windows
            pass
def test_import_winspect(self):
    """Verify import of Winspect data through convenience function.

    Runs dataio.import_winspect on the sample .sdt file, then compares
    each imported HDF5 dataset against dataio.get_winspect_data.
    Output files are removed afterwards.
    """
    sample_data_file = os.path.join(os.path.dirname(__file__),
                                    'support_files', 'sample_data.sdt')
    assert os.path.exists(sample_data_file)
    output_basename, ext = os.path.splitext(sample_data_file)
    amp_dest_file = os.path.join(
        pathfinder.data_path(),
        os.path.basename(output_basename) + "_ampdata0" + ext + ".hdf5")
    waveform_dest_file = os.path.join(
        pathfinder.data_path(),
        os.path.basename(output_basename) + "_waveformdata0" + ext + ".hdf5")
    dataio.import_winspect(sample_data_file)
    expected_data_list = dataio.get_winspect_data(sample_data_file)
    for dataset in expected_data_list:
        if "amplitude" in dataset.data_type:
            dest_file = amp_dest_file
        elif "waveform" in dataset.data_type:
            dest_file = waveform_dest_file
        else:
            # Unknown dataset type - skip rather than re-check the previous
            # iteration's file (dest_file would be stale or unbound).
            continue
        with h5py.File(dest_file, "r") as fidin:
            root, ext = os.path.splitext(os.path.basename(dest_file))
            for key in fidin.keys():
                if key.startswith(root):
                    read_data = fidin[key][...]
                    self.assertTrue(np.array_equal(dataset.data, read_data))
        try:
            if os.path.exists(dest_file):
                os.remove(dest_file)
        except OSError:  # file in use; WindowsError is undefined off Windows
            pass
def test_import_data(self):
    """Verify import_data successfully imports data.

    Imports a sample UTWin file via batchui_ctrl.import_data and checks
    that each expected dataset was written to the data folder as an HDF5
    file whose contents match dataio.get_utwin_data.  Output files are
    removed afterwards.
    """
    sample_data_folder = os.path.join(pathfinder.app_path(), 'models',
                                      'tests', 'support_files')
    sample_utwin_file = os.path.join(sample_data_folder, 'CScanData.csc')
    utwin_data = dataio.get_utwin_data(sample_utwin_file)
    expected_utwin_data = {}
    for data_type in utwin_data.keys():
        for idx in range(len(utwin_data[data_type])):
            expected_utwin_data[data_type + str(idx)] = utwin_data[data_type][idx]
    output_fnames = []
    root, ext = os.path.splitext(os.path.basename(sample_utwin_file))
    for dataset in expected_utwin_data:
        output_fnames.append(os.path.join(pathfinder.data_path(),
                                          root + "_" + dataset + ".hdf5"))
    batchui_ctrl.import_data(sample_utwin_file)
    for dataset in expected_utwin_data:
        if expected_utwin_data[dataset] is not None:
            fname = os.path.join(pathfinder.data_path(),
                                 root + "_" + dataset + ".hdf5")
            self.assertTrue(os.path.exists(fname))
            self.assertTrue(np.array_equal(expected_utwin_data[dataset],
                                           dataio.get_data(fname)))
    for fname in output_fnames:
        try:
            if os.path.exists(fname):
                os.remove(fname)
        except OSError:
            # File in use or other OS error.  WindowsError is undefined on
            # non-Windows platforms (NameError) and is a subclass of
            # OSError on Windows, so OSError covers both cases.
            pass
def test_import_winspect(self):
    """Verify importing datasets.

    Imports the sample Winspect scan, then re-reads the raw file and
    checks each dataset was written to the data folder as HDF5 with
    matching contents.  Output files are removed afterwards.
    """
    output_basename, ext = os.path.splitext(self.sample_data_file)
    amp_dest_file = os.path.join(
        pathfinder.data_path(),
        os.path.basename(output_basename) + "_ampdata0" + ext + ".hdf5")
    waveform_dest_file = os.path.join(
        pathfinder.data_path(),
        os.path.basename(output_basename) + "_waveformdata0" + ext + ".hdf5")
    self.scan_reader.import_winspect()
    data_reader = dataio.WinspectDataFile(self.sample_data_file)
    data_reader.read_data()
    expected_data_list = data_reader.datasets
    for dataset in expected_data_list:
        if "amplitude" in dataset.data_type:
            dest_file = amp_dest_file
        elif "waveform" in dataset.data_type:
            dest_file = waveform_dest_file
        else:
            # Unknown dataset type - skip rather than re-check the previous
            # iteration's file (dest_file would be stale or unbound).
            continue
        with h5py.File(dest_file, "r") as fidin:
            root, ext = os.path.splitext(os.path.basename(dest_file))
            for key in fidin.keys():
                if key.startswith(root):
                    read_data = fidin[key][...]
                    self.assertTrue(np.array_equal(dataset.data, read_data))
        try:
            if os.path.exists(dest_file):
                os.remove(dest_file)
        except OSError:  # file in use; WindowsError is undefined off Windows
            pass
def test_import_data(self):
    """Verify import_data successfully imports data.

    Imports a sample UTWin file via batchui_ctrl.import_data and checks
    that each expected dataset was written to the data folder as an HDF5
    file whose contents match dataio.get_utwin_data.  Output files are
    removed afterwards.
    """
    sample_data_folder = os.path.join(pathfinder.app_path(), 'models',
                                      'tests', 'support_files')
    sample_utwin_file = os.path.join(sample_data_folder, 'CScanData.csc')
    utwin_data = dataio.get_utwin_data(sample_utwin_file)
    expected_utwin_data = {}
    for data_type in utwin_data.keys():
        for idx in range(len(utwin_data[data_type])):
            expected_utwin_data[data_type + str(idx)] = utwin_data[data_type][idx]
    output_fnames = []
    root, ext = os.path.splitext(os.path.basename(sample_utwin_file))
    for dataset in expected_utwin_data:
        output_fnames.append(os.path.join(pathfinder.data_path(),
                                          root + "_" + dataset + ".hdf5"))
    batchui_ctrl.import_data(sample_utwin_file)
    for dataset in expected_utwin_data:
        if expected_utwin_data[dataset] is not None:
            fname = os.path.join(pathfinder.data_path(),
                                 root + "_" + dataset + ".hdf5")
            self.assertTrue(os.path.exists(fname))
            self.assertTrue(np.array_equal(expected_utwin_data[dataset],
                                           dataio.get_data(fname)))
    for fname in output_fnames:
        try:
            if os.path.exists(fname):
                os.remove(fname)
        except OSError:
            # File in use or other OS error.  WindowsError is undefined on
            # non-Windows platforms (NameError) and is a subclass of
            # OSError on Windows, so OSError covers both cases.
            pass
def test_import_winspect(self):
    """Verify import of Winspect data through convenience function.

    Runs dataio.import_winspect on the sample .sdt file, then compares
    each imported HDF5 dataset against dataio.get_winspect_data.
    Output files are removed afterwards.
    """
    sample_data_file = os.path.join(os.path.dirname(__file__),
                                    'support_files', 'sample_data.sdt')
    assert os.path.exists(sample_data_file)
    output_basename, ext = os.path.splitext(sample_data_file)
    amp_dest_file = os.path.join(
        pathfinder.data_path(),
        os.path.basename(output_basename) + "_ampdata0" + ext + ".hdf5")
    waveform_dest_file = os.path.join(
        pathfinder.data_path(),
        os.path.basename(output_basename) + "_waveformdata0" + ext + ".hdf5")
    dataio.import_winspect(sample_data_file)
    expected_data_list = dataio.get_winspect_data(sample_data_file)
    for dataset in expected_data_list:
        if "amplitude" in dataset.data_type:
            dest_file = amp_dest_file
        elif "waveform" in dataset.data_type:
            dest_file = waveform_dest_file
        else:
            # Unknown dataset type - skip rather than re-check the previous
            # iteration's file (dest_file would be stale or unbound).
            continue
        with h5py.File(dest_file, "r") as fidin:
            root, ext = os.path.splitext(os.path.basename(dest_file))
            for key in fidin.keys():
                if key.startswith(root):
                    read_data = fidin[key][...]
                    self.assertTrue(np.array_equal(dataset.data, read_data))
        try:
            if os.path.exists(dest_file):
                os.remove(dest_file)
        except OSError:  # file in use; WindowsError is undefined off Windows
            pass
def populate_tree(self):
    """Populates the view's tree with the contents in the data folder."""
    self.clear_tree()
    data_root = self.view.data_tree_root
    # Map of folder path -> tree node, seeded with the data folder root
    folder_nodes = {pathfinder.data_path(): data_root}
    for data_file in self.model.find_data():
        folder = os.path.dirname(data_file)
        if folder not in folder_nodes:
            # First file seen in this subfolder: add a branch named by
            # the folder's path relative to the data folder
            rel_name = os.path.relpath(folder, pathfinder.data_path())
            folder_nodes[folder] = self.view.data_tree.AppendItem(data_root,
                                                                  rel_name)
        parent_node = folder_nodes.get(folder, data_root)
        item = self.view.data_tree.AppendItem(parent_node,
                                              os.path.basename(data_file))
        self.view.data_tree.SetPyData(item, data_file)
def populate_tree(self):
    """Populates the view's tree with the contents in the data folder."""
    self.clear_tree()
    data_root = self.view.data_tree_root
    # Map of folder path -> tree node, seeded with the data folder root
    folder_nodes = {pathfinder.data_path(): data_root}
    for data_file in self.model.find_data():
        folder = os.path.dirname(data_file)
        if folder not in folder_nodes:
            # First file seen in this subfolder: add a branch named by
            # the folder's path relative to the data folder
            rel_name = os.path.relpath(folder, pathfinder.data_path())
            folder_nodes[folder] = self.view.data_tree.AppendItem(data_root,
                                                                  rel_name)
        parent_node = folder_nodes.get(folder, data_root)
        item = self.view.data_tree.AppendItem(parent_node,
                                              os.path.basename(data_file))
        self.view.data_tree.SetItemData(item, data_file)
def import_txt(data_fname, **import_params):
    """Loads the data from an ASCII-delimited text file, and copies the data
    to a new HDF5 file in the data folder"""
    data = get_txt_data(data_fname, **import_params)
    # Nothing to save when the read produced no data
    if data is None or data.size == 0:
        return
    output_fname = os.path.join(pathfinder.data_path(),
                                os.path.basename(data_fname))
    save_data(output_fname, data)
def import_img(data_file, flatten=True):
    """Imports an image file, by default flattening the image to a single
    layer grayscale."""
    img_arr = get_img_data(data_file, flatten)
    # Nothing to save when the read produced no data
    if img_arr is None or img_arr.size == 0:
        return
    img_fname = os.path.join(pathfinder.data_path(),
                             os.path.basename(data_file))
    save_data(img_fname, img_arr)
def import_dicom(data_file):
    """Imports a DICOM/DICONDE pixel map"""
    data = get_dicom_data(data_file)
    # Nothing to save when the read produced no data
    if data is None or data.size == 0:
        return
    di_fname = os.path.join(pathfinder.data_path(),
                            os.path.basename(data_file))
    save_data(di_fname, data)
def find_data(self):
    """Returns a list of the files found in the data folder"""
    # Walk the data folder recursively, collecting full paths to all files
    return [os.path.join(folder, fname)
            for folder, subfolders, fnames in os.walk(pathfinder.data_path())
            for fname in fnames]
def on_browse(self, evt):
    """Handles request to open data folder. If a data file is not selected
    in the parent, opens the current user's root data folder instead."""
    def show_error(message):
        # Present a modal error dialog with the given message
        err_dlg = wx.MessageDialog(self.parent, message=message,
                                   caption="Unable To Open Folder",
                                   style=wx.ICON_ERROR)
        err_dlg.ShowModal()
        err_dlg.Destroy()

    if self.parent.data is not None:
        browse_fldr = os.path.dirname(self.parent.data)
    else:
        browse_fldr = pathfinder.data_path()
    try:
        open_file.open_file(browse_fldr)
    except IOError as err:  # file not found
        module_logger.error("Unable to find folder: {0}".format(err))
        show_error("Unable to find folder '{0}'.\nPlease ensure the folder exists.".format(browse_fldr))
    except OSError as err:  # other OS error
        module_logger.error("Unable to browse to data folder (OS error): {0}".format(err))
        show_error("Unable to browse to data folder, error reported was:\n{0}".format(err))
def on_slice_data(self, evt):
    """Handles request to export a slice of data"""
    slice_dlg = dlg.ExportSliceDialog(parent=self.view,
                                      datafile=self.view.data_panel.data)
    if slice_dlg.ShowModal() == wx.ID_OK:
        try:
            wx.BeginBusyCursor()
            sliced_data = dataio.get_data(self.view.data_panel.data,
                                          slice_dlg.get_slice())
            # Suggest a default output name derived from the source file
            default_fname = "_".join(
                ["sliced", os.path.basename(self.view.data_panel.data)])
            fname_dlg = wx.TextEntryDialog(
                parent=self.view,
                message="Please specify a filename for the sliced data.",
                caption="Save Sliced Data",
                defaultValue=default_fname)
            if fname_dlg.ShowModal() == wx.ID_OK:
                dest_fname = os.path.join(pathfinder.data_path(),
                                          fname_dlg.GetValue())
                dataio.save_data(dest_fname, sliced_data)
                self.view.data_panel.populate()
        except TypeError:
            # Requested dimensions out of range for the data
            err_dlg = wx.MessageDialog(
                self.view,
                message="Specified dimensions out of range for this data.",
                caption="Unable To Slice Data", style=wx.ICON_ERROR)
            err_dlg.ShowModal()
            err_dlg.Destroy()
        except ValueError:
            # Zero-length slices and similar invalid requests
            err_dlg = wx.MessageDialog(
                self.view,
                message="Zero-length slices are not permitted.",
                caption="Unable To Slice Data", style=wx.ICON_ERROR)
            err_dlg.ShowModal()
            err_dlg.Destroy()
        finally:
            wx.EndBusyCursor()
    slice_dlg.Destroy()
def test_import_dicom(self):
    """Verify import of DICOM / DICONDE data.

    Loads the ASTM DICONDE example files, imports each through
    dataio.import_dicom, then verifies the resulting HDF5 arrays match
    the original pixel data.  Output files are removed afterwards.
    """
    import dicom
    diconde_folder = os.path.join(os.path.dirname(__file__), 'support_files')
    for root, dirs, files in os.walk(diconde_folder):
        for fname in files:
            dicom_data_file = os.path.join(root, fname)
            basename, ext = os.path.splitext(dicom_data_file)
            # Simple check to ensure we're looking at DICOM files
            if ext.lower() == '.dcm':
                dicom_data = dicom.read_file(dicom_data_file)
                dicom_arr = dicom_data.pixel_array
                dataio.import_dicom(dicom_data_file)
                dest_file = os.path.join(
                    pathfinder.data_path(),
                    os.path.basename(dicom_data_file) + ".hdf5")
                self.assertTrue(os.path.exists(dest_file))
                with h5py.File(dest_file, "r") as fidin:
                    froot, ext = os.path.splitext(os.path.basename(dest_file))
                    for key in fidin.keys():
                        if key.startswith(froot):
                            read_data = fidin[key][...]
                            self.assertTrue(np.array_equal(dicom_arr,
                                                           read_data))
                try:
                    if os.path.exists(dest_file):
                        os.remove(dest_file)
                except OSError:
                    # File in use.  WindowsError is undefined on
                    # non-Windows platforms; OSError covers both.
                    pass
def test_copy_data(self):
    """Verify copying of sample data file to data folder"""
    self.model.copy_data(self.sample_data_file)
    # The copy should land in the data folder under the same basename
    destination = os.path.join(pathfinder.data_path(),
                               self.sample_data_basename)
    self.assertTrue(os.path.exists(destination))
    os.remove(destination)
def test_import_amp(self):
    """Verify import of amplitude data.

    Imports the sample C-scan's amplitude data and checks the resulting
    HDF5 file matches the pre-saved expected array.  The output file is
    removed afterwards.
    """
    amp_data_file = os.path.join(os.path.dirname(__file__), 'support_files',
                                 'CScanData_ampdata.npy')
    csc_data_file = os.path.join(os.path.dirname(__file__), 'support_files',
                                 'CScanData')
    assert os.path.exists(amp_data_file)
    expected_amp_data = np.load(amp_data_file)
    dest_file = os.path.join(
        pathfinder.data_path(),
        os.path.basename(csc_data_file) + "_ampdata0.csc.hdf5")
    self.cscan_datafile.import_amplitude_data()
    self.assertTrue(os.path.exists(dest_file))
    with h5py.File(dest_file, "r") as fidin:
        root, ext = os.path.splitext(os.path.basename(dest_file))
        for key in fidin.keys():
            if key.startswith(root):
                read_data = fidin[key][...]
                self.assertTrue(np.array_equal(expected_amp_data, read_data))
    try:
        if os.path.exists(dest_file):
            os.remove(dest_file)
    except OSError:  # file in use; WindowsError is undefined off Windows
        pass
def test_import_tof(self):
    """Verify import of Time Of Flight data.

    Imports the sample C-scan's TOF data and checks the resulting HDF5
    file matches the pre-saved expected array scaled by the TOF
    resolution.  The output file is removed afterwards.
    """
    tof_data_file = os.path.join(os.path.dirname(__file__), 'support_files',
                                 'CScanData_tofdata.npy')
    tof_resolution = 0.01
    csc_data_file = os.path.join(os.path.dirname(__file__), 'support_files',
                                 'CScanData')
    assert os.path.exists(tof_data_file)
    expected_tof_data = np.load(tof_data_file) * tof_resolution
    dest_file = os.path.join(
        pathfinder.data_path(),
        os.path.basename(csc_data_file) + "_tofdata0.csc.hdf5")
    self.cscan_datafile.import_tof_data()
    self.assertTrue(os.path.exists(dest_file))
    with h5py.File(dest_file, "r") as fidin:
        root, ext = os.path.splitext(os.path.basename(dest_file))
        for key in fidin.keys():
            if key.startswith(root):
                read_data = fidin[key][...]
                # Scaled floats: compare approximately, not exactly
                numpy.testing.assert_array_almost_equal(expected_tof_data,
                                                        read_data, decimal=3)
    try:
        if os.path.exists(dest_file):
            os.remove(dest_file)
    except OSError:  # file in use; WindowsError is undefined off Windows
        pass
def test_import_dicom(self):
    """Verify import of DICOM / DICONDE data.

    Loads the ASTM DICONDE example files, imports each through
    dataio.import_dicom, then verifies the resulting HDF5 arrays match
    the original pixel data.  Output files are removed afterwards.
    """
    import dicom
    diconde_folder = os.path.join(os.path.dirname(__file__), 'support_files')
    for root, dirs, files in os.walk(diconde_folder):
        for fname in files:
            dicom_data_file = os.path.join(root, fname)
            basename, ext = os.path.splitext(dicom_data_file)
            # Simple check to ensure we're looking at DICOM files
            if ext.lower() == '.dcm':
                dicom_data = dicom.read_file(dicom_data_file)
                dicom_arr = dicom_data.pixel_array
                dataio.import_dicom(dicom_data_file)
                dest_file = os.path.join(
                    pathfinder.data_path(),
                    os.path.basename(dicom_data_file) + ".hdf5")
                self.assertTrue(os.path.exists(dest_file))
                with h5py.File(dest_file, "r") as fidin:
                    froot, ext = os.path.splitext(os.path.basename(dest_file))
                    for key in fidin.keys():
                        if key.startswith(froot):
                            read_data = fidin[key][...]
                            self.assertTrue(np.array_equal(dicom_arr,
                                                           read_data))
                try:
                    if os.path.exists(dest_file):
                        os.remove(dest_file)
                except OSError:
                    # File in use.  WindowsError is undefined on
                    # non-Windows platforms; OSError covers both.
                    pass
def test_remove_data(self):
    """Verify removal of a data file from the data folder"""
    # Seed the data folder with a copy of the sample file
    self.model.copy_data(self.sample_data_file)
    copied_file = os.path.join(pathfinder.data_path(),
                               self.sample_data_basename)
    self.assertTrue(os.path.exists(copied_file))
    # Removing it should leave the data folder without the file
    self.model.remove_data(copied_file)
    self.assertFalse(os.path.exists(copied_file))
def import_winspect(self):
    """Reads and imports the Winspect data into the default data folder.

    Each amplitude dataset is saved as <basename>_ampdataN<ext> and each
    waveform dataset as <basename>_waveformdataN<ext>.  Datasets of any
    other type are skipped.
    """
    output_basename, ext = os.path.splitext(self.data_file.file_name)
    datasets = self.get_winspect_data()
    amp_output_counter = 0
    waveform_output_counter = 0
    for dataset in datasets:
        if "amplitude" in dataset.data_type:
            output_fname = os.path.join(
                pathfinder.data_path(),
                os.path.basename(output_basename) + "_ampdata" +
                str(amp_output_counter) + ext)
            amp_output_counter += 1
        elif "waveform" in dataset.data_type:
            output_fname = os.path.join(
                pathfinder.data_path(),
                os.path.basename(output_basename) + "_waveformdata" +
                str(waveform_output_counter) + ext)
            waveform_output_counter += 1
        else:
            # Unknown dataset type: skip it rather than re-using the
            # previous iteration's output_fname (or raising
            # UnboundLocalError on the first iteration).
            continue
        if dataset.data is not None and dataset.data.size > 0:
            save_data(output_fname, dataset.data)
def check_user_path(self):
    """Verify user data folders were created"""
    expected_folders = [pathfinder.user_path(),
                        pathfinder.data_path(),
                        pathfinder.thumbnails_path(),
                        pathfinder.gates_path(),
                        pathfinder.plugins_path(),
                        pathfinder.podmodels_path(),
                        pathfinder.colormaps_path(),
                        pathfinder.batchoutput_path()]
    self.model.check_user_path()
    # Every expected folder should now exist on disk
    for expected_folder in expected_folders:
        self.assertTrue(os.path.exists(expected_folder))
def check_user_path(self):
    """Verify user data folders were created"""
    expected_folders = [pathfinder.user_path(),
                        pathfinder.data_path(),
                        pathfinder.thumbnails_path(),
                        pathfinder.gates_path(),
                        pathfinder.plugins_path(),
                        pathfinder.podmodels_path(),
                        pathfinder.adamodels_path(),
                        pathfinder.colormaps_path(),
                        pathfinder.batchoutput_path()]
    self.model.check_user_path()
    # Every expected folder should now exist on disk
    for expected_folder in expected_folders:
        self.assertTrue(os.path.exists(expected_folder))
def import_amplitude_data(self):
    """Imports the amplitude datasets as HDF5 files.

    Reads the amplitude data if it hasn't been read yet, then saves each
    non-empty dataset as <basename>_ampdataN<ext> in the data folder.
    """
    if len(self._data['amplitude']) == 0:
        self.read_amplitude_data()
    # Hoisted out of the loop: the basename/extension are loop-invariant
    output_basename, ext = os.path.splitext(self.data_file)
    for dataset_idx, dataset in enumerate(self._data['amplitude']):
        if dataset.size > 0:
            output_fname = os.path.join(
                pathfinder.data_path(),
                os.path.basename(output_basename) + "_ampdata" +
                str(dataset_idx) + ext)
            save_data(output_fname, dataset)
def import_tof_data(self):
    """Converts the TOF datasets to HDF5.

    Reads the Time Of Flight data if it hasn't been read yet, then saves
    each non-empty dataset as <basename>_tofdataN<ext> in the data folder.
    """
    if len(self._data['tof']) == 0:
        self.read_tof_data()
    # Hoisted out of the loop: the basename/extension are loop-invariant
    output_basename, ext = os.path.splitext(self.data_file)
    for dataset_idx, dataset in enumerate(self._data['tof']):
        if dataset.size > 0:
            output_fname = os.path.join(
                pathfinder.data_path(),
                os.path.basename(output_basename) + "_tofdata" +
                str(dataset_idx) + ext)
            save_data(output_fname, dataset)
def import_winspect(self):
    """Reads and imports the Winspect data into the default data folder.

    Each amplitude dataset is saved as <basename>_ampdataN<ext> and each
    waveform dataset as <basename>_waveformdataN<ext>.  Datasets of any
    other type are skipped.
    """
    output_basename, ext = os.path.splitext(self.data_file.file_name)
    datasets = self.get_winspect_data()
    amp_output_counter = 0
    waveform_output_counter = 0
    for dataset in datasets:
        if "amplitude" in dataset.data_type:
            output_fname = os.path.join(
                pathfinder.data_path(),
                os.path.basename(output_basename) + "_ampdata" +
                str(amp_output_counter) + ext)
            amp_output_counter += 1
        elif "waveform" in dataset.data_type:
            output_fname = os.path.join(
                pathfinder.data_path(),
                os.path.basename(output_basename) + "_waveformdata" +
                str(waveform_output_counter) + ext)
            waveform_output_counter += 1
        else:
            # Unknown dataset type: skip it rather than re-using the
            # previous iteration's output_fname (or raising
            # UnboundLocalError on the first iteration).
            continue
        if dataset.data is not None and dataset.data.size > 0:
            save_data(output_fname, dataset.data)
def import_amplitude_data(self):
    """Imports the amplitude datasets as HDF5 files.

    Reads the amplitude data if it hasn't been read yet, then saves each
    non-empty dataset as <basename>_ampdataN<ext> in the data folder.
    """
    if len(self._data['amplitude']) == 0:
        self.read_amplitude_data()
    # Hoisted out of the loop: the basename/extension are loop-invariant
    output_basename, ext = os.path.splitext(self.data_file)
    for dataset_idx, dataset in enumerate(self._data['amplitude']):
        if dataset.size > 0:
            output_fname = os.path.join(
                pathfinder.data_path(),
                os.path.basename(output_basename) + "_ampdata" +
                str(dataset_idx) + ext)
            save_data(output_fname, dataset)
def import_tof_data(self):
    """Converts the TOF datasets to HDF5.

    Reads the Time Of Flight data if it hasn't been read yet, then saves
    each non-empty dataset as <basename>_tofdataN<ext> in the data folder.
    """
    if len(self._data['tof']) == 0:
        self.read_tof_data()
    # Hoisted out of the loop: the basename/extension are loop-invariant
    output_basename, ext = os.path.splitext(self.data_file)
    for dataset_idx, dataset in enumerate(self._data['tof']):
        if dataset.size > 0:
            output_fname = os.path.join(
                pathfinder.data_path(),
                os.path.basename(output_basename) + "_tofdata" +
                str(dataset_idx) + ext)
            save_data(output_fname, dataset)
def init_ui(self):
    """Generates the data panel"""
    self.panel_sizer = wx.BoxSizer(wx.VERTICAL)
    # Tree of data files, rooted at the user's data folder
    self.data_tree = wx.TreeCtrl(self, wx.ID_ANY, wx.DefaultPosition,
                                 wx.DefaultSize)
    self.data_tree_root = self.data_tree.AddRoot(pathfinder.data_path())
    self.Bind(wx.EVT_TREE_SEL_CHANGED,
              self.controller.on_tree_selection_changed, self.data_tree)
    self.data_tree.Bind(wx.EVT_RIGHT_DOWN, self.init_popup_menu)
    self.panel_sizer.Add(self.data_tree, ui_defaults.ctrl_pct,
                         ui_defaults.sizer_flags, ui_defaults.widget_margin)
    # Thumbnail preview of the currently-selected data file
    self.figure_bmp = wx.StaticBitmap(self, wx.ID_ANY, bitmap=wx.NullBitmap,
                                      pos=wx.DefaultPosition,
                                      size=wx.DefaultSize)
    self.panel_sizer.Add(self.figure_bmp, ui_defaults.lbl_pct,
                         ui_defaults.lblsizer_flags,
                         ui_defaults.widget_margin)
    self.SetSizer(self.panel_sizer)
def check_user_path(cls):
    """Verify that user data folders exist. Creates any missing folders."""
    # All folders the application expects under the user's path
    required_folders = (pathfinder.user_path(),
                        pathfinder.data_path(),
                        pathfinder.thumbnails_path(),
                        pathfinder.plugins_path(),
                        pathfinder.podmodels_path(),
                        pathfinder.gates_path(),
                        pathfinder.adamodels_path(),
                        pathfinder.colormaps_path(),
                        pathfinder.batchoutput_path())
    for required_folder in required_folders:
        if not os.path.exists(required_folder):
            os.makedirs(required_folder)
def check_user_path(cls):
    """Verify that user data folders exist. Creates any missing folders."""
    # All folders the application expects under the user's path
    required_folders = (pathfinder.user_path(),
                        pathfinder.data_path(),
                        pathfinder.thumbnails_path(),
                        pathfinder.plugins_path(),
                        pathfinder.podmodels_path(),
                        pathfinder.gates_path(),
                        pathfinder.colormaps_path(),
                        pathfinder.batchoutput_path())
    for required_folder in required_folders:
        if not os.path.exists(required_folder):
            os.makedirs(required_folder)
def deleted_user_path():
    """Utility function to delete empty folders in the user data folders,
    used to verify that MainModel will recreate missing folders as required.
    Returns a list of folders successfully deleted or None if no folders
    were deleted."""
    data_folders = [pathfinder.user_path(), pathfinder.data_path(),
                    pathfinder.thumbnails_path(), pathfinder.plugins_path(),
                    pathfinder.colormaps_path()]
    deleted_folders = []
    for folder in data_folders:
        # Only remove folders that exist and are empty
        if os.path.exists(folder) and not os.listdir(folder):
            try:
                os.rmdir(folder)
                deleted_folders.append(folder)
            except OSError:
                # Folder in use (Explorer, cmd, etc.).  Catch OSError
                # rather than WindowsError: the latter is undefined on
                # non-Windows platforms and would raise NameError there.
                pass
    return deleted_folders or None
def deleted_user_path():
    """Utility function to delete empty folders in the user data folders,
    used to verify that MainModel will recreate missing folders as required.
    Returns a list of folders successfully deleted or None if no folders
    were deleted."""
    data_folders = [pathfinder.user_path(), pathfinder.data_path(),
                    pathfinder.thumbnails_path(), pathfinder.plugins_path(),
                    pathfinder.podmodels_path(), pathfinder.adamodels_path(),
                    pathfinder.colormaps_path()]
    deleted_folders = []
    for folder in data_folders:
        # Only remove folders that exist and are empty
        if os.path.exists(folder) and not os.listdir(folder):
            try:
                os.rmdir(folder)
                deleted_folders.append(folder)
            except OSError:
                # Folder in use (Explorer, cmd, etc.).  Catch OSError
                # rather than WindowsError: the latter is undefined on
                # non-Windows platforms and would raise NameError there.
                pass
    return deleted_folders or None
def test_import_amp(self):
    """Verify import of amplitude data.

    Imports the sample C-scan's amplitude data and checks the resulting
    HDF5 file matches the pre-saved expected array.  The output file is
    removed afterwards.
    """
    amp_data_file = os.path.join(os.path.dirname(__file__), 'support_files',
                                 'CScanData_ampdata.npy')
    csc_data_file = os.path.join(os.path.dirname(__file__), 'support_files',
                                 'CScanData')
    assert os.path.exists(amp_data_file)
    expected_amp_data = np.load(amp_data_file)
    dest_file = os.path.join(
        pathfinder.data_path(),
        os.path.basename(csc_data_file) + "_ampdata0.csc.hdf5")
    self.cscan_datafile.import_amplitude_data()
    self.assertTrue(os.path.exists(dest_file))
    with h5py.File(dest_file, "r") as fidin:
        root, ext = os.path.splitext(os.path.basename(dest_file))
        for key in fidin.keys():
            if key.startswith(root):
                read_data = fidin[key][...]
                self.assertTrue(np.array_equal(expected_amp_data, read_data))
    try:
        if os.path.exists(dest_file):
            os.remove(dest_file)
    except OSError:  # file in use; WindowsError is undefined off Windows
        pass
def test_import_img(self):
    """Verify import of images.

    Imports a sample JPEG (flattened to grayscale) and checks the
    resulting HDF5 file matches scipy.misc.imread's output.  The output
    file is removed afterwards.
    """
    sample_data_file = os.path.join(os.path.dirname(__file__),
                                    'support_files', 'austin_sky320x240.jpg')
    assert os.path.exists(sample_data_file)
    # NOTE(review): scipy.misc.imread was removed in SciPy 1.2 - this test
    # requires the older SciPy the project already depends on.
    expected_data = scipy.misc.imread(sample_data_file, flatten=True)
    dataio.import_img(sample_data_file, flatten=True)
    dest_file = os.path.join(pathfinder.data_path(),
                             os.path.basename(sample_data_file) + ".hdf5")
    self.assertTrue(os.path.exists(dest_file))
    with h5py.File(dest_file, "r") as fidin:
        root, ext = os.path.splitext(os.path.basename(dest_file))
        for key in fidin.keys():
            if key.startswith(root):
                read_data = fidin[key][...]
                self.assertTrue(np.array_equal(expected_data, read_data))
    try:
        if os.path.exists(dest_file):
            os.remove(dest_file)
    except OSError:  # file in use; WindowsError is undefined off Windows
        pass
def test_import_utwin_amp(self):
    """Verify import of UTWin amplitude data through convenience function.

    Runs dataio.import_utwin_amp on the sample .csc file and checks the
    resulting HDF5 file matches the pre-saved expected array.  The output
    file is removed afterwards.
    """
    amp_data_file = os.path.join(os.path.dirname(__file__), 'support_files',
                                 'CScanData_ampdata.npy')
    sample_data_file = os.path.join(os.path.dirname(__file__),
                                    'support_files', 'CScanData.csc')
    expected_amp_data = np.load(amp_data_file)
    root, ext = os.path.splitext(os.path.basename(sample_data_file))
    dest_file = os.path.join(pathfinder.data_path(),
                             os.path.basename(root) + "_ampdata0.csc.hdf5")
    dataio.import_utwin_amp(sample_data_file)
    self.assertTrue(os.path.exists(dest_file))
    with h5py.File(dest_file, "r") as fidin:
        root, ext = os.path.splitext(os.path.basename(dest_file))
        for key in fidin.keys():
            if key.startswith(root):
                read_data = fidin[key][...]
                self.assertTrue(np.array_equal(expected_amp_data, read_data))
    try:
        if os.path.exists(dest_file):
            os.remove(dest_file)
    except OSError:  # file in use; WindowsError is undefined off Windows
        pass
def test_import_img(self):
    """Verify import of images.

    Imports a sample JPEG (flattened to grayscale) and checks the
    resulting HDF5 file matches scipy.misc.imread's output.  The output
    file is removed afterwards.
    """
    sample_data_file = os.path.join(os.path.dirname(__file__),
                                    'support_files', 'austin_sky320x240.jpg')
    assert os.path.exists(sample_data_file)
    # NOTE(review): scipy.misc.imread was removed in SciPy 1.2 - this test
    # requires the older SciPy the project already depends on.
    expected_data = scipy.misc.imread(sample_data_file, flatten=True)
    dataio.import_img(sample_data_file, flatten=True)
    dest_file = os.path.join(pathfinder.data_path(),
                             os.path.basename(sample_data_file) + ".hdf5")
    self.assertTrue(os.path.exists(dest_file))
    with h5py.File(dest_file, "r") as fidin:
        root, ext = os.path.splitext(os.path.basename(dest_file))
        for key in fidin.keys():
            if key.startswith(root):
                read_data = fidin[key][...]
                self.assertTrue(np.array_equal(expected_data, read_data))
    try:
        if os.path.exists(dest_file):
            os.remove(dest_file)
    except OSError:  # file in use; WindowsError is undefined off Windows
        pass
def test_import_tof(self):
    """Verify import of Time Of Flight data.

    Imports the sample C-scan's TOF data and checks the resulting HDF5
    file matches the pre-saved expected array scaled by the TOF
    resolution.  The output file is removed afterwards.
    """
    tof_data_file = os.path.join(os.path.dirname(__file__), 'support_files',
                                 'CScanData_tofdata.npy')
    tof_resolution = 0.01
    csc_data_file = os.path.join(os.path.dirname(__file__), 'support_files',
                                 'CScanData')
    assert os.path.exists(tof_data_file)
    expected_tof_data = np.load(tof_data_file) * tof_resolution
    dest_file = os.path.join(
        pathfinder.data_path(),
        os.path.basename(csc_data_file) + "_tofdata0.csc.hdf5")
    self.cscan_datafile.import_tof_data()
    self.assertTrue(os.path.exists(dest_file))
    with h5py.File(dest_file, "r") as fidin:
        root, ext = os.path.splitext(os.path.basename(dest_file))
        for key in fidin.keys():
            if key.startswith(root):
                read_data = fidin[key][...]
                # Scaled floats: compare approximately, not exactly
                numpy.testing.assert_array_almost_equal(expected_tof_data,
                                                        read_data, decimal=3)
    try:
        if os.path.exists(dest_file):
            os.remove(dest_file)
    except OSError:  # file in use; WindowsError is undefined off Windows
        pass
def test_import_txt(self):
    """Verify import of ASCII delimited data files.

    Imports a sample whitespace-delimited text file and checks the
    resulting HDF5 file matches numpy.loadtxt's output.  The output file
    is removed afterwards.
    """
    sample_data_file = os.path.join(os.path.dirname(__file__),
                                    'support_files',
                                    '1.25 from hole Single Column.asc')
    assert os.path.exists(sample_data_file)
    import_params = {'delimiter': None}
    expected_data = np.loadtxt(sample_data_file,
                               delimiter=import_params['delimiter'])
    dataio.import_txt(sample_data_file, **import_params)
    dest_file = os.path.join(pathfinder.data_path(),
                             os.path.basename(sample_data_file) + ".hdf5")
    self.assertTrue(os.path.exists(dest_file))
    with h5py.File(dest_file, "r") as fidin:
        root, ext = os.path.splitext(os.path.basename(dest_file))
        for key in fidin.keys():
            if key.startswith(root):
                read_data = fidin[key][...]
                self.assertTrue(np.array_equal(expected_data, read_data))
    try:
        if os.path.exists(dest_file):
            os.remove(dest_file)
    except OSError:  # file in use; WindowsError is undefined off Windows
        pass
def test_import_utwin_amp(self):
    """Verify import of UTWin amplitude data through convenience function.

    Runs dataio.import_utwin_amp on the sample .csc file and checks the
    resulting HDF5 file matches the pre-saved expected array.  The output
    file is removed afterwards.
    """
    amp_data_file = os.path.join(os.path.dirname(__file__), 'support_files',
                                 'CScanData_ampdata.npy')
    sample_data_file = os.path.join(os.path.dirname(__file__),
                                    'support_files', 'CScanData.csc')
    expected_amp_data = np.load(amp_data_file)
    root, ext = os.path.splitext(os.path.basename(sample_data_file))
    dest_file = os.path.join(pathfinder.data_path(),
                             os.path.basename(root) + "_ampdata0.csc.hdf5")
    dataio.import_utwin_amp(sample_data_file)
    self.assertTrue(os.path.exists(dest_file))
    with h5py.File(dest_file, "r") as fidin:
        root, ext = os.path.splitext(os.path.basename(dest_file))
        for key in fidin.keys():
            if key.startswith(root):
                read_data = fidin[key][...]
                self.assertTrue(np.array_equal(expected_amp_data, read_data))
    try:
        if os.path.exists(dest_file):
            os.remove(dest_file)
    except OSError:  # file in use; WindowsError is undefined off Windows
        pass
def test_import_txt(self):
    """Verify import of ASCII delimited data files.

    Imports a sample whitespace-delimited text file and checks the
    resulting HDF5 file matches numpy.loadtxt's output.  The output file
    is removed afterwards.
    """
    sample_data_file = os.path.join(os.path.dirname(__file__),
                                    'support_files',
                                    '1.25 from hole Single Column.asc')
    assert os.path.exists(sample_data_file)
    import_params = {'delimiter': None}
    expected_data = np.loadtxt(sample_data_file,
                               delimiter=import_params['delimiter'])
    dataio.import_txt(sample_data_file, **import_params)
    dest_file = os.path.join(pathfinder.data_path(),
                             os.path.basename(sample_data_file) + ".hdf5")
    self.assertTrue(os.path.exists(dest_file))
    with h5py.File(dest_file, "r") as fidin:
        root, ext = os.path.splitext(os.path.basename(dest_file))
        for key in fidin.keys():
            if key.startswith(root):
                read_data = fidin[key][...]
                self.assertTrue(np.array_equal(expected_data, read_data))
    try:
        if os.path.exists(dest_file):
            os.remove(dest_file)
    except OSError:  # file in use; WindowsError is undefined off Windows
        pass
def on_browse(self, evt):
    """Handles request to open data folder. If a data file is not selected
    in the parent, opens the current user's root data folder instead."""
    def show_error(message):
        # Present a modal error dialog with the given message
        err_dlg = wx.MessageDialog(self.parent, message=message,
                                   caption="Unable To Open Folder",
                                   style=wx.ICON_ERROR)
        err_dlg.ShowModal()
        err_dlg.Destroy()

    if self.parent.data is not None:
        browse_fldr = os.path.dirname(self.parent.data)
    else:
        browse_fldr = pathfinder.data_path()
    try:
        open_file.open_file(browse_fldr)
    except IOError as err:  # file not found
        module_logger.error("Unable to find folder: {0}".format(err))
        show_error("Unable to find folder '{0}'.\nPlease ensure the folder exists.".format(browse_fldr))
    except OSError as err:  # other OS error
        module_logger.error("Unable to browse to data folder (OS error): {0}".format(err))
        show_error("Unable to browse to data folder, error reported was:\n{0}".format(err))
def test_data_path(self):
    """Verify correct data path"""
    # The data folder should live directly under the user's path
    expected_path = os.path.join(self.user_path, 'data')
    self.assertEqual(expected_path, pathfinder.data_path())
def copy_data(self, data_file):
    """Adds the specified data file to the data folder"""
    destination_folder = pathfinder.data_path()
    shutil.copy(data_file, destination_folder)