Ejemplo n.º 1
0
 def test_run_plugin(self):
     """Verify run_plugin convenience function correctly executes"""
     root, ext = os.path.splitext(os.path.basename(self.datafile))
     output_fname = os.path.join(pathfinder.batchoutput_path(), root + ".hdf5")
     # With save_data=False no output file should be produced
     batchui_ctrl.run_plugin(self.toolkit_class, self.datafile, save_data=False)
     self.assertFalse(os.path.exists(output_fname))
     # With save_data=True results are written to the batch output folder
     batchui_ctrl.run_plugin(self.toolkit_class, self.datafile, save_data=True)
     self.assertTrue(os.path.exists(output_fname))
     # Run the same plugin directly and confirm the stored output matches
     plugin_names, plugin_classes = self.get_available_plugins()
     for plugin_name, plugin_class in zip(plugin_names, plugin_classes):
         if plugin_name == self.toolkit_class:
             plugin_instance = plugin_class()
             plugin_instance.data = dataio.get_data(self.datafile)
             plugin_instance.run()
             expected_data = plugin_instance.data
             stored_data = dataio.get_data(output_fname)
             self.assertTrue(np.array_equal(expected_data, stored_data))
             break
     # Clean up (best effort).  NB: catching OSError alone is equivalent to
     # the old WindowsError/OSError pair (WindowsError subclasses OSError)
     # and avoids a NameError on non-Windows Python 3.
     if os.path.exists(output_fname):
         try:
             os.remove(output_fname)
         except OSError:  # file in use (Windows) or other OS error
             pass
Ejemplo n.º 2
0
 def test_run_plugin(self):
     """Verify run_plugin convenience function correctly executes"""
     root, ext = os.path.splitext(os.path.basename(self.datafile))
     output_fname = os.path.join(pathfinder.batchoutput_path(),
                                 root + ".hdf5")
     # No output file expected when save_data is False
     batchui_ctrl.run_plugin(self.toolkit_class,
                             self.datafile,
                             save_data=False)
     self.assertFalse(os.path.exists(output_fname))
     # Output file expected when save_data is True
     batchui_ctrl.run_plugin(self.toolkit_class,
                             self.datafile,
                             save_data=True)
     self.assertTrue(os.path.exists(output_fname))
     # Re-run the plugin directly and compare against the saved output
     plugin_names, plugin_classes = self.get_available_plugins()
     for plugin_name, plugin_class in zip(plugin_names, plugin_classes):
         if plugin_name == self.toolkit_class:
             plugin_instance = plugin_class()
             plugin_instance.data = dataio.get_data(self.datafile)
             plugin_instance.run()
             expected_data = plugin_instance.data
             stored_data = dataio.get_data(output_fname)
             self.assertTrue(np.array_equal(expected_data, stored_data))
             break
     # Clean up (best effort).  A single OSError handler also covers
     # WindowsError (its subclass) and stays portable on Python 3, where
     # WindowsError is undefined on non-Windows platforms.
     if os.path.exists(output_fname):
         try:
             os.remove(output_fname)
         except OSError:  # file in use (Windows) or other OS error
             pass
Ejemplo n.º 3
0
    def get_data_info(self, data_filename):
        """Return a dict of basic info about the HDF5 data file data_filename.

        Returns None if no data was found or the file size could not be read.
        Structure of dict:

        'filesize': size of HDF5 file in bytes
        'ndim': number of dimensions in data array
        'shape': (tuple) shape of data array
        'numpoints': number of elements in data array
        'dtype': (str) type of data (NumPy dtype) in data array
        """
        data = dataio.get_data(data_filename)
        if data is None:
            return None
        try:
            info = {'filesize': int(os.path.getsize(data_filename)),
                    'ndim': data.ndim,
                    'shape': data.shape,
                    'numpoints': data.size,
                    'dtype': str(data.dtype)}
        except os.error:  # file size unavailable
            return None
        # Encourage prompt release of the (possibly large) data array
        gc.collect()
        return info
Ejemplo n.º 4
0
 def test_import_data(self):
     """Verify import_data successfully imports data"""
     sample_data_folder = os.path.join(pathfinder.app_path(), 'models', 'tests', 'support_files')
     sample_utwin_file = os.path.join(sample_data_folder, 'CScanData.csc')
     utwin_data = dataio.get_utwin_data(sample_utwin_file)
     # Flatten the per-type lists of arrays into a name->array mapping
     expected_utwin_data = {}
     for data_type in utwin_data.keys():
         for idx, arr in enumerate(utwin_data[data_type]):
             expected_utwin_data[data_type + str(idx)] = arr
     root, ext = os.path.splitext(os.path.basename(sample_utwin_file))
     output_fnames = [os.path.join(pathfinder.data_path(), root + "_" + dataset + ".hdf5")
                      for dataset in expected_utwin_data]
     batchui_ctrl.import_data(sample_utwin_file)
     # Each non-empty dataset should have been written out and match the source
     for dataset in expected_utwin_data:
         if expected_utwin_data[dataset] is not None:
             fname = os.path.join(pathfinder.data_path(), root + "_" + dataset + ".hdf5")
             self.assertTrue(os.path.exists(fname))
             self.assertTrue(np.array_equal(expected_utwin_data[dataset], dataio.get_data(fname)))
     # Clean up (best effort).  OSError alone also covers WindowsError (its
     # subclass) and avoids a NameError on non-Windows Python 3.
     for fname in output_fnames:
         try:
             if os.path.exists(fname):
                 os.remove(fname)
         except OSError:  # file in use (Windows) or other OS error
             pass
Ejemplo n.º 5
0
 def test_import_data(self):
     """Verify import_data successfully imports data"""
     sample_data_folder = os.path.join(pathfinder.app_path(), 'models',
                                       'tests', 'support_files')
     sample_utwin_file = os.path.join(sample_data_folder, 'CScanData.csc')
     utwin_data = dataio.get_utwin_data(sample_utwin_file)
     # Flatten the per-type lists of arrays into a name->array mapping
     expected_utwin_data = {}
     for data_type in utwin_data.keys():
         for idx, arr in enumerate(utwin_data[data_type]):
             expected_utwin_data[data_type + str(idx)] = arr
     root, ext = os.path.splitext(os.path.basename(sample_utwin_file))
     output_fnames = [
         os.path.join(pathfinder.data_path(),
                      root + "_" + dataset + ".hdf5")
         for dataset in expected_utwin_data]
     batchui_ctrl.import_data(sample_utwin_file)
     # Each non-empty dataset should be written out and match the source
     for dataset in expected_utwin_data:
         if expected_utwin_data[dataset] is not None:
             fname = os.path.join(pathfinder.data_path(),
                                  root + "_" + dataset + ".hdf5")
             self.assertTrue(os.path.exists(fname))
             self.assertTrue(
                 np.array_equal(expected_utwin_data[dataset],
                                dataio.get_data(fname)))
     # Clean up (best effort).  OSError alone also covers WindowsError (its
     # subclass) and avoids a NameError on non-Windows Python 3.
     for fname in output_fnames:
         try:
             if os.path.exists(fname):
                 os.remove(fname)
         except OSError:  # file in use (Windows) or other OS error
             pass
Ejemplo n.º 6
0
 def on_slice_data(self, evt):
     """Handles request to export a slice of data"""
     # Modal dialog collects the slice specification from the user
     slice_dlg = dlg.ExportSliceDialog(parent=self.view, datafile=self.view.data_panel.data)
     if slice_dlg.ShowModal() == wx.ID_OK:
         try:
             wx.BeginBusyCursor()
             # Read only the requested hyperslab from the current data file
             sliced_data = dataio.get_data(self.view.data_panel.data, slice_dlg.get_slice())
             # Default output name: "sliced_<original filename>"
             sliced_data_fname = "_".join(["sliced",
                                           os.path.basename(self.view.data_panel.data)])
             fname_dlg = wx.TextEntryDialog(parent=self.view, message="Please specify a filename for the sliced data.",
                 caption="Save Sliced Data", defaultValue=sliced_data_fname)
             if fname_dlg.ShowModal() == wx.ID_OK:
                 # Save under the user-supplied name and refresh the data list
                 dest_fname = os.path.join(pathfinder.data_path(), fname_dlg.GetValue())
                 dataio.save_data(dest_fname, sliced_data)
                 self.view.data_panel.populate()
         except TypeError: # bad dimensions
             err_dlg = wx.MessageDialog(self.view, message="Specified dimensions out of range for this data.",
                 caption="Unable To Slice Data", style=wx.ICON_ERROR)
             err_dlg.ShowModal()
             err_dlg.Destroy()
         except ValueError: # zero-length slices, etc.
             err_dlg = wx.MessageDialog(self.view, message="Zero-length slices are not permitted.",
                 caption="Unable To Slice Data", style=wx.ICON_ERROR)
             err_dlg.ShowModal()
             err_dlg.Destroy()
         finally:
             # Always restore the cursor, even if slicing/saving failed
             wx.EndBusyCursor()
     slice_dlg.Destroy()
Ejemplo n.º 7
0
def plot(data_filename, width, height):
    """Returns a PNG plot of the specified data file's dataset"""
    dataset = dataio.get_data(data_filename)
    img_stream = plot_stream(dataset, os.path.basename(data_filename),
                             width, height)
    return gen_thumbnail(img_stream, data_filename)
Ejemplo n.º 8
0
 def on_slice_data(self, evt):
     """Handles request to export a slice of data"""
     # Modal dialog collects the slice specification from the user
     slice_dlg = dlg.ExportSliceDialog(parent=self.view, datafile=self.view.data_panel.data)
     if slice_dlg.ShowModal() == wx.ID_OK:
         try:
             wx.BeginBusyCursor()
             # Read only the requested hyperslab from the current data file
             sliced_data = dataio.get_data(self.view.data_panel.data, slice_dlg.get_slice())
             # Default output name: "sliced_<original filename>"
             sliced_data_fname = "_".join(["sliced",
                                           os.path.basename(self.view.data_panel.data)])
             fname_dlg = wx.TextEntryDialog(parent=self.view, message="Please specify a filename for the sliced data.",
                 caption="Save Sliced Data", defaultValue=sliced_data_fname)
             if fname_dlg.ShowModal() == wx.ID_OK:
                 # Save under the user-supplied name and refresh the data list
                 dest_fname = os.path.join(pathfinder.data_path(), fname_dlg.GetValue())
                 dataio.save_data(dest_fname, sliced_data)
                 self.view.data_panel.populate()
         except TypeError: # bad dimensions
             err_dlg = wx.MessageDialog(self.view, message="Specified dimensions out of range for this data.",
                 caption="Unable To Slice Data", style=wx.ICON_ERROR)
             err_dlg.ShowModal()
             err_dlg.Destroy()
         except ValueError: # zero-length slices, etc.
             err_dlg = wx.MessageDialog(self.view, message="Zero-length slices are not permitted.",
                 caption="Unable To Slice Data", style=wx.ICON_ERROR)
             err_dlg.ShowModal()
             err_dlg.Destroy()
         finally:
             # Always restore the cursor, even if slicing/saving failed
             wx.EndBusyCursor()
     slice_dlg.Destroy()
Ejemplo n.º 9
0
 def test_run(self):
     """Verify correctly executing NDIToolbox plugins"""
     plugin_names, plugin_classes = self.get_available_plugins()
     for plugin_name, plugin_class in zip(plugin_names, plugin_classes):
         adapter = batchui_ctrl.BatchPluginAdapter(plugin_name, self.datafile)
         # Run the plugin directly to produce the reference output
         reference_plugin = plugin_class()
         reference_plugin._data = dataio.get_data(self.datafile)
         reference_plugin.run()
         # The adapter must produce identical results
         adapter.run()
         self.assertTrue(np.array_equal(reference_plugin._data, adapter.data))
Ejemplo n.º 10
0
 def test_run_plugin_multi_datasets(self):
     """Verify run_plugin convenience function correctly handles datafiles with
     multiple datasets"""
     sample_data_folder = os.path.join(pathfinder.app_path(), 'models',
                                       'tests', 'support_files')
     sample_utwin_file = os.path.join(sample_data_folder, 'CScanData.csc')
     utwin_data = dataio.get_utwin_data(sample_utwin_file)
     # Flatten the per-type lists of arrays into a name->array mapping
     expected_utwin_data = {}
     for data_type in utwin_data.keys():
         for idx, arr in enumerate(utwin_data[data_type]):
             expected_utwin_data[data_type + str(idx)] = arr
     root, ext = os.path.splitext(os.path.basename(sample_utwin_file))
     output_fnames = [
         os.path.join(pathfinder.batchoutput_path(),
                      root + "_" + dataset + ".hdf5")
         for dataset in expected_utwin_data]
     # Verify no output saved
     batchui_ctrl.run_plugin(self.toolkit_class,
                             sample_utwin_file,
                             save_data=False)
     for fname in output_fnames:
         self.assertFalse(os.path.exists(fname))
     # Verify output saved
     batchui_ctrl.run_plugin(self.toolkit_class,
                             sample_utwin_file,
                             save_data=True)
     for dataset in expected_utwin_data:
         if expected_utwin_data[dataset] is not None:
             fname = os.path.join(pathfinder.batchoutput_path(),
                                  root + "_" + dataset + ".hdf5")
             self.assertTrue(os.path.exists(fname))
             # Run the plugin directly on this dataset and compare outputs
             plugin_names, plugin_classes = self.get_available_plugins()
             for plugin_name, plugin_class in zip(plugin_names,
                                                  plugin_classes):
                 if plugin_name == self.toolkit_class:
                     plugin_instance = plugin_class()
                     plugin_instance.data = expected_utwin_data[dataset]
                     plugin_instance.run()
                     expected_data = plugin_instance.data
                     returned_data = dataio.get_data(fname)
                     self.assertTrue(
                         np.array_equal(expected_data, returned_data))
                     break
     # Clean up (best effort).  OSError alone also covers WindowsError (its
     # subclass) and avoids a NameError on non-Windows Python 3.
     for fname in output_fnames:
         try:
             if os.path.exists(fname):
                 os.remove(fname)
         except OSError:  # file in use (Windows) or other OS error
             pass
Ejemplo n.º 11
0
 def test_run(self):
     """Verify correctly executing NDIToolbox plugins"""
     plugin_names, plugin_classes = self.get_available_plugins()
     for plugin_name, plugin_class in zip(plugin_names, plugin_classes):
         adapter = batchui_ctrl.BatchPluginAdapter(plugin_name,
                                                   self.datafile)
         # Run the plugin directly to produce the reference output
         reference_plugin = plugin_class()
         reference_plugin._data = dataio.get_data(self.datafile)
         reference_plugin.run()
         # The adapter must produce identical results
         adapter.run()
         self.assertTrue(np.array_equal(reference_plugin._data, adapter.data))
Ejemplo n.º 12
0
 def test_get_data_info(self):
     """Verify get_data_info returns info about a data file"""
     data = dataio.get_data(self.sample_data_file)
     data_info = self.model.get_data_info(self.sample_data_file)
     # Expected values computed independently from the file and its array
     expected = {'filesize': int(os.stat(self.sample_data_file).st_size),
                 'ndim': data.ndim,
                 'shape': data.shape,
                 'numpoints': data.size,
                 'dtype': str(data.dtype)}
     for key in ('filesize', 'ndim', 'shape', 'numpoints', 'dtype'):
         self.assertEqual(expected[key], data_info[key])
Ejemplo n.º 13
0
def multiprocess_plot(data_filename, width, height):
    """Spawns a subprocess to generate the plot, and returns the result as a PNG wxBitmap.
    The result is also saved to the thumbnails folder for reuse.  If the data has more than
    two dimensions or the file contains no data, returns None and no thumbnail image is
    produced.
    """
    data = dataio.get_data(data_filename)
    # get_data can return None for an empty/unreadable file (see get_data_info);
    # treat that like unsupported dimensionality instead of raising AttributeError
    if data is None or data.ndim >= 3:
        return None
    in_conn, out_conn = Pipe()
    plot_proc = Process(target=plot_pipe,
                        args=(data, os.path.basename(data_filename), width, height, out_conn))
    plot_proc.start()
    img_stream = in_conn.recv()
    plot_proc.join()
    return gen_thumbnail(img_stream, data_filename)
Ejemplo n.º 14
0
def multiprocess_plot(data_filename, width, height):
    """Spawns a subprocess to generate the plot, and returns the result as a PNG wxBitmap.
    The result is also saved to the thumbnails folder for reuse.  If the data has more than
    two dimensions or the file contains no data, returns None and no thumbnail image is
    produced.
    """
    data = dataio.get_data(data_filename)
    # get_data can return None for an empty/unreadable file (see get_data_info);
    # treat that like unsupported dimensionality instead of raising AttributeError
    if data is None or data.ndim >= 3:
        return None
    in_conn, out_conn = Pipe()
    plot_proc = Process(target=plot_pipe,
                        args=(data, os.path.basename(data_filename), width,
                              height, out_conn))
    plot_proc.start()
    img_stream = in_conn.recv()
    plot_proc.join()
    return gen_thumbnail(img_stream, data_filename)
Ejemplo n.º 15
0
 def test_run_plugin_multi_datasets(self):
     """Verify run_plugin convenience function correctly handles datafiles with
     multiple datasets"""
     sample_data_folder = os.path.join(pathfinder.app_path(), 'models', 'tests', 'support_files')
     sample_utwin_file = os.path.join(sample_data_folder, 'CScanData.csc')
     utwin_data = dataio.get_utwin_data(sample_utwin_file)
     # Flatten the per-type lists of arrays into a name->array mapping
     expected_utwin_data = {}
     for data_type in utwin_data.keys():
         for idx, arr in enumerate(utwin_data[data_type]):
             expected_utwin_data[data_type + str(idx)] = arr
     root, ext = os.path.splitext(os.path.basename(sample_utwin_file))
     output_fnames = [os.path.join(pathfinder.batchoutput_path(), root + "_" + dataset + ".hdf5")
                      for dataset in expected_utwin_data]
     # Verify no output saved
     batchui_ctrl.run_plugin(self.toolkit_class, sample_utwin_file, save_data=False)
     for fname in output_fnames:
         self.assertFalse(os.path.exists(fname))
     # Verify output saved
     batchui_ctrl.run_plugin(self.toolkit_class, sample_utwin_file, save_data=True)
     for dataset in expected_utwin_data:
         if expected_utwin_data[dataset] is not None:
             fname = os.path.join(pathfinder.batchoutput_path(), root + "_" + dataset + ".hdf5")
             self.assertTrue(os.path.exists(fname))
             # Run the plugin directly on this dataset and compare outputs
             plugin_names, plugin_classes = self.get_available_plugins()
             for plugin_name, plugin_class in zip(plugin_names, plugin_classes):
                 if plugin_name == self.toolkit_class:
                     plugin_instance = plugin_class()
                     plugin_instance.data = expected_utwin_data[dataset]
                     plugin_instance.run()
                     expected_data = plugin_instance.data
                     returned_data = dataio.get_data(fname)
                     self.assertTrue(np.array_equal(expected_data, returned_data))
                     break
     # Clean up (best effort).  OSError alone also covers WindowsError (its
     # subclass) and avoids a NameError on non-Windows Python 3.
     for fname in output_fnames:
         try:
             if os.path.exists(fname):
                 os.remove(fname)
         except OSError:  # file in use (Windows) or other OS error
             pass
Ejemplo n.º 16
0
def read_data(filename, filetype=None):
    """Attempts to import the specified file based on the provided filetype, or automatically guesses the file format
    based on the file extension if no filetype is given.  Returns the data as a NumPy array if successfully imported as
    a NumPy array if the file contained a single dataset or as a dict if multiple datasets were found."""
    tof_counter = 0
    amp_counter = 0
    waveform_counter = 0
    data = {}
    if filetype is None:
        filetype = get_file_type(filename)
    if filetype is not None and filetype in available_file_types():
        # File types are mutually exclusive, so dispatch with a single
        # if/elif chain rather than re-testing every branch.
        if filetype == 'nditoolbox':
            data = dataio.get_data(filename)
        elif filetype == 'winspect':
            raw_data = dataio.get_winspect_data(filename)
            # Handle any files that may have stored multiple datasets of
            # a given type(s)
            for dataset in raw_data:
                dataset_key = os.path.basename(filename)
                if dataset.data_type == 'waveform':
                    dataset_key = 'waveform' + str(waveform_counter)
                    waveform_counter += 1
                elif dataset.data_type == 'amplitude':
                    dataset_key = 'amplitude' + str(amp_counter)
                    amp_counter += 1
                elif dataset.data_type == 'tof':  # TODO - confirm SDT files use tof
                    dataset_key = 'tof' + str(tof_counter)
                    tof_counter += 1
                data[dataset_key] = dataset.data
        elif filetype == 'csv':
            data = dataio.get_txt_data(filename)
        elif filetype == 'image':
            data = dataio.get_img_data(filename, flatten=True)
        elif filetype == 'dicom':
            data = dataio.get_dicom_data(filename)
        elif filetype == 'utwin':
            raw_data = dataio.get_utwin_data(filename)
            # Flatten the per-type lists into "<type><index>" keys
            for k in raw_data.keys():
                for idx, arr in enumerate(raw_data[k]):
                    data[k + str(idx)] = arr
    return data
Ejemplo n.º 17
0
 def on_check_dims(self, evt):
     """Handles request to check the dimensions of the data file"""
     # Guard clause: the original called wx.EndBusyCursor() in finally even
     # when datafile was None and BeginBusyCursor had never run, producing an
     # unbalanced busy-cursor call.
     if self.datafile is None:
         return
     wx.BeginBusyCursor()
     try:
         data = dataio.get_data(self.datafile)
         if data is not None:
             self.datadim_lbl.SetValue(str(data.shape))
             self.ymax_sc.SetRange(-1, data.shape[0])
             if data.ndim > 1:
                 self.xmax_sc.SetRange(-1, data.shape[1])
             else:
                 # 1D data: X controls are meaningless
                 self.xmin_sc.Enable(False)
                 self.xmax_sc.Enable(False)
             if data.ndim > 2:
                 self.zmax_sc.SetRange(-1, data.shape[2])
             else:
                 # <3D data: Z controls are meaningless
                 self.zmin_sc.Enable(False)
                 self.zmax_sc.Enable(False)
     finally:
         wx.EndBusyCursor()
Ejemplo n.º 18
0
    def get_data_info(self, data_filename):
        """Return a dict of basic info about the HDF5 data file data_filename.

        Returns None if no data was found or the file size could not be read.
        Structure of dict:

        'filesize': size of HDF5 file in bytes
        'ndim': number of dimensions in data array
        'shape': (tuple) shape of data array
        'numpoints': number of elements in data array
        'dtype': (str) type of data (NumPy dtype) in data array
        """
        data = dataio.get_data(data_filename)
        if data is None:
            return None
        try:
            info = {
                'filesize': int(os.path.getsize(data_filename)),
                'ndim': data.ndim,
                'shape': data.shape,
                'numpoints': data.size,
                'dtype': str(data.dtype),
            }
        except os.error:  # file size unavailable
            return None
        # Encourage prompt release of the (possibly large) data array
        gc.collect()
        return info
Ejemplo n.º 19
0
 def test_get_data(self):
     """Verify get_data function returns a NumPy array"""
     loaded = dataio.get_data(self.sample_data_file)
     # Loaded array must match the fixture data element-for-element
     self.assertTrue(np.array_equal(self.sample_data, loaded))
Ejemplo n.º 20
0
 def load_data(cls, file_name):
     """Returns NumPy array from the specified file."""
     # Thin convenience wrapper around the dataio reader
     array_data = dataio.get_data(file_name)
     return array_data
Ejemplo n.º 21
0
 def test_get_data(self):
     """Verify get_data function returns a NumPy array"""
     loaded = dataio.get_data(self.sample_data_file)
     # Loaded array must match the fixture data element-for-element
     self.assertTrue(np.array_equal(self.sample_data, loaded))
Ejemplo n.º 22
0
def plot(data_filename, width, height):
    """Returns a PNG plot of the specified data file's dataset"""
    dataset = dataio.get_data(data_filename)
    img_stream = plot_stream(dataset, os.path.basename(data_filename),
                             width, height)
    return gen_thumbnail(img_stream, data_filename)
Ejemplo n.º 23
0
 def test_get_data_slice(self):
     """Verify get_data function returns a slice if specified"""
     slice_idx = np.s_[5:15]
     hyperslab = dataio.get_data(self.sample_data_file, slice_idx)
     # Hyperslab read must equal the same slice of the in-memory fixture
     expected = self.sample_data[slice_idx]
     self.assertTrue(np.array_equal(expected, hyperslab))
Ejemplo n.º 24
0
 def load_data(self, slice_idx=None):
     """Load data from this instance's data file into self._data.

     By default (slice_idx is None) the entire dataset is read.  If slice_idx
     is a numpy.s_ slice operation, attempts to read only that hyperslab
     (HDF5 feature - returns a slice without loading the complete data).
     """
     loaded = dataio.get_data(self.data_file, slice_idx)
     self._data = loaded
Ejemplo n.º 25
0
 def test_get_data_slice(self):
     """Verify get_data function returns a slice if specified"""
     slice_idx = np.s_[5:15]
     hyperslab = dataio.get_data(self.sample_data_file, slice_idx)
     # Hyperslab read must equal the same slice of the in-memory fixture
     expected = self.sample_data[slice_idx]
     self.assertTrue(np.array_equal(expected, hyperslab))
Ejemplo n.º 26
0
 def load_data(cls, file_name):
     """Returns NumPy array from the specified file."""
     # Thin convenience wrapper around the dataio reader
     array_data = dataio.get_data(file_name)
     return array_data