Example #1
0
def import_data(input_file, file_type=None):
    """Convenience function for importing recognized file formats and saving the results to NDIToolbox data folder.
    Primarily used for multiprocess pools.

    input_file -        name of the input data file.  If file_type is not specified (default),
                        type of file is assumed based on file extension.

    file_type -         (optional) specify the file format.  Must be one of the file formats
                        supported by NDIToolbox.  Currently supported: 'image', 'nditoolbox',
                        'utwin', 'csv', 'winspect', 'dicom' (use the available_file_types
                        function to retrieve a list of supported types).  If not specified,
                        format is assumed based on file extension.
    """
    data = read_data(input_file, file_type)
    # Output basename is derived from the input file regardless of branch - compute once
    root, ext = os.path.splitext(os.path.basename(input_file))
    if hasattr(data, "keys"):
        # Handle multiple datasets - each dataset saved as <root>_<dataset>.hdf5
        for dataset in data:
            output_fname = os.path.join(pathfinder.data_path(), root + "_" + dataset + ".hdf5")
            dataio.save_data(output_fname, data[dataset])
    else:
        # Handle single dataset - saved as <root>.hdf5
        output_fname = os.path.join(pathfinder.data_path(), root + ".hdf5")
        dataio.save_data(output_fname, data)
Example #2
0
 def on_slice_data(self, evt):
     """Handles request to export a slice of data

     Prompts for slice parameters and a destination filename, then saves the
     sliced data under the NDIToolbox data folder.  Displays an error dialog on
     bad dimensions (TypeError) or zero-length slices (ValueError).
     """
     slice_dlg = dlg.ExportSliceDialog(parent=self.view, datafile=self.view.data_panel.data)
     if slice_dlg.ShowModal() == wx.ID_OK:
         try:
             wx.BeginBusyCursor()
             sliced_data = dataio.get_data(self.view.data_panel.data, slice_dlg.get_slice())
             sliced_data_fname = "_".join(["sliced",
                                           os.path.basename(self.view.data_panel.data)])
             fname_dlg = wx.TextEntryDialog(parent=self.view, message="Please specify a filename for the sliced data.",
                 caption="Save Sliced Data", defaultValue=sliced_data_fname)
             if fname_dlg.ShowModal() == wx.ID_OK:
                 dest_fname = os.path.join(pathfinder.data_path(), fname_dlg.GetValue())
                 dataio.save_data(dest_fname, sliced_data)
                 self.view.data_panel.populate()
             # Fix: dialog was previously never destroyed (wx window resource leak)
             fname_dlg.Destroy()
         except TypeError: # bad dimensions
             err_dlg = wx.MessageDialog(self.view, message="Specified dimensions out of range for this data.",
                 caption="Unable To Slice Data", style=wx.ICON_ERROR)
             err_dlg.ShowModal()
             err_dlg.Destroy()
         except ValueError: # zero-length slices, etc.
             err_dlg = wx.MessageDialog(self.view, message="Zero-length slices are not permitted.",
                 caption="Unable To Slice Data", style=wx.ICON_ERROR)
             err_dlg.ShowModal()
             err_dlg.Destroy()
         finally:
             wx.EndBusyCursor()
     slice_dlg.Destroy()
Example #3
0
 def on_slice_data(self, evt):
     """Handles request to export a slice of data

     Prompts for slice parameters and a destination filename, then saves the
     sliced data under the NDIToolbox data folder.  Displays an error dialog on
     bad dimensions (TypeError) or zero-length slices (ValueError).
     """
     slice_dlg = dlg.ExportSliceDialog(parent=self.view, datafile=self.view.data_panel.data)
     if slice_dlg.ShowModal() == wx.ID_OK:
         try:
             wx.BeginBusyCursor()
             sliced_data = dataio.get_data(self.view.data_panel.data, slice_dlg.get_slice())
             sliced_data_fname = "_".join(["sliced",
                                           os.path.basename(self.view.data_panel.data)])
             fname_dlg = wx.TextEntryDialog(parent=self.view, message="Please specify a filename for the sliced data.",
                 caption="Save Sliced Data", defaultValue=sliced_data_fname)
             if fname_dlg.ShowModal() == wx.ID_OK:
                 dest_fname = os.path.join(pathfinder.data_path(), fname_dlg.GetValue())
                 dataio.save_data(dest_fname, sliced_data)
                 self.view.data_panel.populate()
             # Fix: dialog was previously never destroyed (wx window resource leak)
             fname_dlg.Destroy()
         except TypeError: # bad dimensions
             err_dlg = wx.MessageDialog(self.view, message="Specified dimensions out of range for this data.",
                 caption="Unable To Slice Data", style=wx.ICON_ERROR)
             err_dlg.ShowModal()
             err_dlg.Destroy()
         except ValueError: # zero-length slices, etc.
             err_dlg = wx.MessageDialog(self.view, message="Zero-length slices are not permitted.",
                 caption="Unable To Slice Data", style=wx.ICON_ERROR)
             err_dlg.ShowModal()
             err_dlg.Destroy()
         finally:
             wx.EndBusyCursor()
     slice_dlg.Destroy()
Example #4
0
def run_plugin(toolkit, input_file, toolkit_config=None, file_type=None, save_data=True):
    """Convenience function for creating and executing BatchPluginAdapters and optionally saving
    results to NDIToolbox data folder, e.g. for multiprocessing Pools.

    toolkit -           name of plugin class (NOT name of plugin file):  e.g. MedianFilterPlugin,
                        not medfilter_plugin.py.  Must be an installed NDIToolbox plugin.

    input_file -        name of the input data file.  If file_type is not specified (default),
                        type of file is assumed based on file extension.

    toolkit_config -    (optional) JSON configuration file for the toolkit.

    file_type -         (optional) specify the file format.  Must be one of the file formats
                        supported by NDIToolbox.  Currently supported: 'image', 'nditoolbox',
                        'utwin', 'csv', 'winspect', 'dicom' (use the available_file_types
                        function to retrieve a list of supported types).  If not specified,
                        format is assumed based on file extension.

    save_data -         (optional) if True, resultant data are saved to a new HDF5 data file with
                        the same basename as the input file.  Defaults to True.
    """
    batch_runner = BatchPluginAdapter(toolkit, input_file, toolkit_cfg=toolkit_config, filetype=file_type)
    batch_runner.run()
    if save_data:
        # Output basename is derived from the input file regardless of branch - compute once
        root, ext = os.path.splitext(os.path.basename(input_file))
        if hasattr(batch_runner.data, "keys"):
            # Handle multiple datasets - each dataset saved as <root>_<dataset>.hdf5
            for dataset in batch_runner.data:
                output_fname = os.path.join(pathfinder.batchoutput_path(), root + "_" + dataset + ".hdf5")
                dataio.save_data(output_fname, batch_runner.data[dataset])
        else:
            # Handle single dataset - use the public .data accessor for consistency
            # (previously reached into the private _data attribute)
            output_fname = os.path.join(pathfinder.batchoutput_path(), root + ".hdf5")
            dataio.save_data(output_fname, batch_runner.data)
Example #5
0
 def on_save_data(self, evt):
     """Prompts for a destination filename and writes the current data set to disk."""
     initial_dir, initial_file = os.path.split(self.model.data_file)
     file_filter = "NDIToolbox data files (*.hdf5)|*.hdf5|All files (*.*)|*.*"
     file_dlg = wx.FileDialog(self.view, message="Save File As...",
                              defaultDir=initial_dir,
                              defaultFile=initial_file,
                              wildcard=file_filter,
                              style=wx.SAVE | wx.OVERWRITE_PROMPT)
     if file_dlg.ShowModal() == wx.ID_OK:
         # Save under the user-chosen path, then refresh the parent view
         dataio.save_data(file_dlg.GetPath(), self.data)
         self.view.parent.refresh()
     file_dlg.Destroy()
Example #6
0
 def test_save_data(self):
     """Verify save_data function saves NumPy array to disk"""
     sample_filename = "test_savedata.dat"
     sample_path = os.path.join(os.path.dirname(__file__), sample_filename)
     output_path = sample_path + ".hdf5"  # save_data appends the .hdf5 extension
     dataio.save_data(sample_path, self.sample_data)
     self.assertTrue(os.path.exists(output_path))
     froot, ext = os.path.splitext(os.path.basename(sample_filename))
     with h5py.File(output_path, "r") as fidin:
         # Only datasets named after the sample file's root are checked
         matching_keys = [key for key in fidin.keys() if key.startswith(froot)]
         for key in matching_keys:
             stored = fidin[key][...]
             self.assertTrue(np.array_equal(self.sample_data, stored))
     if os.path.exists(output_path):
         os.remove(output_path)
Example #7
0
 def on_save_data(self, evt):
     """Prompts for a destination filename and writes the current data set to disk."""
     initial_dir, initial_file = os.path.split(self.model.data_file)
     file_filter = "NDIToolbox data files (*.hdf5)|*.hdf5|All files (*.*)|*.*"
     file_dlg = wx.FileDialog(self.view,
                              message="Save File As...",
                              defaultDir=initial_dir,
                              defaultFile=initial_file,
                              wildcard=file_filter,
                              style=wx.SAVE | wx.OVERWRITE_PROMPT)
     if file_dlg.ShowModal() == wx.ID_OK:
         # Save under the user-chosen path, then refresh the parent view
         dataio.save_data(file_dlg.GetPath(), self.data)
         self.view.parent.refresh()
     file_dlg.Destroy()
Example #8
0
 def test_save_data(self):
     """Verify save_data function saves NumPy array to disk"""
     sample_filename = "test_savedata.dat"
     sample_path = os.path.join(os.path.dirname(__file__), sample_filename)
     output_path = sample_path + ".hdf5"  # save_data appends the .hdf5 extension
     dataio.save_data(sample_path, self.sample_data)
     self.assertTrue(os.path.exists(output_path))
     froot, ext = os.path.splitext(os.path.basename(sample_filename))
     with h5py.File(output_path, "r") as fidin:
         # Only datasets named after the sample file's root are checked
         matching_keys = [key for key in fidin.keys() if key.startswith(froot)]
         for key in matching_keys:
             stored = fidin[key][...]
             self.assertTrue(np.array_equal(self.sample_data, stored))
     if os.path.exists(output_path):
         os.remove(output_path)
Example #9
0
 def create_datafile(self, ext=".hdf5"):
     """Returns a NamedTemporaryFile containing NumPy data.  Caller responsible for deletion.

     ext - (optional) file extension for the temporary file, defaults to ".hdf5"
     """
     temp_file = tempfile.NamedTemporaryFile(suffix=ext, delete=False)
     # Fix: close the handle immediately - only the name is needed, and an open
     # handle leaks an fd (and blocks reopening by name on Windows)
     temp_file.close()
     rnd_data = [random.uniform(-5, 5) for i in range(11)]
     dataio.save_data(temp_file.name, np.array(rnd_data))
     return temp_file.name
Example #10
0
 def create_datafile(self, ext=".hdf5"):
     """Returns a NamedTemporaryFile containing NumPy data.  Caller responsible for deletion.

     ext - (optional) file extension for the temporary file, defaults to ".hdf5"
     """
     temp_file = tempfile.NamedTemporaryFile(suffix=ext, delete=False)
     # Fix: close the handle immediately - only the name is needed, and an open
     # handle leaks an fd (and blocks reopening by name on Windows)
     temp_file.close()
     rnd_data = [random.uniform(-5, 5) for i in range(11)]
     dataio.save_data(temp_file.name, np.array(rnd_data))
     return temp_file.name
Example #11
0
 def save_data(cls, file_name, data):
     """Saves NumPy array data to the specified file name

     file_name - destination path for the saved data (delegated to dataio.save_data)
     data      - NumPy array to save
     """
     dataio.save_data(file_name, data)
Example #12
0
 def save_data(cls, file_name, data):
     """Saves NumPy array data to the specified file name

     file_name - destination path for the saved data (delegated to dataio.save_data)
     data      - NumPy array to save
     """
     dataio.save_data(file_name, data)