Example #1
 def test_2_dims(self):
     expected = np.vstack((np.tile(np.arange(2),
                                   3), np.repeat(np.arange(3), 2)))
     ret_val = write_utils.make_indices_matrix([2, 3], is_position=False)
     self.assertTrue(np.allclose(expected, ret_val))
     ret_val = write_utils.make_indices_matrix([2, 3], is_position=True)
     self.assertTrue(np.allclose(expected.T, ret_val))
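
The expected arrays above fix the convention: the first dimension cycles fastest, and is_position only decides whether the result has one row per dimension (False) or one row per point (True). Below is a minimal sketch of a function with that behavior; the name make_indices_matrix_sketch and its body are illustrative only and assume nothing about the real implementation beyond what this test asserts.

    import numpy as np

    def make_indices_matrix_sketch(num_steps, is_position=True):
        # Illustrative stand-in for write_utils.make_indices_matrix, matching only
        # the behavior asserted in the test above.
        num_steps = tuple(int(step) for step in num_steps)
        # Fortran ("F") ordering makes the earliest dimension vary fastest.
        flat = np.arange(int(np.prod(num_steps)))
        indices = np.vstack(np.unravel_index(flat, num_steps, order='F'))
        # One row per dimension by default; transpose for one row per point.
        return indices.T if is_position else indices

For [2, 3] this yields [[0, 1, 0, 1, 0, 1], [0, 0, 1, 1, 2, 2]] with is_position=False, exactly the expected array built above from np.tile and np.repeat.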
Example #2
 def test_weird_inputs(self):
     with self.assertRaises(ValueError):
         _ = write_utils.make_indices_matrix([2, 'hello', 3])
Example #3
 def test_matrix_1_dims(self):
     expected = np.expand_dims(np.arange(4), axis=0)
     ret_val = write_utils.make_indices_matrix([4], is_position=False)
     self.assertTrue(np.allclose(expected, ret_val))
     ret_val = write_utils.make_indices_matrix([4], is_position=True)
     self.assertTrue(np.allclose(expected.T, ret_val))
Example #4
 def test_non_int_dim_sizes(self):
     with self.assertRaises(ValueError):
         _ = write_utils.make_indices_matrix([1.233, 2.4, 3])
Example #5
 def test_not_list(self):
     with self.assertRaises(TypeError):
         _ = write_utils.make_indices_matrix(1)
Example #6
 def test_dim_w_val_1(self):
     with self.assertRaises(ValueError):
         _ = write_utils.make_indices_matrix([1, 2, 3])
Example #7
 def test_single_value_dimension_list_input(self):
     expected = np.expand_dims(np.arange(1), axis=0)
     ret_val = write_utils.make_indices_matrix([1], is_position=False)
     self.assertTrue(np.allclose(expected, ret_val))
Example #8
 def test_empty_list(self):
     with self.assertRaises(ValueError):
         _ = write_utils.make_indices_matrix([])
Example #9
 def test_just_size_of_one_dim(self):
     expected = np.expand_dims(np.arange(4), axis=0)
     ret_val = write_utils.make_indices_matrix(4, is_position=False)
     self.assertTrue(np.allclose(expected, ret_val))
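
Taken together, the error-handling tests (Examples #2 and #4 through #9) imply a specific input contract. The sketch below is reverse-engineered from those assertions alone; it is not the library's actual validation code, and the real write_utils.make_indices_matrix may check its input differently.

    import numpy as np

    def _validate_num_steps_sketch(num_steps):
        # Hypothetical checks consistent only with Examples #2 and #4-#9 above.
        if isinstance(num_steps, (int, np.integer)) and num_steps > 1:
            num_steps = [num_steps]              # Example #9: a bare size >= 2 is accepted
        if not isinstance(num_steps, (list, tuple, np.ndarray)):
            raise TypeError('num_steps should be a list / tuple / numpy array')  # Example #5
        if len(num_steps) == 0:
            raise ValueError('num_steps must not be empty')                      # Example #8
        if not all(isinstance(step, (int, np.integer)) for step in num_steps):
            raise ValueError('num_steps must contain only integers')             # Examples #2, #4
        if len(num_steps) > 1 and min(num_steps) < 2:
            raise ValueError('each dimension must have at least 2 steps')        # Examples #6, #7
        return list(num_steps)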
Example #10
    def translate(self, data_filepath, show_plots=True, save_plots=True, do_histogram=False, debug=False):
        """
        The main function that translates the provided file into a .h5 file
        
        Parameters
        ----------
        data_filepath : String / unicode
            Absolute path of the data file (.dat)
        show_plots : Boolean (Optional. Default is True)
            Whether or not to show plots
        save_plots : Boolean (Optional. Default is True)
            Whether or not to save the generated plots
        do_histogram : Boolean (Optional. Default is False)
            Whether or not to generate and save 2D histograms of the raw data
        debug : Boolean (Optional. Default is False)
            Whether or not to print log statements

        Returns
        -------
        h5_path : String / unicode
            Absolute path of the generated .h5 file

        """
        data_filepath = path.abspath(data_filepath)
        # Read the parameter files
        self.debug = debug
        if debug:
            print('BEndfTranslator: Getting file paths')

        parm_filepath, udvs_filepath, parms_mat_path = self._parse_file_path(data_filepath)
        if debug:
            print('BEndfTranslator: Reading Parms text file')

        isBEPS, self.parm_dict = parmsToDict(parm_filepath)
        self.parm_dict['data_type'] = 'BEPSData'
        if not isBEPS:
            warn('This is NOT a BEPS new-data-format dataset!')
            return None

        """ Find out if this is a custom experiment and whether in and out of field were acquired
        For a standard experiment where only in / out field is acquired, zeros are stored
        even for those UDVS steps without band excitation"""
        self.field_mode = self.parm_dict['VS_measure_in_field_loops']
        expt_type = self.parm_dict['VS_mode']
        self.spec_label = getSpectroscopicParmLabel(expt_type)
        std_expt = expt_type in ['DC modulation mode', 'current mode']
        self.halve_udvs_steps = False
        ignored_plt_grps = []
        if std_expt and self.field_mode != 'in and out-of-field':
            self.halve_udvs_steps = True
            if self.field_mode == 'out-of-field':
                ignored_plt_grps = ['in-field']
            else:
                ignored_plt_grps = ['out-of-field']

        h5_path = path.join(self.folder_path, self.basename + '.h5')
        if path.exists(h5_path):
            remove(h5_path)

        if debug:
            print('BEndfTranslator: Preparing to read parms.mat file')
        self.BE_wave, self.BE_wave_rev, self.BE_bin_inds = self.__get_excit_wfm(parms_mat_path)

        if debug:
            print('BEndfTranslator: About to read UDVS file')

        self.udvs_labs, self.udvs_units, self.udvs_mat = self.__read_udvs_table(udvs_filepath)
        # Remove the unused plot group columns before proceeding:
        self.udvs_mat, self.udvs_labs, self.udvs_units = trimUDVS(self.udvs_mat, self.udvs_labs, self.udvs_units,
                                                                  ignored_plt_grps)
        if debug:
            print('BEndfTranslator: Read UDVS file')

        self.num_udvs_steps = self.udvs_mat.shape[0]
        # This is absolutely crucial for reconstructing the data chronologically
        self.excit_type_vec = (self.udvs_mat[:, 4]).astype(int)

        # First figure out how many waveforms are present in the data from the UDVS
        unique_waves = self.__get_unique_wave_types(self.excit_type_vec)
        self.__unique_waves__ = unique_waves
        self.__num_wave_types__ = len(unique_waves)
        # print self.__num_wave_types__, 'different excitation waveforms in this experiment'

        if debug:
            print('BEndfTranslator: Preparing to set up parsers')

        # Preparing objects to parse the file(s)
        parsers = self.__assemble_parsers()

        # Gathering some basic details before parsing the files:
        self.max_pixels = parsers[0].get_num_pixels()
        s_pixels = np.array(parsers[0].get_spatial_pixels())
        self.pos_labels = ['Laser Spot', 'Z', 'Y', 'X']
        self.pos_labels = [self.pos_labels[i] for i in np.where(s_pixels > 1)[0]]
        self.pos_mat = make_indices_matrix(s_pixels[np.argwhere(s_pixels > 1)].squeeze())
        self.pos_units = ['um' for _ in range(len(self.pos_labels))]
        #         self.pos_mat = np.int32(self.pos_mat)

        # Helping Eric out a bit. Remove this section at a later time:
        main_parms = generate_dummy_main_parms()
        # main_parms['grid_size_x'] = self.parm_dict['grid_num_cols']
        # main_parms['grid_size_y'] = self.parm_dict['grid_num_rows']
        main_parms['grid_size_x'] = self.parm_dict['grid_num_rows']
        main_parms['grid_size_y'] = self.parm_dict['grid_num_cols']
        main_parms['experiment_date'] = self.parm_dict['File_date_and_time']
        # assuming that the experiment was completed:        
        main_parms['current_position_x'] = self.parm_dict['grid_num_rows'] - 1
        main_parms['current_position_y'] = self.parm_dict['grid_num_cols'] - 1
        main_parms['data_type'] = 'BEPSData'
        main_parms['translator'] = 'NDF'

        # Writing only the root now:
        spm_data = VirtualGroup('')
        spm_data.attrs = main_parms
        self.hdf = HDFwriter(h5_path)
        # self.hdf.clear()

        # cacheSettings = self.hdf.file.id.get_access_plist().get_cache()

        self.hdf.write(spm_data)

        ########################################################
        # Reading and parsing the .dat file(s) 

        self._read_data(parsers, unique_waves, show_plots, save_plots, do_histogram)

        self.hdf.close()

        return h5_path
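
A hypothetical call to this method might look as follows. The class name is taken from the log messages above ('BEndfTranslator'), the import is omitted because it depends on the surrounding package, and the .dat path is a placeholder.

    # Assumes BEndfTranslator (named per the log messages above) has been imported
    # from its package; the data path below is a placeholder.
    translator = BEndfTranslator()
    h5_path = translator.translate('/path/to/experiment.dat',
                                   show_plots=False, save_plots=False,
                                   do_histogram=False, debug=True)
    if h5_path is not None:
        print('Translated data written to', h5_path)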