Example #1
 def test_illegal_key(self):
     with h5py.File(data_utils.std_beps_path, mode='r') as h5_f:
         h5_inds = h5_f['/Raw_Measurement/Spectroscopic_Indices']
         h5_vals = h5_f['/Raw_Measurement/Spectroscopic_Values']
         with self.assertRaises(KeyError):
             _ = hdf_utils.get_unit_values(
                 h5_inds, h5_vals, dim_names=['Cycle', 'Does not exist'])
Example #2
 def test_illegal_dset(self):
     with h5py.File(data_utils.std_beps_path, mode='r') as h5_f:
         h5_inds = h5_f['/Raw_Measurement/Spectroscopic_Indices']
         h5_vals = h5_f['/Raw_Measurement/Ancillary']
         with self.assertRaises(ValueError):
             _ = hdf_utils.get_unit_values(h5_inds,
                                           h5_vals,
                                           dim_names=['Cycle', 'Bias'])
Example #3
    def test_all_dim_names_not_provided(self):
        with h5py.File(data_utils.std_beps_path, mode='r') as h5_f:
            h5_inds = h5_f['/Raw_Measurement/Position_Indices'][()]
            h5_vals = h5_f['/Raw_Measurement/Position_Values'][()]

            with self.assertRaises(TypeError):
                _ = hdf_utils.get_unit_values(h5_inds,
                                              h5_vals,
                                              dim_names=['Y'])
Example #4
    def test_sparse_samp_w_attr(self):
        # What should the user expect this function to do? Throw an error.
        with h5py.File(data_utils.sparse_sampling_path, mode='r') as h5_f:
            h5_inds = h5_f['/Measurement_000/Channel_001/Position_Indices']
            h5_vals = h5_f['/Measurement_000/Channel_001/Position_Values']

            with self.assertRaises(ValueError):
                _ = hdf_utils.get_unit_values(h5_inds,
                                              h5_vals,
                                              dim_names=['Y'])
Example #5
 def test_dependent_dim(self):
     with h5py.File(data_utils.relaxation_path, mode='r') as h5_f:
         h5_inds = h5_f[
             '/Measurement_000/Channel_000/Spectroscopic_Indices']
         h5_vals = h5_f['/Measurement_000/Channel_000/Spectroscopic_Values']
         spec_dim_names = hdf_utils.get_attr(h5_inds, 'labels')
         ret_dict = hdf_utils.get_unit_values(h5_inds, h5_vals)
         for dim_ind, dim_name in enumerate(spec_dim_names):
             exp_val = hdf_utils.get_attr(h5_inds,
                                          'unit_vals_dim_' + str(dim_ind))
             act_val = ret_dict[dim_name]
             self.assertTrue(np.allclose(exp_val, act_val))
Example #6
 def test_source_pos_single(self):
     with h5py.File(data_utils.std_beps_path, mode='r') as h5_f:
         h5_inds = h5_f['/Raw_Measurement/Position_Indices']
         h5_vals = h5_f['/Raw_Measurement/Position_Values']
         num_rows = 3
         expected = {'Y': np.float16(np.arange(num_rows) * 1.25)}
         ret_val = hdf_utils.get_unit_values(h5_inds,
                                             h5_vals,
                                             dim_names='Y')
         self.assertEqual(len(expected), len(ret_val))
         for key, exp in expected.items():
             self.assertTrue(np.allclose(exp, ret_val[key]))
Example #7
    def test_incomp_dim_no_attr(self):
        # What should the user expect this function to do? Throw an error.
        # Given that the unit values differ from tile to tile, it should throw a ValueError for X.
        # Even though we know Y is incomplete, the function would not notice that when asked only
        # about Y, since it would not be looking at X. However, the function now automatically
        # finds unit values for ALL dimensions precisely to catch such scenarios.
        # (See the short NumPy illustration after this example.)
        with h5py.File(data_utils.incomplete_measurement_path,
                       mode='r') as h5_f:
            h5_inds = h5_f['/Measurement_000/Channel_000/Position_Indices']
            h5_vals = h5_f['/Measurement_000/Channel_000/Position_Values']

            with self.assertRaises(ValueError):
                _ = hdf_utils.get_unit_values(h5_inds, h5_vals)

            with self.assertRaises(ValueError):
                _ = hdf_utils.get_unit_values(h5_inds,
                                              h5_vals,
                                              dim_names=['X'])

            with self.assertRaises(ValueError):
                _ = hdf_utils.get_unit_values(h5_inds,
                                              h5_vals,
                                              dim_names=['Y'])
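
The comments above amount to a simple counting argument: when the raster stops partway through the last row, the X column of the indices matrix no longer holds a whole number of complete X cycles, so no single unit vector describes every tile. A minimal NumPy illustration of that situation (synthetic arrays, not the actual incomplete_measurement file):

import numpy as np

full_x = np.arange(4)                    # one complete X line has 4 steps
x_inds = np.hstack([np.tile(full_x, 2),  # two complete Y rows ...
                    full_x[:3]])         # ... plus a truncated third row
print(x_inds.size % full_x.size)         # 3 -> the column cannot be split into equal tiles
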
Example #8
 def test_sparse_samp_no_attr(self):
     # What should the user expect this function to do? Throw an error.
     # However, without the attribute, this function has no idea that it is looking at a
     # sparse-sampling case, so it will blindly return the first and second columns of vals.
     with h5py.File(data_utils.sparse_sampling_path, mode='r') as h5_f:
         h5_inds = h5_f['/Measurement_000/Channel_000/Position_Indices']
         h5_vals = h5_f['/Measurement_000/Channel_000/Position_Values']
         dim_names = hdf_utils.get_attr(h5_inds, 'labels')
         ret_dict = hdf_utils.get_unit_values(h5_inds, h5_vals)
         for dim_ind, dim_name in enumerate(dim_names):
             exp_val = h5_vals[:, dim_ind]
             act_val = ret_dict[dim_name]
             self.assertTrue(np.allclose(exp_val, act_val))
Example #9
 def test_source_spec_all(self):
     with h5py.File(data_utils.std_beps_path, mode='r') as h5_f:
         h5_inds = h5_f['/Raw_Measurement/Spectroscopic_Indices']
         h5_vals = h5_f['/Raw_Measurement/Spectroscopic_Values']
         num_cycle_pts = 7
         expected = {
             'Bias':
             np.float32(2.5 * np.sin(
                 np.linspace(0, np.pi, num_cycle_pts, endpoint=False))),
             'Cycle': [0., 1.]
         }
         ret_val = hdf_utils.get_unit_values(h5_inds, h5_vals)
         self.assertEqual(len(expected), len(ret_val))
         for key, exp in expected.items():
             self.assertTrue(np.allclose(exp, ret_val[key]))
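
For readers unfamiliar with the ancillary-matrix layout these tests rely on, here is a minimal sketch of the same call on in-memory arrays. The import path and the synthetic Bias/Cycle matrices are assumptions for illustration, and, as Example #3 shows, plain NumPy inputs need all_dim_names spelled out explicitly:

import numpy as np
from pyUSID.io import hdf_utils  # assumed import path for get_unit_values

num_cycle_pts, num_cycles = 7, 2
bias = 2.5 * np.sin(np.linspace(0, np.pi, num_cycle_pts, endpoint=False))

# Spectroscopic matrices: one row per dimension, one column per spectral point
spec_inds = np.vstack([np.tile(np.arange(num_cycle_pts), num_cycles),      # Bias index
                       np.repeat(np.arange(num_cycles), num_cycle_pts)])   # Cycle index
spec_vals = np.vstack([np.tile(bias, num_cycles),
                       np.repeat(np.arange(num_cycles, dtype=float), num_cycle_pts)])

unit_vals = hdf_utils.get_unit_values(spec_inds, spec_vals,
                                      all_dim_names=['Bias', 'Cycle'])
# Expected, mirroring Example #9: {'Bias': bias, 'Cycle': array([0., 1.])}
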
Example #10
    def _read_data_chunk(self):
        """
        Returns the next chunk of data for the guess or the fit
        """

        # The Process class should take care of all the basic reading
        super(BELoopProjector, self)._read_data_chunk()

        if self.data is None:
            # Nothing we can do at this point
            return

        if self.verbose and self.mpi_rank == 0:
            print('BELoopProjector got raw data of shape {} from super'
                  '.'.format(self.data.shape))
        """
        Now self.data contains data for N pixels. 
        The challenge is that this may contain M FORC cycles 
        Each FORC cycle needs its own V DC vector
        So, we can't blindly use the inherited unit_compute. 
        Our variables now are Position, Vdc, FORC, all others

        We want M lists of [VDC x all other variables]

        The challenge is that VDC and FORC are inner dimensions - 
        neither the fastest nor the slowest (guaranteed)
        """

        spec_dim_order_s2f = get_sort_order(self.h5_main.h5_spec_inds)[::-1]

        # order_to_s2f = list(pos_dim_order_s2f) + list( len(pos_dim_order_s2f) + spec_dim_order_s2f)
        order_to_s2f = [0] + list(1 + spec_dim_order_s2f)
        print('Order for reshaping to S2F: {}'.format(order_to_s2f))

        self._dim_labels_s2f = list(['Positions']) + list(
            np.array(self.h5_main.spec_dim_labels)[spec_dim_order_s2f])

        print(self._dim_labels_s2f, order_to_s2f)

        self._num_forcs = int(
            any([
                targ in self.h5_main.spec_dim_labels
                for targ in ['FORC', 'FORC_Cycle']
            ]))
        if self._num_forcs:
            forc_pos = self.h5_main.spec_dim_labels.index(self._forc_dim_name)
            self._num_forcs = self.h5_main.spec_dim_sizes[forc_pos]
        print('Num FORCS: {}'.format(self._num_forcs))

        all_but_forc_rows = []
        for ind, dim_name in enumerate(self.h5_main.spec_dim_labels):
            if dim_name not in ['FORC', 'FORC_Cycle', 'FORC_repeat']:
                all_but_forc_rows.append(ind)
        print('All but FORC rows: {}'.format(all_but_forc_rows))

        dc_mats = []

        forc_mats = []

        num_reps = 1 if self._num_forcs == 0 else self._num_forcs
        for forc_ind in range(num_reps):
            print('')
            print('Working on FORC #{}'.format(forc_ind))
            if self._num_forcs:
                this_forc_spec_inds = np.where(
                    self.h5_main.h5_spec_inds[forc_pos] == forc_ind)[0]
            else:
                this_forc_spec_inds = np.ones(
                    shape=self.h5_main.h5_spec_inds.shape[1], dtype=bool)

            if self._num_forcs:
                this_forc_dc_vec = get_unit_values(
                    self.h5_main.h5_spec_inds[all_but_forc_rows]
                    [:, this_forc_spec_inds],
                    self.h5_main.h5_spec_vals[all_but_forc_rows]
                    [:, this_forc_spec_inds],
                    all_dim_names=list(
                        np.array(
                            self.h5_main.spec_dim_labels)[all_but_forc_rows]),
                    dim_names=self._fit_dim_name)
            else:
                this_forc_dc_vec = get_unit_values(
                    self.h5_main.h5_spec_inds,
                    self.h5_main.h5_spec_vals,
                    dim_names=self._fit_dim_name)
            this_forc_dc_vec = this_forc_dc_vec[self._fit_dim_name]
            dc_mats.append(this_forc_dc_vec)

            this_forc_2d = self.h5_main[:, this_forc_spec_inds]
            print('2D slice shape for this FORC: {}'.format(
                this_forc_2d.shape))
            """
            this_forc_nd, success = reshape_to_n_dims(this_forc_2d, 
                                                      h5_pos=self.h5_main.h5_pos_inds[:,:], # This line will need to change
                                                      h5_spec=self.h5_main.h5_spec_inds[:, this_forc_spec_inds])
            """
            this_forc_nd, success = reshape_to_n_dims(
                this_forc_2d,
                h5_pos=None,
                # This line will need to change
                h5_spec=self.h5_main.h5_spec_inds[:, this_forc_spec_inds])
            print(this_forc_nd.shape)

            this_forc_nd_s2f = this_forc_nd.transpose(
                order_to_s2f).squeeze()  # squeeze out FORC
            dim_names_s2f = self._dim_labels_s2f.copy()
            if self._num_forcs > 0:
                dim_names_s2f.remove(
                    self._forc_dim_name
                )  # because it was never there in the first place.
            print('Reordered to S2F: {}, {}'.format(this_forc_nd_s2f.shape,
                                                    dim_names_s2f))

            rest_dc_order = list(range(len(dim_names_s2f)))
            _dc_ind = dim_names_s2f.index(self._fit_dim_name)
            rest_dc_order.remove(_dc_ind)
            rest_dc_order = rest_dc_order + [_dc_ind]
            print('Transpose for reordering to rest, DC: {}'.format(
                rest_dc_order))

            rest_dc_nd = this_forc_nd_s2f.transpose(rest_dc_order)
            rest_dc_names = list(np.array(dim_names_s2f)[rest_dc_order])

            self._pre_flattening_shape = list(rest_dc_nd.shape)
            self._pre_flattening_dim_name_order = list(rest_dc_names)

            print('After reordering: {}, {}'.format(rest_dc_nd.shape,
                                                   rest_dc_names))

            dc_rest_2d = rest_dc_nd.reshape(np.prod(rest_dc_nd.shape[:-1]),
                                            np.prod(rest_dc_nd.shape[-1]))
            print('Shape after flattening to 2D: {}'.format(dc_rest_2d.shape))
            forc_mats.append(dc_rest_2d)


        self.data = forc_mats, dc_mats
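
The last few steps above (transpose so that the fit dimension sits on the last axis, then flatten everything else into rows) are easier to see on a toy array. A minimal NumPy sketch with made-up dimension names and shapes:

import numpy as np

dim_names = ['Positions', 'Cycle', 'DC_Offset']  # slow-to-fast; FORC already squeezed out
data_nd = np.random.rand(4, 2, 7)                # 4 positions x 2 cycles x 7 DC steps

order = [ind for ind, name in enumerate(dim_names) if name != 'DC_Offset']
order += [dim_names.index('DC_Offset')]          # fit dimension goes last
rest_dc_nd = data_nd.transpose(order)

dc_rest_2d = rest_dc_nd.reshape(-1, rest_dc_nd.shape[-1])
print(dc_rest_2d.shape)                          # (8, 7): one DC sweep per row
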
Example #11
    def _get_dc_offsets(h5_spec_inds,
                        h5_spec_vals,
                        fit_dim_name,
                        forc_dim_name,
                        verbose=False):
        # FORC is the decider whether or not DC_Offset changes.
        # FORC_Repeats etc. should not matter
        spec_unit_vals = get_unit_values(h5_spec_inds,
                                         h5_spec_vals,
                                         verbose=False)
        if forc_dim_name not in spec_unit_vals.keys():
            if verbose:
                print(
                    'This is not a FORC dataset. Just taking unit values for DC Offset'
                )
            dc_val_mat = np.expand_dims(spec_unit_vals[fit_dim_name], axis=0)
        else:
            # Reshape the Spec values matrix into an N dimensional array
            if verbose:
                print(
                    'This is a FORC dataset. Reshaping Spectroscopic Values to N dimensions'
                )
            ret_vals = reshape_to_n_dims(h5_spec_vals,
                                         np.expand_dims(np.arange(
                                             h5_spec_vals.shape[0]),
                                                        axis=1),
                                         h5_spec_inds,
                                         get_labels=True)
            spec_vals_nd, success, spec_nd_labels = ret_vals

            if not success:
                raise ValueError(
                    'Unable to reshape Spectroscopic values to get DC offsets for each FORC'
                )

            # We will be using "in" quite a bit. So convert to list
            spec_nd_labels = list(spec_nd_labels)

            if verbose:
                print('Reshaped Spectroscopic Values to: {}'.format(
                    spec_vals_nd.shape))
                print(
                    'Spectroscopic dimension names: {}'.format(spec_nd_labels))

            # Note the indices of all other dimensions
            all_other_dims = set(range(len(spec_nd_labels))) - \
                             set([spec_nd_labels.index(fit_dim_name),
                                  spec_nd_labels.index(forc_dim_name)])
            # Set up a new order where FORC is at 0 and DC is at 1 and all
            # other dimensions (useless) follow
            new_order = [
                spec_nd_labels.index(forc_dim_name),
                spec_nd_labels.index(fit_dim_name)
            ] + list(all_other_dims)
            if verbose:
                print('Will transpose this N-dim matrix as: {}'.format(
                    new_order))

            # Apply this new order to the matrix and the labels
            spec_vals_nd = spec_vals_nd.transpose(new_order)
            spec_nd_labels = np.array(spec_nd_labels)[new_order]
            if verbose:
                print('After transpose shape and names:\n\t{}\n\t{}'.format(
                    spec_vals_nd.shape, spec_nd_labels))

            # Now remove all other dimensions using a tuple of slices
            # (indexing with a plain list of slices is deprecated in NumPy; a tuple is required):
            keep_inds = (slice(None), slice(None)) + \
                        (slice(0, 1),) * len(all_other_dims)
            # Don't forget to remove singular dimensions using squeeze
            dc_val_mat = spec_vals_nd[keep_inds].squeeze()
            # Unnecessary but let's keep track of dimension names anyway
            spec_nd_labels = spec_nd_labels[:2]
            if verbose:
                print(
                    'After removing all other dimensions. Shape is: {} and dimensions are: {}'
                    .format(dc_val_mat.shape, spec_nd_labels))

        return dc_val_mat
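
The transpose-plus-slice trick at the end of _get_dc_offsets, reduced to a standalone NumPy sketch (the labels and shape below are invented for illustration): bring the FORC and DC axes to the front, keep only the first element along every remaining axis, then squeeze.

import numpy as np

labels = ['Frequency', 'DC_Offset', 'FORC', 'Cycle']  # assumed ordering
spec_vals_nd = np.random.rand(3, 7, 2, 4)             # shaped to match the labels

new_order = [labels.index('FORC'), labels.index('DC_Offset')]
new_order += [ind for ind in range(len(labels)) if ind not in new_order]
transposed = spec_vals_nd.transpose(new_order)        # (2, 7, 3, 4)

keep_inds = (slice(None), slice(None)) + (slice(0, 1),) * (transposed.ndim - 2)
dc_val_mat = transposed[keep_inds].squeeze()
print(dc_val_mat.shape)                               # (2, 7): one DC vector per FORC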