Example No. 1
    def test_invalid_inputs(self):
        with self.assertRaises(TypeError):
            _ = write_utils.build_ind_val_matrices("not a list of arrays")

        with self.assertRaises(ValueError):
            _ = write_utils.build_ind_val_matrices(
                [[0, 1], np.random.randint(0, high=5, size=(3, 4))])
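For contrast with the invalid inputs above, a minimal sketch of a valid call, assuming build_ind_val_matrices is reachable through the usid.write_utils namespace used in the other examples (the exact import in this test module is not shown):

import numpy as np
import pyUSID as usid  # assumed import; later examples access usid.write_utils

# valid input: a list/tuple of 1-D arrays, one per dimension, fastest varying first
bias = np.linspace(-2, 2, 5)
cycles = np.arange(3)
inds, vals = usid.write_utils.build_ind_val_matrices([bias, cycles], is_spectral=True)
print(inds.shape, vals.shape)  # expected: (2, 15) -- one row per dimension, one column per step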
Example No. 2
    def make_pos_vals_inds_dims(self):
        x_range = float(self.params_dictionary['XScanRange'])
        y_range = float(self.params_dictionary['YScanRange'])
        x_center = float(self.params_dictionary['xCenter'])
        y_center = float(self.params_dictionary['yCenter'])

        x_start = x_center - (x_range / 2)
        x_end = x_center + (x_range / 2)
        y_start = y_center - (y_range / 2)
        y_end = y_center + (y_range / 2)

        dx = x_range / self.x_len
        dy = y_range / self.y_len
        # assumes y scan direction: down; scan angle: 0 deg
        y_linspace = -np.arange(y_start, y_end, step=dy)
        x_linspace = np.arange(x_start, x_end, step=dx)
        pos_ind, pos_val = write_utils.build_ind_val_matrices(
            unit_values=(x_linspace, y_linspace), is_spectral=False)
        # usid.write_utils.Dimension uses ASCII encoding, which cannot encode
        # the micron symbol, so we replace it, if present, with the letter u.
        pos_dims = [
            usid.write_utils.Dimension(
                'X', self.params_dictionary['XPhysUnit'].replace('\xb5', 'u'),
                self.x_len),
            usid.write_utils.Dimension(
                'Y', self.params_dictionary['YPhysUnit'].replace('\xb5', 'u'),
                self.y_len)
        ]
        self.pos_ind, self.pos_val, self.pos_dims = pos_ind, pos_val, pos_dims
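A quick numeric check of the scan-grid arithmetic above, using hypothetical parameter values rather than anything read from a real file:

import numpy as np

x_center, x_range, x_len = 5.0, 2.0, 4                             # hypothetical values
x_start, x_end = x_center - x_range / 2, x_center + x_range / 2    # 4.0 and 6.0
dx = x_range / x_len                                               # 0.5
x_linspace = np.arange(x_start, x_end, step=dx)                    # [4.0, 4.5, 5.0, 5.5], i.e. x_len points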
Example No. 3
    def test_empty(self):
        inds, vals = write_utils.build_ind_val_matrices([[0]], is_spectral=True)
        self.assertTrue(np.allclose(
            inds, write_utils.INDICES_DTYPE(np.expand_dims(np.arange(1), 0))))
        self.assertTrue(np.allclose(
            vals, write_utils.VALUES_DTYPE(np.expand_dims(np.arange(1), 0))))
Example No. 4
    def test_1D_pos(self):
        sine_val = np.sin(np.linspace(0, 2 * np.pi, 128))
        inds, vals = write_utils.build_ind_val_matrices([sine_val], is_spectral=False)
        self.assertTrue(np.allclose(
            inds, write_utils.INDICES_DTYPE(np.expand_dims(np.arange(len(sine_val)), axis=1))))
        self.assertTrue(np.allclose(
            vals, write_utils.VALUES_DTYPE(np.expand_dims(sine_val, axis=1))))
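Together with the previous test, this pins down the orientation convention: position matrices carry one row per point and one column per dimension, while spectroscopic matrices are transposed. A minimal sketch, assuming the usid.write_utils namespace from the other examples:

import numpy as np
import pyUSID as usid  # assumed import path

sine_val = np.sin(np.linspace(0, 2 * np.pi, 8))
pos_inds, pos_vals = usid.write_utils.build_ind_val_matrices([sine_val], is_spectral=False)
spec_inds, spec_vals = usid.write_utils.build_ind_val_matrices([sine_val], is_spectral=True)
print(pos_inds.shape, spec_inds.shape)  # expected: (8, 1) and (1, 8)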
Example No. 5
    def test_3D(self):
        max_v = 4
        half_pts = 8
        bi_triang = np.roll(np.hstack((np.linspace(-max_v, max_v, half_pts, endpoint=False),
                                       np.linspace(max_v, -max_v, half_pts, endpoint=False))),
                            -half_pts // 2)
        cycles = [0, 1, 2]
        fields = [0, 1]
        exp_vals = np.vstack((np.tile(bi_triang, 6),
                              np.tile(np.repeat(fields, 2 * half_pts), 3),
                              np.repeat(cycles, 2 * 2 * half_pts)))
        exp_inds = np.vstack((np.tile(np.arange(2 * half_pts), 6),
                              np.tile(np.repeat(fields, 2 * half_pts), 3),
                              np.repeat(cycles, 2 * 2 * half_pts)))
        inds, vals = write_utils.build_ind_val_matrices([bi_triang, fields, cycles])
        self.assertTrue(np.allclose(exp_inds, inds))
        self.assertTrue(np.allclose(exp_vals, vals))
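The expected matrices in this test encode the general layout: the fastest dimension is tiled across the whole table, and each slower dimension is repeated by the product of the faster sizes. A numpy-only illustration of that pattern with two small dimensions:

import numpy as np

fast = np.array([10, 20, 30])   # fastest-varying dimension (e.g. bias steps)
slow = np.array([0, 1])         # slowest-varying dimension (e.g. cycles)
spec_inds = np.vstack((np.tile(np.arange(fast.size), slow.size),
                       np.repeat(np.arange(slow.size), fast.size)))
spec_vals = np.vstack((np.tile(fast, slow.size),
                       np.repeat(slow, fast.size)))
# spec_inds -> [[0 1 2 0 1 2]      spec_vals -> [[10 20 30 10 20 30]
#               [0 0 0 1 1 1]]                   [ 0  0  0  1  1  1]]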
Example No. 6
    def make_pos_vals_inds_dims(self):
        x_range = float(self.params_dictionary['XScanRange'])
        y_range = float(self.params_dictionary['YScanRange'])
        x_center = float(self.params_dictionary['xCenter'])
        y_center = float(self.params_dictionary['yCenter'])

        x_start = x_center-(x_range/2); x_end = x_center+(x_range/2)
        y_start = y_center-(y_range/2); y_end = y_center+(y_range/2)

        dx = x_range/self.x_len
        dy = y_range/self.y_len
        # assumes y scan direction: down; scan angle: 0 deg
        y_linspace = -np.arange(y_start, y_end, step=dy)
        x_linspace = np.arange(x_start, x_end, step=dx)
        pos_ind, pos_val = write_utils.build_ind_val_matrices(unit_values=(x_linspace, y_linspace), is_spectral=False)
        # usid.write_utils.Dimension uses ASCII encoding, which cannot encode
        # the micron symbol, so we replace it, if present, with the letter u.
        pos_dims = [usid.write_utils.Dimension('X', self.params_dictionary['XPhysUnit'].replace('\xb5', 'u'), self.x_len),
                    usid.write_utils.Dimension('Y', self.params_dictionary['YPhysUnit'].replace('\xb5', 'u'), self.y_len)]
        self.pos_ind, self.pos_val, self.pos_dims = pos_ind, pos_val, pos_dims
Example No. 7
def make_relaxation_file():
    if os.path.exists(relaxation_path):
        os.remove(relaxation_path)

    with h5py.File(relaxation_path, mode='w') as h5_f:
        h5_meas_grp = h5_f.create_group('Measurement_000')

        num_rows = 2
        num_cols = 11

        pos_inds = np.vstack(
            (np.tile(np.arange(num_cols),
                     num_rows), np.repeat(np.arange(num_rows), num_cols))).T
        # make the values more interesting:
        pos_vals = np.vstack((pos_inds[:, 0] * 50, pos_inds[:, 1] * 1.25)).T

        pos_attrs = {'units': ['nm', 'um'], 'labels': ['X', 'Y']}

        h5_pos_inds = h5_meas_grp.create_dataset('Position_Indices',
                                                 data=pos_inds,
                                                 dtype=np.uint16)
        h5_pos_vals = h5_meas_grp.create_dataset('Position_Values',
                                                 data=pos_vals,
                                                 dtype=np.float32)

        for dset in [h5_pos_inds, h5_pos_vals]:
            write_aux_reg_ref(dset, pos_attrs['labels'], is_spec=False)
            write_string_list_as_attr(dset, pos_attrs)

        spec_attrs = {
            'labels': ['Frequency', 'Repeats', 'DC_Offset', 'Field'],
            'units': ['kHz', 'a. u.', 'V', 'a.u.']
        }

        freq_pts = 3
        repeats = 5
        dc_offsets = 7
        field_inds = 1

        spec_unit_vals = [
            np.linspace(320, 340, freq_pts),
            np.arange(repeats), 3 * np.pi * np.linspace(0, 1, dc_offsets),
            np.array([1, 0])
        ]

        spec_ind_mat, spec_val_mat = build_ind_val_matrices(
            spec_unit_vals[:-1])

        # Manually creating the Field arrays: the unit values start at 1 while the indices start at 0
        field_ind_unit = np.hstack(
            ([0], np.ones(repeats - field_inds, dtype=np.uint16)))
        field_val_unit = np.hstack(
            ([1], np.zeros(repeats - field_inds, dtype=np.uint16)))

        # Manually appending to the indices and values table
        spec_ind_mat = np.vstack((spec_ind_mat,
                                  np.tile(np.repeat(field_ind_unit, freq_pts),
                                          dc_offsets)))

        spec_val_mat = np.vstack((spec_val_mat,
                                  np.tile(np.repeat(field_val_unit, freq_pts),
                                          dc_offsets)))

        spec_unit_vals_dict = dict()
        for dim_ind, dim_unit_vals in enumerate(spec_unit_vals):
            spec_unit_vals_dict['unit_vals_dim_' +
                                str(dim_ind)] = dim_unit_vals

        h5_chan_grp_1 = h5_meas_grp.create_group('Channel_000')
        h5_chan_grp_2 = h5_meas_grp.create_group('Channel_001')

        for h5_chan_grp, add_attribute in zip([h5_chan_grp_1, h5_chan_grp_2],
                                              [False, True]):

            h5_spec_inds = h5_chan_grp.create_dataset('Spectroscopic_Indices',
                                                      data=spec_ind_mat,
                                                      dtype=np.uint16)
            h5_spec_vals = h5_chan_grp.create_dataset('Spectroscopic_Values',
                                                      data=spec_val_mat,
                                                      dtype=np.float32)

            this_spec_attrs = spec_attrs.copy()
            if add_attribute:
                this_spec_attrs.update({'dependent_dimensions': ['Field']})

            for dset in [h5_spec_inds, h5_spec_vals]:
                write_aux_reg_ref(dset, spec_attrs['labels'], is_spec=True)
                write_string_list_as_attr(dset, this_spec_attrs)
                # Write the unit values as attributes - testing purposes only:
                write_safe_attrs(dset, spec_unit_vals_dict)

            h5_main = h5_chan_grp.create_dataset(
                'Raw_Data',
                data=np.random.rand(num_rows * num_cols,
                                    freq_pts * repeats * dc_offsets),
                dtype=np.float32)

            # Write mandatory attributes:
            write_safe_attrs(h5_main, {
                'units': 'V',
                'quantity': 'Cantilever Deflection'
            })

            # Link ancillary
            for dset in [h5_pos_inds, h5_pos_vals, h5_spec_inds, h5_spec_vals]:
                h5_main.attrs[dset.name.split('/')[-1]] = dset.ref
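The key step in this helper is the dependent 'Field' dimension: build_ind_val_matrices is called only for the independent dimensions, and the Field row is stacked on by hand because its pattern follows the Repeats dimension. A condensed sketch of just that step, using the same sizes as above and assuming the usid.write_utils namespace:

import numpy as np
import pyUSID as usid  # assumed import; the helper above imports build_ind_val_matrices directly

freq_pts, repeats, dc_offsets = 3, 5, 7
spec_ind_mat, spec_val_mat = usid.write_utils.build_ind_val_matrices(
    [np.linspace(320, 340, freq_pts), np.arange(repeats),
     3 * np.pi * np.linspace(0, 1, dc_offsets)])

# Field is 1 only for the first repeat of each cycle, so its per-cycle unit pattern
# is repeated over frequency and tiled over DC offsets by hand
field_ind_unit = np.hstack(([0], np.ones(repeats - 1, dtype=np.uint16)))
field_val_unit = np.hstack(([1], np.zeros(repeats - 1, dtype=np.uint16)))
spec_ind_mat = np.vstack((spec_ind_mat, np.tile(np.repeat(field_ind_unit, freq_pts), dc_offsets)))
spec_val_mat = np.vstack((spec_val_mat, np.tile(np.repeat(field_val_unit, freq_pts), dc_offsets)))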
Example No. 8
def build_ind_val_dsets(dimensions,
                        is_spectral=True,
                        verbose=False,
                        base_name=None):
    """
    Creates VirtualDatasets for the position or spectroscopic indices and values of the data.
    Remember that the contents of the datasets can be changed after creation if need be.
    For example, if one of the spectroscopic dimensions (e.g. Bias) were sinusoidal rather than linear, the specific
    dimension in the Spectroscopic_Values dataset can be manually overwritten.

    Parameters
    ----------
    dimensions : Dimension or array-like of Dimension objects
        Sequence of Dimension objects that provides all necessary instructions for constructing the indices and values
        datasets
    is_spectral : bool, optional. default = True
        Spectroscopic (True) or Position (False)
    verbose : Boolean, optional
        Whether or not to print statements for debugging purposes
    base_name : str / unicode, optional
        Prefix for the datasets. Default: 'Position\_' when is_spectral is False, 'Spectroscopic\_' otherwise

    Returns
    -------
    ds_inds : VirtualDataset
            Position or Spectroscopic indices dataset
    ds_vals : VirtualDataset
            Position or Spectroscopic values dataset

    Notes
    -----
    `steps`, `initial_values`, `labels`, and `units` must be the same length as
    `dimensions` when they are specified.

    Dimensions should be in the order from fastest varying to slowest.
    """

    warn(
        'build_ind_val_dsets is available only for legacy purposes and will be REMOVED in a future release.\n'
        'Please consider using write_ind_val_dsets in hdf_utils instead',
        DeprecationWarning)

    if isinstance(dimensions, Dimension):
        dimensions = [dimensions]
    if not isinstance(dimensions, (list, np.ndarray, tuple)):
        raise TypeError('dimensions should be array-like ')
    if not np.all([isinstance(x, Dimension) for x in dimensions]):
        raise TypeError('dimensions should be a sequence of Dimension objects')

    if base_name is not None:
        if not isinstance(base_name, (str, unicode)):
            raise TypeError('base_name should be a string')
        if not base_name.endswith('_'):
            base_name += '_'
    else:
        base_name = 'Position_'
        if is_spectral:
            base_name = 'Spectroscopic_'

    unit_values = [x.values for x in dimensions]

    indices, values = build_ind_val_matrices(unit_values,
                                             is_spectral=is_spectral)

    if verbose:
        print('Indices:')
        print(indices)
        print('Values:')
        print(values)

    # Create the slices that will define the labels
    region_slices = get_aux_dset_slicing([x.name for x in dimensions],
                                         is_spectroscopic=is_spectral)

    # Create the VirtualDataset for both Indices and Values
    ds_indices = VirtualDataset(base_name + 'Indices',
                                indices,
                                dtype=INDICES_DTYPE)
    ds_values = VirtualDataset(base_name + 'Values',
                               VALUES_DTYPE(values),
                               dtype=VALUES_DTYPE)

    for dset in [ds_indices, ds_values]:
        dset.attrs['labels'] = region_slices
        dset.attrs['units'] = [x.units for x in dimensions]

    return ds_indices, ds_values
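Since build_ind_val_dsets simply unpacks the values attribute of each Dimension and delegates to build_ind_val_matrices, equivalent usage can be sketched with Dimension objects; the import path below is assumed to mirror the usid.write_utils namespace used in the earlier examples:

import numpy as np
import pyUSID as usid  # assumed import path

dims = [usid.write_utils.Dimension('Bias', 'V', np.linspace(-2, 2, 9)),
        usid.write_utils.Dimension('Cycle', 'a.u.', np.arange(3))]
unit_values = [d.values for d in dims]   # exactly what build_ind_val_dsets does internally
inds, vals = usid.write_utils.build_ind_val_matrices(unit_values, is_spectral=True)
# inds/vals can then be written into Spectroscopic_Indices / Spectroscopic_Values datasets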
Example No. 9
def build_ind_val_dsets(dimensions, is_spectral=True, verbose=False, base_name=None):
    """
    Creates VirtualDatasets for the position or spectroscopic indices and values of the data.
    Remember that the contents of the datasets can be changed after creation if need be.
    For example, if one of the spectroscopic dimensions (e.g. Bias) were sinusoidal rather than linear, the specific
    dimension in the Spectroscopic_Values dataset can be manually overwritten.

    Parameters
    ----------
    dimensions : Dimension or array-like of Dimension objects
        Sequence of Dimension objects that provides all necessary instructions for constructing the indices and values
        datasets
    is_spectral : bool, optional. default = True
        Spectroscopic (True) or Position (False)
    verbose : Boolean, optional
        Whether or not to print statements for debugging purposes
    base_name : str / unicode, optional
        Prefix for the datasets. Default: 'Position\_' when is_spectral is False, 'Spectroscopic\_' otherwise

    Returns
    -------
    ds_inds : VirtualDataset
            Position or Spectroscopic indices dataset
    ds_vals : VirtualDataset
            Position or Spectroscopic values dataset

    Notes
    -----
    `steps`, `initial_values`, `labels`, and `units` must be the same length as
    `dimensions` when they are specified.

    Dimensions should be in the order from fastest varying to slowest.
    """

    warn('build_ind_val_dsets is available only for legacy purposes and will be REMOVED in a future release.\n'
         'Please consider using write_ind_val_dsets in hdf_utils instead', DeprecationWarning)

    if isinstance(dimensions, Dimension):
        dimensions = [dimensions]
    if not isinstance(dimensions, (list, np.ndarray, tuple)):
        raise TypeError('dimensions should be array-like ')
    if not np.all([isinstance(x, Dimension) for x in dimensions]):
        raise TypeError('dimensions should be a sequence of Dimension objects')

    if base_name is not None:
        if not isinstance(base_name, (str, unicode)):
            raise TypeError('base_name should be a string')
        if not base_name.endswith('_'):
            base_name += '_'
    else:
        base_name = 'Position_'
        if is_spectral:
            base_name = 'Spectroscopic_'

    unit_values = [x.values for x in dimensions]

    indices, values = build_ind_val_matrices(unit_values, is_spectral=is_spectral)

    if verbose:
        print('Indices:')
        print(indices)
        print('Values:')
        print(values)

    # Create the slices that will define the labels
    region_slices = get_aux_dset_slicing([x.name for x in dimensions], is_spectroscopic=is_spectral)

    # Create the VirtualDataset for both Indices and Values
    ds_indices = VirtualDataset(base_name + 'Indices', indices, dtype=INDICES_DTYPE)
    ds_values = VirtualDataset(base_name + 'Values', VALUES_DTYPE(values), dtype=VALUES_DTYPE)

    for dset in [ds_indices, ds_values]:
        dset.attrs['labels'] = region_slices
        dset.attrs['units'] = [x.units for x in dimensions]

    return ds_indices, ds_values