Example No. 1
    def test_writing_and_reading_dicts_to_hdf5_int_keys(self):
        test_dict = {
            0: {
                "name": "I",
                "theta": 0,
                "phi": 0,
                "type": "ge"
            },
            1: {
                "name": "rX180",
                "theta": 180,
                "phi": 0,
                "type": "ge"
            }
        }
        data_object = h5d.Data(name='test_object', datadir=self.datadir)
        h5d.write_dict_to_hdf5(test_dict, data_object)
        data_object.close()
        filepath = data_object.filepath

        new_dict = {}
        opened_hdf5_file = h5py.File(filepath, 'r')
        h5d.read_dict_from_hdf5(new_dict, opened_hdf5_file)

        self.assertEqual(test_dict.keys(), new_dict.keys())
        self.assertEqual(test_dict[0], new_dict[0])
Example No. 2
    def test_storing_and_loading_station_snapshot(self):
        """
        Stores and loads back a station (instrument) snapshot.
        """

        self.mock_parabola_2.x(1)
        self.mock_parabola_2.y(2.245)
        self.mock_parabola_2.array_like(np.linspace(0, 11, 23))

        snap = self.station.snapshot(update=True)
        data_object = h5d.Data(name='test_object_snap', datadir=self.datadir)
        h5d.write_dict_to_hdf5(snap, data_object)
        data_object.close()
        filepath = data_object.filepath

        new_dict = {}
        opened_hdf5_file = h5py.File(filepath, 'r')
        h5d.read_dict_from_hdf5(new_dict, opened_hdf5_file)

        self.assertEqual(snap.keys(), new_dict.keys())
        self.assertEqual(snap['instruments'].keys(),
                         new_dict['instruments'].keys())
        mock_parab_pars = snap['instruments']['mock_parabola_2']['parameters']

        self.assertEqual(mock_parab_pars['x']['value'], 1)
        self.assertEqual(mock_parab_pars['y']['value'], 2.245)
        np.testing.assert_array_equal(mock_parab_pars['array_like']['value'],
                                      np.linspace(0, 11, 23))
Example No. 3
    def test_save_exp_metadata(self):
        metadata_dict = {
            'intParam': 1,
            'floatParam': 2.5e-3,
            'strParam': 'spam',
            'listParam': [1, 2, 3, 4],
            'arrayParam': np.array([4e5, 5e5]),
            'dictParam': {'a': 1, 'b': 2},
            'tupleParam': (3, 'c')
        }

        old_a_tools_datadir = a_tools.datadir
        a_tools.datadir = self.MC.datadir()

        sweep_pts = np.linspace(0, 10, 30)
        self.MC.set_sweep_function(None_Sweep())
        self.MC.set_sweep_points(sweep_pts)
        self.MC.set_detector_function(det.Dummy_Detector_Soft())
        self.MC.run('test_exp_metadata', exp_metadata=metadata_dict)
        a = ma.MeasurementAnalysis(label='test_exp_metadata', auto=False)

        a_tools.datadir = old_a_tools_datadir

        loaded_dict = read_dict_from_hdf5(
            {}, a.data_file['Experimental Data']['Experimental Metadata'])

        np.testing.assert_equal(metadata_dict, loaded_dict)
Example No. 4
    def test_save_exp_metadata(self):
        metadata_dict = {
            "intParam": 1,
            "floatParam": 2.5e-3,
            "strParam": "spam",
            "listParam": [1, 2, 3, 4],
            "arrayParam": np.array([4e5, 5e5]),
            "dictParam": {
                "a": 1,
                "b": 2
            },
            "tupleParam": (3, "c"),
        }

        old_a_tools_datadir = a_tools.datadir
        a_tools.datadir = self.MC.datadir()

        sweep_pts = np.linspace(0, 10, 30)
        self.MC.set_sweep_function(None_Sweep())
        self.MC.set_sweep_points(sweep_pts)
        self.MC.set_detector_function(det.Dummy_Detector_Soft())
        self.MC.run("test_exp_metadata", exp_metadata=metadata_dict)
        a = ma.MeasurementAnalysis(label="test_exp_metadata", auto=False)

        a_tools.datadir = old_a_tools_datadir

        loaded_dict = read_dict_from_hdf5(
            {}, a.data_file["Experimental Data"]["Experimental Metadata"])

        np.testing.assert_equal(metadata_dict, loaded_dict)
Example No. 5
def test_wr_rd_hdf5_array():
    datadir = os.path.join(pq.__path__[0], 'tests', 'test_data')
    test_dict = {
        'x': np.linspace(0, 1, 14),
        'y': np.cos(np.linspace(0, 2*np.pi, 11))}
    data_object = h5d.Data(name='test_object', datadir=datadir)
    h5d.write_dict_to_hdf5(test_dict, data_object)
    data_object.close()
    filepath = data_object.filepath

    new_dict = {}
    opened_hdf5_file = h5py.File(filepath, 'r')
    h5d.read_dict_from_hdf5(new_dict, opened_hdf5_file)

    assert test_dict.keys() == new_dict.keys()
    np.testing.assert_allclose(test_dict['x'], new_dict['x'])
    np.testing.assert_allclose(test_dict['y'], new_dict['y'])
Example No. 6
def get_param_from_metadata_group(timestamp=None, param_name=None, file_id=None,
                                  data_file=None, close_file=True, mode='r'):
    """
    Get a parameter with param_name from the Experimental Metadata group in
    the HDF5 file specified by timestamp, or return the whole group if
    param_name is None.
    :param timestamp: (str) measurement timestamp of the form YYYYMMDD_hhmmss
    :param param_name: (str) name of a key in the Experimental Metadata group
    :param file_id: (str) optional file id passed on to
        a_tools.measurement_filename to select a specific file in the folder
    :param data_file: (HDF file) already-opened HDF5 file
    :param close_file: (bool) whether to close the HDF5 file
    :param mode: (str) mode in which to open the HDF5 file (default: 'r')
    :return: the value of param_name, or the whole experimental metadata
        dictionary if param_name is None
    """
    if data_file is None:
        if timestamp is None:
            raise ValueError('Please provide either timestamp or data_file.')
        folder = a_tools.get_folder(timestamp)
        h5filepath = a_tools.measurement_filename(folder, file_id=file_id)
        data_file = h5py.File(h5filepath, mode)

    try:
        if param_name is None:
            group = data_file['Experimental Data']
            return read_dict_from_hdf5({}, group['Experimental Metadata'])

        group = data_file['Experimental Data']['Experimental Metadata']
        if param_name in group:
            group = group[param_name]
            param_value = OrderedDict()
            if isinstance(group, h5py._hl.dataset.Dataset):
                param_value = list(np.array(group).flatten())
                param_value = [x.decode('utf-8') if isinstance(x, bytes)
                               else x for x in param_value]
            else:
                param_value = read_dict_from_hdf5(param_value, group)
        elif param_name in group.attrs:
            param_value = get_hdf_param_value(group, param_name)
        else:
            raise KeyError(f'{param_name} was not found in metadata.')
        if close_file:
            data_file.close()
    except Exception as e:
        data_file.close()
        raise e
    return param_value
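A minimal usage sketch for get_param_from_metadata_group; the timestamp and the
'sweep_points' key below are hypothetical placeholders, not values from a real
data directory.

# Whole Experimental Metadata group as a dict (placeholder timestamp):
metadata = get_param_from_metadata_group(timestamp='20230101_120000')
# A single (hypothetical) key from that group:
sweep_points = get_param_from_metadata_group(
    timestamp='20230101_120000', param_name='sweep_points')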
Example No. 7
def load_settings_onto_instrument_v2(
    instrument,
    load_from_instr: str = None,
    label: str = "",
    filepath: str = None,
    timestamp: str = None,
    ignore_pars: set = None,
):
    """
    Loads settings from an HDF5 file onto the instrument handed to the
    function. By default the last HDF5 file in the data directory is used;
    a different settings file can be selected via label, timestamp or
    filepath.

    Args:
        instrument (instrument) : instrument onto which settings should be
            loaded
        load_from_instr (str) : optional name of another instrument from
            which to load the settings.
        label (str)           : label used for finding the last datafile
        filepath (str)        : exact filepath of the hdf5 file to load.
            if filepath is specified, this takes precedence over the file
            locating options (label, timestamp etc.).
        timestamp (str)       : timestamp of file in the datadir
        ignore_pars (set)     : parameter names that should not be set on
            the instrument
    """

    older_than = None
    folder = None
    instrument_name = instrument.name
    success = False
    count = 0
    # Will try multiple times in case the last measurements failed and
    # created corrupt data files.
    while success is False and count < 3:
        if filepath is None:
            folder = a_tools.get_folder(
                timestamp=timestamp, label=label, older_than=older_than
            )
            filepath = a_tools.measurement_filename(folder)
        try:

            f = h5py.File(filepath, "r")
            snapshot = {}
            read_dict_from_hdf5(snapshot, h5_group=f["Snapshot"])

            if load_from_instr is None:
                ins_group = snapshot["instruments"][instrument_name]
            else:
                ins_group = snapshot["instruments"][load_from_instr]
            success = True
        except Exception as e:
            logging.warning("Exception occured reading from {}".format(folder))
            logging.warning(e)
            # This check makes this snippet a bit more robust
            if folder is not None:
                older_than = (
                    os.path.split(folder)[0][-8:] + "_" + os.path.split(folder)[1][:6]
                )
            # important to set all to None, otherwise the try except loop
            # will not look for an earlier data file
            folder = None
            filepath = None
            success = False
        count += 1

    if not success:
        logging.warning(
            'Could not open settings for instrument "%s"' % (instrument_name)
        )
        return False

    for parname, par in ins_group["parameters"].items():
        try:
            if hasattr(instrument.parameters[parname], "set") and (
                par["value"] is not None
            ):
                if ignore_pars is None or parname not in ignore_pars:
                    par_value = par["value"]
                    if type(par_value) == str:
                        try:
                            instrument.parameters[parname].validate(par_value)
                        except TypeError:
                            # This detects that in the hdf5 file the parameter
                            # was saved as string due to type incompatibility
                            par_value = eval(par_value)
                    instrument.set(parname, par_value)
        except Exception as e:
            print(
                'Could not set parameter: "{}" to "{}" '
                'for instrument "{}"'.format(parname, par["value"], instrument_name)
            )
            logging.warning(e)
    f.close()
    return True
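A hypothetical usage sketch for load_settings_onto_instrument_v2; the qubit
instrument, the label, the parameter names and the timestamp are placeholders
for illustration only.

# Restore the most recent matching settings file, skipping two parameters:
load_settings_onto_instrument_v2(qubit, label='spectroscopy',
                                 ignore_pars={'freq_qubit', 'spec_pow'})
# Load the settings another instrument had at a specific timestamp:
load_settings_onto_instrument_v2(qubit, load_from_instr='qubit_old',
                                 timestamp='20230101_120000')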
Example No. 8
def load_settings_onto_instrument_v2(instrument,
                                     load_from_instr: str = None,
                                     label: str = '',
                                     filepath: str = None,
                                     timestamp: str = None):
    '''
    Loads settings from an HDF5 file onto the instrument handed to the
    function. By default the last HDF5 file in the data directory is used;
    a different settings file can be selected via label, timestamp or
    filepath.

    Args:
        instrument (instrument) : instrument onto which settings should be
            loaded
        load_from_instr (str) : optional name of another instrument from
            which to load the settings.
        label (str)           : label used for finding the last datafile
        filepath (str)        : exact filepath of the hdf5 file to load.
            if filepath is specified, this takes precedence over the file
            locating options (label, timestamp etc.).
        timestamp (str)       : timestamp of file in the datadir


    '''

    older_than = None
    folder = None
    instrument_name = instrument.name
    success = False
    count = 0
    # Will try multiple times in case the last measurements failed and
    # created corrupt data files.
    while success is False and count < 3:
        if filepath is None:
            folder = a_tools.get_folder(timestamp=timestamp,
                                        label=label,
                                        older_than=older_than)
            filepath = a_tools.measurement_filename(folder)
        try:

            f = h5py.File(filepath, 'r')
            snapshot = {}
            h5d.read_dict_from_hdf5(snapshot, h5_group=f['Snapshot'])

            if load_from_instr is None:
                ins_group = snapshot['instruments'][instrument_name]
            else:
                ins_group = snapshot['instruments'][load_from_instr]
            success = True
        except Exception as e:
            logging.warning('Exception occurred reading from {}'.format(folder))
            logging.warning(e)
            # This check makes this snippet a bit more robust
            if folder is not None:
                older_than = os.path.split(folder)[0][-8:] \
                    + '_' + os.path.split(folder)[1][:6]
            # important to set all to None, otherwise the try except loop
            # will not look for an earlier data file
            folder = None
            filepath = None
            success = False
        count += 1

    if not success:
        logging.warning('Could not open settings for instrument "%s"' %
                        (instrument_name))
        return False

    for parname, par in ins_group['parameters'].items():
        try:
            if hasattr(instrument.parameters[parname], 'set'):
                instrument.set(parname, par['value'])
        except Exception as e:
            print('Could not set parameter: "{}" to "{}" '
                  'for instrument "{}"'.format(parname, par['value'],
                                               instrument_name))
            logging.warning(e)
    f.close()
    return True
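Since this older variant accepts the same file-locating arguments, pinning it
to an exact settings file could look like the sketch below; the awg instrument
and the path are placeholders.

load_settings_onto_instrument_v2(awg,
                                 filepath='/data/20230101/120000_cal/cal.hdf5')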
Example No. 9
    def test_writing_and_reading_dicts_to_hdf5(self):
        """
        Tests dumping a dictionary with a variety of value types to hdf5 and
        reading back the stored values. The input dictionary contains:
            - lists of ints and floats
            - a nested dict
            - 1D and 2D arrays
            - lists and tuples of mixed type
            - a list of strings
            - Python and numpy scalars (bool, int, float)
        """
        test_dict = {
            'list_of_ints': list(np.arange(5)),
            'list_of_floats': list(np.arange(5.1)),
            'some_bool': True,
            'weird_dict': {'a': 5},
            'dataset1': np.linspace(0, 20, 31),
            'dataset2': np.array([[2, 3, 4, 5],
                                  [2, 3, 1, 2]]),
            'list_of_mixed_type': ['hello', 4, 4.2, {'a': 5}, [4, 3]],
            'tuple_of_mixed_type': tuple(['hello', 4, 4.2, {'a': 5}, [4, 3]]),
            'a list of strings': ['my ', 'name ', 'is ', 'earl.'],
            'some_np_bool': np.bool_(True),  # np.bool alias removed in NumPy 1.24
            'list_of_dicts': [{'a': 5}, {'b': 3}],
            'some_int': 3,
            'some_float': 3.5,
            'some_np_int': np.int_(3),
            'some_np_float': np.float64(3.5)
        }

        data_object = h5d.Data(name='test_object', datadir=self.datadir)
        h5d.write_dict_to_hdf5(test_dict, data_object)
        data_object.close()
        filepath = data_object.filepath

        new_dict = {}
        opened_hdf5_file = h5py.File(filepath, 'r')
        try:
            h5d.read_dict_from_hdf5(new_dict, opened_hdf5_file)
            # objects are not identical but the string representation should be
            self.assertEqual(test_dict.keys(), new_dict.keys())
            self.assertEqual(test_dict['list_of_ints'], new_dict['list_of_ints'])
            self.assertEqual(test_dict['list_of_floats'],
                             new_dict['list_of_floats'])
            self.assertEqual(test_dict['weird_dict'], new_dict['weird_dict'])
            self.assertEqual(test_dict['some_bool'], new_dict['some_bool'])

            self.assertEqual(test_dict['list_of_dicts'],
                             new_dict['list_of_dicts'])

            self.assertEqual(test_dict['list_of_mixed_type'],
                             new_dict['list_of_mixed_type'])
            self.assertEqual(test_dict['list_of_mixed_type'][0],
                             new_dict['list_of_mixed_type'][0])
            self.assertEqual(test_dict['list_of_mixed_type'][2],
                             new_dict['list_of_mixed_type'][2])

            self.assertEqual(test_dict['tuple_of_mixed_type'],
                             new_dict['tuple_of_mixed_type'])
            self.assertEqual(type(test_dict['tuple_of_mixed_type']),
                             type(new_dict['tuple_of_mixed_type']))
            self.assertEqual(test_dict['tuple_of_mixed_type'][0],
                             new_dict['tuple_of_mixed_type'][0])
            self.assertEqual(test_dict['tuple_of_mixed_type'][2],
                             new_dict['tuple_of_mixed_type'][2])

            self.assertEqual(test_dict['some_np_bool'],
                             new_dict['some_np_bool'])
            self.assertEqual(test_dict['some_int'], new_dict['some_int'])
            self.assertEqual(test_dict['some_np_float'], new_dict['some_np_float'])
            self.assertEqual(test_dict['a list of strings'],
                             new_dict['a list of strings'])
            self.assertEqual(test_dict['a list of strings'][0],
                             new_dict['a list of strings'][0])
            opened_hdf5_file.close()
        except Exception as e:
            opened_hdf5_file.close()
            raise e
Example No. 10
def get_params_from_hdf_file(data_dict, params_dict=None, numeric_params=None,
                             add_param_method=None, folder=None, **params):
    """
    Extracts the parameters specified in params_dict from an HDF file
    and saves them in data_dict.
    :param data_dict: OrderedDict where parameters and their values are saved
    :param params_dict: OrderedDict with key being the parameter name that will
        be used as key in data_dict for this parameter, and value being a
        parameter name or a path + parameter name inside the HDF file.
    :param numeric_params: list of parameter names from among the keys of
        params_dict. This specifies that those parameters are numbers and will
        be converted to floats.
    :param add_param_method: (str) method passed on to add_param when storing
        the extracted values (e.g. 'append')
    :param folder: path to file from which data will be read
    :param params: keyword arguments:
        append_value (bool, default: True): whether to append the value to an
            already-existing key
        update_value (bool, default: False): whether to replace the value of
            an already-existing key
        h5mode (str, default: 'r'): reading mode of the HDF file
        close_file (bool, default: True): whether to close the HDF file(s)
    """
    if params_dict is None:
        params_dict = get_param('params_dict', data_dict, raise_error=True,
                                **params)
    if numeric_params is None:
        numeric_params = get_param('numeric_params', data_dict,
                                   default_value=[], **params)

    # if folder is not specified, will take the last folder in the list from
    # data_dict['folders']
    if folder is None:
        folder = get_param('folders', data_dict, raise_error=True, **params)
        if len(folder) > 0:
            folder = folder[-1]

    h5mode = get_param('h5mode', data_dict, default_value='r', **params)
    h5filepath = a_tools.measurement_filename(folder, **params)
    data_file = h5py.File(h5filepath, h5mode)

    try:
        for save_par, file_par in params_dict.items():
            epd = data_dict
            all_keys = save_par.split('.')
            for i in range(len(all_keys)-1):
                if all_keys[i] not in epd:
                    epd[all_keys[i]] = OrderedDict()
                epd = epd[all_keys[i]]

            if isinstance(epd, list):
                epd = epd[-1]

            if file_par == 'measurementstring':
                add_param(all_keys[-1],
                          [os.path.split(folder)[1][7:]],
                          epd, add_param_method='append')
                continue

            group_name = '/'.join(file_par.split('.')[:-1])
            par_name = file_par.split('.')[-1]
            if group_name == '':
                group = data_file
                attrs = []
            else:
                group = data_file[group_name]
                attrs = list(group.attrs)

            if group_name in data_file or group_name == '':
                if par_name in attrs:
                    add_param(all_keys[-1],
                              get_hdf_param_value(group,
                                                  par_name),
                              epd, add_param_method=add_param_method)
                elif par_name in list(group.keys()) or file_par == '':
                    par = group[par_name] if par_name != '' else group
                    if isinstance(par,
                                  h5py._hl.dataset.Dataset):
                        add_param(all_keys[-1],
                                  np.array(par),
                                  epd, add_param_method=add_param_method)
                    else:
                        add_param(all_keys[-1],
                                  read_dict_from_hdf5(
                                      {}, par),
                                  epd, add_param_method=add_param_method)

            if all_keys[-1] not in epd:
                # search through the attributes of all groups
                for group_name in data_file.keys():
                    if par_name in list(data_file[group_name].attrs):
                        add_param(all_keys[-1],
                                  get_hdf_param_value(data_file[group_name],
                                                      par_name),
                                  epd, add_param_method=add_param_method)

            if all_keys[-1] not in epd:
                log.warning(f'Parameter {file_par} was not found.')
        data_file.close()
    except Exception as e:
        data_file.close()
        raise e

    for par_name in data_dict:
        if par_name in numeric_params:
            if hasattr(data_dict[par_name], '__iter__'):
                data_dict[par_name] = [float(p) for p
                                       in data_dict[par_name]]
                data_dict[par_name] = np.asarray(data_dict[par_name])
            else:
                data_dict[par_name] = float(data_dict[par_name])

    if get_param('close_file', data_dict, default_value=True, **params):
        data_file.close()
    else:
        if 'data_files' in data_dict:
            data_dict['data_files'] += [data_file]
        else:
            data_dict['data_files'] = [data_file]
    return data_dict
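A hypothetical usage sketch for get_params_from_hdf_file; the folder, the
dotted HDF paths and the key names are placeholders rather than values from a
real measurement file.

from collections import OrderedDict

data_dict = OrderedDict(folders=['/data/20230101_120000_experiment'])
params_dict = {
    # dataset inside the file, addressed as 'group.dataset'
    'sweep_points': 'Experimental Data.Data',
    # attribute of a group, addressed as 'group.subgroup.attribute'
    'qb_freq': 'Instrument settings.qb1.f_qubit',
    # special-cased value: taken from the folder name instead of the file
    'meas_string': 'measurementstring',
}
get_params_from_hdf_file(data_dict, params_dict=params_dict,
                         numeric_params=['qb_freq'])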