def exp_info(self):
    """Dictionary of experiment info"""
    # map HDF5 attribute names -> keys of the returned dictionary
    _key_map = {
        "Investigator": "investigator",
        "Experiment name": "exp name",
        "Experiment description": "exp description",
        "Experiment set name": "exp set name",
        "Experiment set description": "exp set description",
    }

    # initialize with blank defaults (insertion order matches the
    # original key order)
    exp_info = {ikey: "" for ikey in _key_map.values()}

    # fill values from the HDF5 group attributes, decoding byte
    # strings to `str`
    attrs = self._hdf_obj[self.DEVICE_PATHS["digitizer"]].attrs
    for key, val in attrs.items():
        if key not in _key_map:
            continue
        if isinstance(val, (np.bytes_, bytes)):
            val = _bytes_to_str(val)
        exp_info[_key_map[key]] = val

    # return
    return exp_info
def __repr__(self):
    """
    Return a representation string that includes the basename of the
    underlying HDF5 file.
    """
    filename = self._hdf_obj.filename
    if isinstance(filename, (bytes, np.bytes_)):
        # h5py may hand the filename back as a byte string
        filename = _bytes_to_str(filename)
    filename = os.path.basename(filename)

    # BUG FIX: the computed `filename` was previously discarded and a
    # hard-coded "(unknown)" placeholder was reported instead
    rstr = f"<{self.__class__.__name__} of HDF5 file '{filename}'>"
    return rstr
def run_info(self):
    """Dictionary of experimental run info."""
    # map HDF5 attribute names -> keys of the returned dictionary
    _key_map = {
        "Data run": "run name",
        "Description": "run description",
        "Status": "run status",
        "Status date": "run date",
    }

    # initialize with blank defaults (insertion order matches the
    # original key order)
    run_info = {ikey: "" for ikey in _key_map.values()}

    # fill values from the HDF5 group attributes, decoding byte
    # strings to `str`
    attrs = self._hdf_obj[self.DEVICE_PATHS["digitizer"]].attrs
    for key, val in attrs.items():
        if key not in _key_map:
            continue
        if isinstance(val, (np.bytes_, bytes)):
            val = _bytes_to_str(val)
        run_info[_key_map[key]] = val

    # return
    return run_info
def _set_attrs(self, config_name, config_number):
    """
    Sets attributes for the control group and its sub-members
    """
    # exact byte-string payloads mimicking a real generated HDF5 file
    initial_state = (
        "*RST;"
        ":FUNC:SQU:DCYC +5.0000000000000E+01;"
        ":FUNC SQU;:FUNC:USER EXP_RISE;"
        ':DISP:TEXT "";'
        ":DISP 1;"
        "*ESE +0;"
        "*PSC 1;"
        "*SRE +0\n"
    )
    waveform_cl = (
        "FREQ 40000.000000 \nFREQ 80000.000000 \nFREQ 120000.000000 \n"
    )

    # write attributes onto the configuration group
    self[config_name].attrs.update({
        "GPIB address": np.uint32(0),
        "Generator type": np.bytes_("Agilent 33220A - LAN"),
        "IP address": np.bytes_(f"192.168.1.{config_number}"),
        "Initial state": np.bytes_(initial_state),
        "Waveform command list": np.bytes_(waveform_cl),
    })

    # mirror the command list into _configs as stripped command strings
    raw_cl = self[config_name].attrs["Waveform command list"]
    commands = [line.strip() for line in _bytes_to_str(raw_cl).splitlines()]
    self._configs[config_name]["command list"] = commands
def test_valid_vals(self):
    """`_bytes_to_str` passes `str` through and decodes `bytes`."""
    cases = (
        ("Hello", "Hello"),
        (b"Goodbye", "Goodbye"),
    )
    for inputs, expected in cases:
        with self.subTest(inputs=inputs, expected=expected):
            self.assertEqual(_bytes_to_str(inputs), expected)
def lapd_version(self) -> Union[None, str]:
    """LaPD HDF5 version string."""
    # EAFP: the attribute is absent for files not generated by the
    # LaPD DAQ software, in which case `None` is returned
    try:
        raw = self._hdf_obj.attrs["LaPD HDF5 software version"]
    except KeyError:
        return None
    return _bytes_to_str(raw)
def assertSNSuite(self, shotnum, index, sni, cdset, shotnumkey, configkey, cconfn): """Suite of assertions for shot number conditioning""" # shotnum - original requested shot number # index - index of dataset # sni - boolean mask for shotnum # shotnum[sni] = cdset[index, shotnumkey] # cdset - control devices dataset # shotnumkey - field in cdset that corresponds to shot numbers # configkey - field in cdset that corresponds to configuration # names # cconfn - configuration name for control device # # all return variables should be np.ndarray self.assertTrue(isinstance(index, np.ndarray)) self.assertTrue(isinstance(sni, np.ndarray)) # all should be 1D arrays self.assertEqual(index.ndim, 1) self.assertEqual(sni.ndim, 1) # equate array sizes self.assertEqual(shotnum.size, sni.size) self.assertEqual(np.count_nonzero(sni), index.size) # shotnum[sni] = cdset[index, shotnumkey] if index.size != 0: self.assertTrue( np.array_equal(shotnum[sni], cdset[index.tolist(), shotnumkey])) else: self.assertEqual(shotnum[sni].size, 0) # ensure correct config is grabbed if index.size != 0: cname_arr = cdset[index.tolist(), configkey] for name in cname_arr: self.assertEqual(_bytes_to_str(name), cconfn)
def _set_subgroup_attrs(self, config_name, config_number):
    """
    Sets attributes for the control sub-groups
    """
    # exact byte-string payloads mimicking a real generated HDF5 file
    command_list = (
        "SOURCE:VOLTAGE:LEVEL 20.0000000 \n"
        "SOURCE:VOLTAGE:LEVEL 30.0000000 \n"
        "SOURCE:VOLTAGE:LEVEL 40.0000000 \n"
        "SOURCE:VOLTAGE:LEVEL 5.0000000 \n"
    )

    # write attributes onto the configuration sub-group
    self[config_name].attrs.update({
        "IP address": np.bytes_(f"192.168.7.{config_number}"),
        "Initialization commands":
            np.bytes_("*RST;*WAI;OUTPUT ON;VOLTAGE 0.0;CURRENT 1.0"),
        "Model Number": np.bytes_("N5751A"),
        "N5700 power supply command list": np.bytes_(command_list),
    })

    # mirror the command list into _configs as stripped command strings
    raw_cl = self[config_name].attrs["N5700 power supply command list"]
    commands = [line.strip() for line in _bytes_to_str(raw_cl).splitlines()]
    self._configs[config_name]["command list"] = commands
def _build_configs(self):
    """
    Build the :attr:`configs` dictionary.

    :raises HDFMappingError: if the 'Run time list' dataset is
        missing, or if the dataset lacks the required fields
    """
    # Assumptions:
    # 1. only one NI_XYZ drive was ever built, so there will always
    #    be only one configuration
    #    - naming configuration 'config01'
    # 2. there's only one dataset ever created 'Run time list'
    # 3. there can be multiple motion lists defined
    #    - each sub-group is a configuration for a different
    #      motion list
    #    - the name of the sub-group is the name of the motion list
    #
    # initialize configuration
    cname = "config01"
    self.configs[cname] = {}

    # check there are existing motion lists
    if len(self.subgroup_names) == 0:
        warn(
            f"{self.info['group path']}: no defining motion list groups exist"
        )

    # get dataset
    try:
        dset = self.group[self.construct_dataset_name()]
    except KeyError:
        why = f"Dataset '{self.construct_dataset_name()}' not found"
        raise HDFMappingError(self.info["group path"], why=why)

    # ---- define general config values ----
    # NOTE(review): the adjacent literals "...population of the" +
    # "fields..." and "...pivot point and" + "LaPD axis..." concatenate
    # without a separating space — presumably missing trailing spaces;
    # verify against the intended message before changing.
    self.configs[cname].update({
        "Note": "The 'r', 'theta', and 'phi' fields in the "
                "NI_XYZ data set are suppose to represent "
                "spherical coordinates of the probe tip with "
                "respect to the pivot point of the probe drive, "
                "but the current calculation and population of the"
                "fields is inaccurate. For user reference, the "
                "distance between the probe drive pivot point and"
                "LaPD axis is (Lpp =) 58.771 cm.",
        "Lpp": 58.771 * u.cm,
    })

    # ---- define motion list values ----
    self.configs[cname]["motion lists"] = {}

    # get sub-group names (i.e. ml names)
    _ml_names = []
    for name in self.group:
        if isinstance(self.group[name], h5py.Group):
            _ml_names.append(name)

    # a motion list group must have the attributes
    # Nx, Ny, Nz, dx, dy, dz, x0, y0, z0
    # - a group is discarded only when ALL of these are absent
    names_to_remove = []
    for name in _ml_names:
        if all(attr not in self.group[name].attrs
               for attr in ("Nx", "Ny", "Nz", "dx", "dy", "dz",
                            "x0", "y0", "z0")):
            names_to_remove.append(name)
    if bool(names_to_remove):
        for name in names_to_remove:
            _ml_names.remove(name)

    # warn if no motion lists exist
    if not bool(_ml_names):
        why = "NI_XYZ has no identifiable motion lists"
        warn(why)

    # gather ML config values
    # - pairs of (configs key, HDF5 attribute name)
    pairs = [
        ("Nx", "Nx"),
        ("Ny", "Ny"),
        ("Nz", "Nz"),
        ("dx", "dx"),
        ("dy", "dy"),
        ("dz", "dz"),
        ("fan_XYZ", "fan_XYZ"),
        ("max_ydrive_steps", "max_ydrive_steps"),
        ("min_ydrive_steps", "min_ydrive_steps"),
        ("max_zdrive_steps", "max_zdrive_steps"),
        ("min_zdrive_steps", "min_zdrive_steps"),
        ("x0", "x0"),
        ("y0", "y0"),
        ("z0", "z0"),
        ("port", "z_port"),
    ]
    for name in _ml_names:
        # initialize ML dictionary
        self.configs[cname]["motion lists"][name] = {}

        # add ML values
        for pair in pairs:
            try:
                # get attribute value
                val = self.group[name].attrs[pair[1]]

                # condition value
                if np.issubdtype(type(val), np.bytes_):
                    # - val is a np.bytes_ string
                    val = _bytes_to_str(val)
                if pair[1] == "fan_XYZ":
                    # convert to boolean
                    if val == "TRUE":
                        val = True
                    else:
                        val = False

                # assign val to configs
                self.configs[cname]["motion lists"][name][pair[0]] = val
            except KeyError:
                # missing attribute is non-fatal; record None and warn
                self.configs[cname]["motion lists"][name][pair[0]] = None
                why = (f"Motion List attribute '{pair[1]}' not found for "
                       f"ML group '{name}'")
                warn(why)

    # ---- define 'dset paths' ----
    self.configs[cname]["dset paths"] = (dset.name, )

    # ---- define 'shotnum' ----
    # check dset for 'Shot number' field
    if "Shot number" not in dset.dtype.names:
        why = f"Dataset '{dset.name}' is missing 'Shot number' field"
        raise HDFMappingError(self.info["group path"], why=why)

    # initialize
    self.configs[cname]["shotnum"] = {
        "dset paths": self.configs[cname]["dset paths"],
        "dset field": ("Shot number", ),
        "shape": dset.dtype["Shot number"].shape,
        "dtype": np.int32,
    }

    # ---- define 'state values' ----
    # NOTE(review): switches from `self.configs` to `self._configs`
    # here — presumably the same underlying dict exposed via a
    # property; confirm against the mapping template class.
    self._configs[cname]["state values"] = {
        "xyz": {
            "dset paths": self._configs[cname]["dset paths"],
            "dset field": ("x", "y", "z"),
            "shape": (3, ),
            "dtype": np.float64,
        },
    }

    # check dset for 'x', 'y' and 'z' fields
    # - all three missing is fatal; a partial set only warns
    fx = "x" not in dset.dtype.names
    fy = "y" not in dset.dtype.names
    fz = "z" not in dset.dtype.names
    if fx and fy and fz:
        why = f"Dataset '{dset.name}' missing fields 'x', 'y' and 'z'"
        raise HDFMappingError(self.info["group path"], why=why)
    elif fx or fy or fz:
        mlist = [("x", fx), ("y", fy), ("z", fz)]
        missf = ", ".join([val for val, bol in mlist if bol])
        why = f" Dataset '{dset.name}' missing field '{missf}'"
        warn(why)
def _build_configs(self):
    """
    Builds the :attr:`configs` dictionary.

    :raises HDFMappingError: if no configuration sub-groups exist,
        the expected dataset is missing, or the 'Waveform command
        list' attribute is absent
    """
    # check there are configurations to map
    if len(self.subgroup_names) == 0:
        why = "has no mappable configurations"
        raise HDFMappingError(self._info["group path"], why=why)

    # build configuration dictionaries
    # - assume every sub-group represents a unique configuration
    #   to the control device
    # - the name of each sub-group is used as the configuration
    #   name
    # - assume all configurations are active (i.e. used)
    #
    for name in self.subgroup_names:
        # get configuration group
        cong = self.group[name]

        # get dataset
        try:
            dset = self.group[self.construct_dataset_name()]
        except KeyError:
            why = (
                f"Dataset '{self.construct_dataset_name()}' not found for "
                f"configuration group '{name}'"
            )
            raise HDFMappingError(self._info["group path"], why=why)

        # initialize _configs
        self._configs[name] = {}

        # ---- define general info values ----
        # pairs of (_configs key, HDF5 attribute name)
        pairs = [
            ("IP address", "IP address"),
            ("generator device", "Generator type"),
            ("GPIB address", "GPIB address"),
            ("initial state", "Initial state"),
            ("command list", "Waveform command list"),
        ]
        for pair in pairs:
            try:
                # get attribute value
                val = cong.attrs[pair[1]]

                # condition value
                if pair[0] == "command list":
                    # - val gets returned as a np.bytes_ string
                    # - split line returns
                    # - remove trailing/leading whitespace
                    #
                    val = _bytes_to_str(val).splitlines()
                    val = tuple([cls.strip() for cls in val])
                elif pair[0] in ("IP address", "generator device",
                                 "initial state"):
                    # - val is a np.bytes_ string
                    #
                    val = _bytes_to_str(val)
                else:
                    # no conditioning is needed
                    # 'GPIB address' val is np.uint32
                    pass

                # assign val to _configs
                self._configs[name][pair[0]] = val
            except KeyError:
                # missing attribute: fatal only for 'command list',
                # since the rest of the mapping depends on it
                self._configs[name][pair[0]] = None
                warn_str = (
                    f"Attribute '{pair[1]}' not found in control device '"
                    f"{self.device_name}' configuration group '{name}'"
                )
                if pair[0] != "command list":
                    warn_str += ", continuing with mapping"
                    warn(warn_str)
                else:
                    why = (
                        f"Attribute '{pair[1]}' not found for configuration "
                        f"group '{name}'"
                    )
                    raise HDFMappingError(self._info["group path"], why=why)

        # ---- define 'dset paths' ----
        self._configs[name]["dset paths"] = (dset.name,)

        # ---- define 'shotnum' ----
        # initialize
        self._configs[name]["shotnum"] = {
            "dset paths": self._configs[name]["dset paths"],
            "dset field": ("Shot number",),
            "shape": dset.dtype["Shot number"].shape,
            "dtype": np.int32,
        }

        # ---- define 'state values' ----
        # catch and suppress warnings only for initialization
        with warnings.catch_warnings():
            warnings.simplefilter("ignore")

            try:
                sv_state = self._construct_state_values_dict(
                    name, self._default_re_patterns
                )
            except KeyError:
                sv_state = {}

        # initialize
        # - fall back to the default state values when construction
        #   from the command list produced nothing
        self._configs[name]["state values"] = (
            sv_state if bool(sv_state)
            else self._default_state_values_dict(name)
        )
def test_mapping(self, _lapdf: File):
    """
    Test the LaPDMap constructed by `create_map`: device paths,
    property attributes, `exp_info`/`run_info` attribute handling,
    and the warning issued for non-LaPD files.
    """
    _map = self.create_map(_lapdf)

    # LaPDMap subclasses HDFMap
    self.assertIsInstance(_map, HDFMap)

    # check paths
    self.assertTrue(hasattr(_map, "DEVICE_PATHS"))
    self.assertEqual(_map.DEVICE_PATHS["control"], "Raw data + config")
    self.assertEqual(_map.DEVICE_PATHS["digitizer"], "Raw data + config")
    self.assertEqual(_map.DEVICE_PATHS["msi"], "MSI")

    # additional attributes
    self.assertTrue(hasattr(_map, "is_lapd"))
    self.assertTrue(hasattr(_map, "lapd_version"))
    self.assertTrue(hasattr(_map, "exp_info"))
    self.assertTrue(hasattr(_map, "run_info"))
    self.assertIsInstance(type(_map).is_lapd, property)
    self.assertIsInstance(type(_map).lapd_version, property)
    self.assertIsInstance(type(_map).exp_info, property)
    self.assertIsInstance(type(_map).run_info, property)

    # -- examine `is_lapd` and `lapd_version` ----
    #
    # By default, FauxHDFBuilder adds the
    # 'LaPD HDF5 software version' attribute to the test file.
    lapd_version = _bytes_to_str(
        self.f.attrs["LaPD HDF5 software version"])
    self.assertTrue(_map.is_lapd)
    self.assertEqual(_map.lapd_version, lapd_version)

    # remove the LaPD version (then restore it afterwards)
    del self.f.attrs["LaPD HDF5 software version"]
    self.assertFalse(_map.is_lapd)
    self.assertIsNone(_map.lapd_version)
    self.f.attrs["LaPD HDF5 software version"] = np.bytes_(lapd_version)

    # -- examine `exp_info` ----
    # pairs of (HDF5 attribute name, exp_info key)
    attrs = [
        ("Investigator", "investigator"),
        ("Experiment name", "exp name"),
        ("Experiment description", "exp description"),
        ("Experiment set name", "exp set name"),
        ("Experiment set description", "exp set description"),
    ]
    path = "Raw data + config"
    # 'z' is a dummy attribute to exercise unmatched-key handling
    self.f[path].attrs["z"] = np.bytes_("z")
    for aname, iname in attrs:
        try:
            old_val = self.f[path].attrs[aname]
        except KeyError:
            # attribute absent in the test file; inject a placeholder
            old_val = "something"
            self.f[path].attrs[aname] = old_val
        if isinstance(old_val, (np.bytes_, bytes)):
            old_val = _bytes_to_str(old_val)

        # equality
        self.assertEqual(_map.exp_info[iname], old_val)

        # remove attribute
        del self.f[path].attrs[aname]
        self.assertEqual(_map.exp_info[iname], "")

        # return val (restore the original attribute value)
        if old_val == "something":
            continue
        elif isinstance(old_val, str):
            old_val = np.bytes_(old_val)
        self.f[path].attrs[aname] = old_val
    del self.f[path].attrs["z"]

    # -- examine `run_info` ----
    # pairs of (HDF5 attribute name, run_info key)
    attrs = [
        ("Data run", "run name"),
        ("Description", "run description"),
        ("Status", "run status"),
        ("Status date", "run date"),
    ]
    path = "Raw data + config"
    self.f[path].attrs["z"] = np.bytes_("z")
    for aname, iname in attrs:
        try:
            old_val = self.f[path].attrs[aname]
        except KeyError:
            old_val = "something"
            self.f[path].attrs[aname] = old_val
        if isinstance(old_val, (np.bytes_, bytes)):
            old_val = _bytes_to_str(old_val)

        # equality
        self.assertEqual(_map.run_info[iname], old_val)

        # remove attribute
        del self.f[path].attrs[aname]
        self.assertEqual(_map.run_info[iname], "")

        # return val (restore the original attribute value)
        if old_val == "something":
            continue
        elif isinstance(old_val, str):
            old_val = np.bytes_(old_val)
        self.f[path].attrs[aname] = old_val
    del self.f[path].attrs["z"]

    # -- `__init__` warning ----
    # a non-LaPD file should trigger a UserWarning during mapping
    with mock.patch.object(LaPDMap, "is_lapd",
                           new_callable=mock.PropertyMock,
                           return_value=False) as mock_il:
        with self.assertWarns(UserWarning):
            _map = self.create_map(_lapdf)
        self.assertTrue(mock_il.called)
def _analyze_probelist(self, gname: str) -> dict:
    """
    Determines if `gname` matches the RE for a probe list group
    name.  If yes, then it gathers the probe info.

    :param str gname: name of potential probe list group
    :return: dictionary with `'probe-id'` and `'config'` keys; empty
        dictionary if `gname` is not a probe list group
    """
    # Define RE pattern
    # - A probe list group follows the naming scheme of:
    #
    #     'Probe: XY[<RNUM>]: <NAME>'
    #
    #   where <RNUM> is the receptacle number and <NAME> is the
    #   probe name
    #
    _pattern = r"(\bProbe:\sXY\[)(?P<RNUM>\b\d+\b)(\]:\s)(?P<NAME>.+\b)"

    # match _pattern against gname
    _match = re.fullmatch(_pattern, gname)

    # gather pl info
    # - Note: a missing HDF5 attribute will not cause the mapping to
    #         fail, the associated mapping item will be given an
    #         appropriate None value
    #
    if _match is not None:
        # define probe list dict
        probe_name = _match.group("NAME")
        receptacle_str = _match.group("RNUM")
        pl = {"probe-id": f"{probe_name} - {receptacle_str}",
              "config": {}}

        # get pl group
        plg = self.group[gname]

        # gather pl info
        # -- define 'group name', 'group path', and 'probe name' --
        pl["config"]["group name"] = gname
        pl["config"]["group path"] = plg.name
        pl["config"]["probe name"] = probe_name

        # -- check PL name --
        # the group-name-derived probe name wins over the attribute
        try:
            # get value
            pl_name = plg.attrs["Probe"]
            if np.issubdtype(type(pl_name), np.bytes_):
                # decode to 'utf-8'
                pl_name = _bytes_to_str(pl_name)

            # check against discovered probe name
            if probe_name != pl_name:
                warn(
                    f"{pl['config']['group name']} Discovered probe list name "
                    f"'{probe_name}' does not match the name defined in "
                    f"attributes '{pl_name}', using discovered name.")
        except KeyError:
            warn_str = (
                f"{pl['config']['group name']}: Probe list attribute 'Probe' "
                f"not found")
            warn(warn_str)

        # -- check receptacle number --
        # the group-name-derived number wins over the attribute
        try:
            # define receptacle number
            pl["config"]["receptacle"] = int(_match.group("RNUM"))

            # get value
            rnum = plg.attrs["Receptacle"]

            # check against discovered receptacle number
            if pl["config"]["receptacle"] != rnum:
                warn_str = (
                    f"{pl['config']['group name']}: Discovered receptacle "
                    f"number '{pl['config']['receptacle']}' does not match "
                    f"the number defined in attributes '{rnum}', using "
                    f"discovered name.")
                warn(warn_str)
        except KeyError:
            warn_str = (
                f"{pl['config']['group name']}: Probe list attribute 'Receptacle' "
                f"not found")
            warn(warn_str)

        # -- check pairs --
        # pairs of (config key, HDF5 attribute name)
        pairs = [
            ("calib", "Calibration"),
            ("level sy (cm)", "Level sy (cm)"),
            ("port", "Port"),
            ("probe channels", "Probe channels"),
            ("probe type", "Probe type"),
            ("unnamed", "Unnamed"),
            ("sx at end (cm)", "sx at end (cm)"),
            ("z", "z"),
        ]
        for pair in pairs:
            try:
                # get value
                val = plg.attrs[pair[1]]

                # condition value
                if np.issubdtype(type(val), np.bytes_):
                    # - val is a np.bytes_ string
                    val = _bytes_to_str(val)

                # assign val
                pl["config"][pair[0]] = val
            except KeyError:
                pl["config"][pair[0]] = None
                warn_str = (
                    f"{pl['config']['group name']}: attribute '{pair[1]}' "
                    f"not found")
                warn(warn_str)

        # return
        return pl
    else:
        # not a probe list
        return {}
def _analyze_motionlist(self, gname: str) -> dict:
    """
    Determines if `gname` matches the RE for a motion list group
    name.  If yes, then it gathers the motion list info.

    :param str gname: name of potential motion list group
    :return: dictionary with `'name'` and `'config'` keys; empty
        dictionary if `gname` is not a motion list group
    """
    # Define RE pattern
    # - A motion list group follows the naming scheme of:
    #
    #     'Motion list: <NAME>'
    #
    #   where <NAME> is the motion list name
    #
    _pattern = r"(\bMotion list:\s)(?P<NAME>.+\b)"

    # match _pattern against gname
    _match = re.fullmatch(_pattern, gname)

    # gather ml info
    # - Note: a missing HDF5 attribute will not cause the mapping to
    #         fail, the associated mapping item will be given an
    #         appropriate None value
    #
    if _match is not None:
        # define motion list dict
        ml = {"name": _match.group("NAME"), "config": {}}

        # get ml group
        mlg = self.group[gname]

        # gather motion list info
        # -- define 'group name' and 'group path' --
        ml["config"]["group name"] = gname
        ml["config"]["group path"] = mlg.name

        # -- check ML name --
        # the group-name-derived name wins over the attribute
        try:
            ml_name = mlg.attrs["Motion list"]
            if np.issubdtype(type(ml_name), np.bytes_):
                # decode to 'utf-8'
                ml_name = _bytes_to_str(ml_name)
            if ml["name"] != ml_name:
                warn_str = (
                    f"Discovered motion list name '{ml['name']}' does not "
                    f"match the name defined in attributes '{ml_name}', "
                    f"using discovered name")
                warn(warn_str)
        except KeyError:
            warn_str = (
                f"Motion list attribute 'Motion list' not found for ML "
                f"'{ml['config']['group name']}'")
            warn(warn_str)

        # -- check simple pairs --
        # pairs of (config key, HDF5 attribute name)
        pairs = [
            ("created date", "Created date"),
            ("data motion count", "Data motion count"),
            ("motion count", "Motion count"),
        ]
        for pair in pairs:
            try:
                # get attribute value
                val = mlg.attrs[pair[1]]

                # condition value
                if np.issubdtype(type(val), np.bytes_):
                    # - val is a np.bytes_ string
                    val = _bytes_to_str(val)

                # assign val
                ml["config"][pair[0]] = val
            except KeyError:
                ml["config"][pair[0]] = None
                warn_str = (
                    f"Motion list attribute '{pair[1]}' not found for ML "
                    f"'{ml['name']}'")
                warn(warn_str)

        # -- check 'delta' --
        # 3-element (x, y, z) array; z spacing is always 0.0
        try:
            val = np.array(
                [mlg.attrs["Delta x"], mlg.attrs["Delta y"], 0.0])
            ml["config"]["delta"] = val
        except KeyError:
            ml["config"]["delta"] = np.array([None, None, None])
            warn_str = (
                f"Motion list attributes 'Delta x' and/or 'Delta y' not "
                f"found for ML '{ml['name']}'")
            warn(warn_str)

        # -- check 'center' --
        try:
            val = np.array([
                mlg.attrs["Grid center x"], mlg.attrs["Grid center y"], 0.0
            ])
            ml["config"]["center"] = val
        except KeyError:
            ml["config"]["center"] = np.array([None, None, None])
            warn_str = (
                f"Motion list attributes 'Grid center x' and/or 'Grid "
                f"center y' not found for ML '{ml['name']}'")
            warn(warn_str)

        # -- check 'npoints' --
        # z point count is always 1
        try:
            val = np.array([mlg.attrs["Nx"], mlg.attrs["Ny"], 1])
            ml["config"]["npoints"] = val
        except KeyError:
            ml["config"]["npoints"] = np.array([None, None, None])
            warn_str = (
                f"Motion list attributes 'Nx' and/or 'Ny' not found "
                f"for ML '{ml['name']}'")
            warn(warn_str)

        # return
        return ml
    else:
        # not a motion list
        return {}
def _build_configs(self):
    """
    Build the :attr:`configs` dictionary.

    :raises HDFMappingError: if the 'Run time list' dataset is
        missing, or if the dataset lacks the required fields
    """
    # Assumptions:
    # 1. only one NI_XZ drive was ever built, so there will always
    #    be only one configuration
    #    - naming configuration 'config01'
    # 2. there's only one dataset ever created 'Run time list'
    # 3. there can be multiple motion lists defined
    #    - each sub-group is a configuration for a different
    #      motion list
    #    - the name of the sub-group is the name of the motion list
    #
    # initialize configuration
    cname = "config01"
    self.configs[cname] = {}

    # check there are existing motion lists
    if len(self.subgroup_names) == 0:
        warn(
            f"{self.info['group path']}: no defining motion list groups exist"
        )

    # get dataset
    try:
        dset = self.group[self.construct_dataset_name()]
    except KeyError:
        why = f"Dataset '{self.construct_dataset_name()}' not found"
        raise HDFMappingError(self.info["group path"], why=why)

    # ---- define general config values ----
    # none exist

    # ---- define motion list values ----
    self.configs[cname]["motion lists"] = {}

    # get sub-group names (i.e. ml names)
    _ml_names = []
    for name in self.group:
        if isinstance(self.group[name], h5py.Group):
            _ml_names.append(name)

    # a motion list group must have the attributes
    # Nx, Nz, dx, dz, x0, z0
    # - a group is discarded only when ALL of these are absent
    # - BUG FIX: the filter previously tested 'Ny', which NI_XZ
    #   motion list groups do not define, so a group defining only
    #   'Nz' was wrongly discarded
    names_to_remove = []
    for name in _ml_names:
        if all(attr not in self.group[name].attrs
               for attr in ("Nx", "Nz", "dx", "dz", "x0", "z0")):
            names_to_remove.append(name)
    if bool(names_to_remove):
        for name in names_to_remove:
            _ml_names.remove(name)

    # warn if no motion lists exist
    if not bool(_ml_names):
        why = "NI_XZ has no identifiable motion lists"
        warn(why)

    # gather ML config values
    # - pairs of (configs key, HDF5 attribute name)
    pairs = [
        ("Nx", "Nx"),
        ("Nz", "Nz"),
        ("dx", "dx"),
        ("dz", "dz"),
        ("fan_XZ", "fan_XZ"),
        ("max_zdrive_steps", "max_zdrive_steps"),
        ("min_zdrive_steps", "min_zdrive_steps"),
        ("x0", "x0"),
        ("z0", "z0"),
        ("port", "z_port"),
    ]
    for name in _ml_names:
        # initialize ML dictionary
        self.configs[cname]["motion lists"][name] = {}

        # add ML values
        for pair in pairs:
            try:
                # get attribute value
                val = self.group[name].attrs[pair[1]]

                # condition value
                if np.issubdtype(type(val), np.bytes_):
                    # - val is a np.bytes_ string
                    val = _bytes_to_str(val)
                if pair[1] == "fan_XZ":
                    # convert to boolean
                    val = val == "TRUE"

                # assign val to configs
                self.configs[cname]["motion lists"][name][pair[0]] = val
            except KeyError:
                # missing attribute is non-fatal; record None and warn
                self.configs[cname]["motion lists"][name][pair[0]] = None
                why = (f"Motion List attribute '{pair[1]}' not found for "
                       f"ML group '{name}'")
                warn(why)

    # ---- define 'dset paths' ----
    self.configs[cname]["dset paths"] = (dset.name, )

    # ---- define 'shotnum' ----
    # check dset for 'Shot number' field
    if "Shot number" not in dset.dtype.names:
        why = f"Dataset '{dset.name}' is missing 'Shot number' field"
        raise HDFMappingError(self.info["group path"], why=why)

    # initialize
    self.configs[cname]["shotnum"] = {
        "dset paths": self.configs[cname]["dset paths"],
        "dset field": ("Shot number", ),
        "shape": dset.dtype["Shot number"].shape,
        "dtype": np.int32,
    }

    # ---- define 'state values' ----
    # the empty-string field is a placeholder for the absent 'y' axis
    self._configs[cname]["state values"] = {
        "xyz": {
            "dset paths": self._configs[cname]["dset paths"],
            "dset field": ("x", "", "z"),
            "shape": (3, ),
            "dtype": np.float64,
        },
    }

    # check dset for 'x' and 'z' fields
    # - both missing is fatal; one missing only warns
    fx = "x" not in dset.dtype.names
    fz = "z" not in dset.dtype.names
    if fx and fz:
        why = f"Dataset '{dset.name}' missing both field 'x' and 'z'"
        raise HDFMappingError(self.info["group path"], why=why)
    elif fx or fz:
        missf = "x" if fx else "z"
        why = f" Dataset '{dset.name}' missing field '{missf}'"
        warn(why)
def _adc_info_first_pass(
    self, adc_name: str, config_group: h5py.Group
) -> Tuple[Tuple[int, Tuple[int, ...], Dict[str, Any]], ...]:
    """
    Gathers the analog-digital-converter's connected board and
    channel numbers, as well as, the associated setup configuration
    for each connected board.

    :param adc_name: name of analog-digital-converter
    :param config_group: HDF5 group object of the configuration
        group
    :returns:

        Tuple of 3-element tuples where the 1st element of the
        nested tuple represents a connected *board* number, the 2nd
        element is a tuple of connected *channel* numbers for the
        *board*, and the 3rd element is a dictionary of adc setup
        values (*bit*, *clock rate*, etc.).

        On the first pass, the meta-info dict will contain:

        .. csv-table::
            :header: "Key", "Description"
            :widths: 20, 60

            "::

                'bit'
            ", "
            bit resolution of the digitizer's analog-digital-converter
            "
            "::

                'clock rate'
            ", "
            clock rate of the digitizer's analog-digital-converter
            "
            "::

                'shot average (software)'
            ", "
            number of shots intended to be averaged over
            "
            "::

                'sample average (hardware)'
            ", "
            number of data samples average together
            "
    """
    # 'Raw data + config/SIS 3301' group has only one possible
    # adc ('SIS 3301')
    # adc_info = (
    #     int,          # board number
    #     (int, ...),   # connected channel numbers
    #     {'bit': 14,   # bit resolution
    #      'clock rate': <Quantity 100.0 MHz>,
    #      'nshotnum': int,
    #      'shot average (software)': int,
    #      'sample average (hardware)': int})
    #
    # initialize
    adc_info = []

    # conns is a tuple of tuples where each tuple is a seed for the
    # elements of `adc_info`
    conns = self._find_adc_connections(adc_name, config_group)

    for conn in conns:
        # define 'bit' and 'clock rate'
        # - fixed hardware characteristics of the SIS 3301
        conn[2]["bit"] = 14
        conn[2]["clock rate"] = u.Quantity(100.0, unit="MHz")

        # add 'shot average (software)' to dict
        # - values of 0 and 1 mean "no averaging" and are normalized
        #   to None
        if "Shots to average" in config_group.attrs:
            shtave = config_group.attrs["Shots to average"]
            if shtave == 0 or shtave == 1:
                shtave = None
        else:
            shtave = None
        conn[2]["shot average (software)"] = shtave

        # add 'sample average (hardware)' to dict
        # - the value is stored as a string like 'Average 8 Samples'
        #   under either 'Samples to average' or (older files)
        #   'Unnamed'
        splave = None
        avestr = ""
        find_splave = False
        if "Samples to average" in config_group.attrs:
            avestr = config_group.attrs["Samples to average"]
            avestr = _bytes_to_str(avestr)
            find_splave = True
        elif "Unnamed" in config_group.attrs:
            avestr = config_group.attrs["Unnamed"]
            try:
                avestr = _bytes_to_str(avestr)
                find_splave = True
            except TypeError:
                # 'Unnamed' attribute was not a string
                avestr = ""
                find_splave = False

        if find_splave:
            if avestr != "No averaging":
                _match = re.fullmatch(
                    r"(\bAverage\s)(?P<NAME>.+)(\sSamples\b)",
                    avestr)
                if bool(_match):
                    try:
                        # splave = int(avestr.split()[1])
                        # - 0 and 1 mean "no averaging" -> None
                        splave = int(_match.group("NAME"))
                        if splave == 0 or splave == 1:
                            splave = None
                    except ValueError:
                        warn(
                            f"Found sample averaging of "
                            f"'{_match.group('NAME')}' "
                            f"but can not convert to int...using a value of "
                            f"None instead")
        conn[2]["sample average (hardware)"] = splave

        # append info
        adc_info.append(conn)

    return tuple(adc_info)