Example #1
0
    def _build_configs(self):
        """
        Build the :attr:`configs` dictionary.

        Assumptions:

        1. Only one NI_XYZ drive was ever built, so there will always
           be only one configuration (named ``'config01'``).
        2. There's only one dataset ever created, 'Run time list'.
        3. There can be multiple motion lists defined:

           - each sub-group is a configuration for a different motion
             list
           - the name of the sub-group is the name of the motion list

        :raises HDFMappingError: if the main dataset is not found, or
            if it is missing the required ``'Shot number'`` field, or
            if it is missing all of the ``'x'``, ``'y'``, and ``'z'``
            fields
        """
        # initialize configuration
        cname = 'config01'
        self.configs[cname] = {}

        # check there are existing motion lists
        if len(self.subgroup_names) == 0:
            warn(self.info['group path'] +
                 ': no defining motion list groups exist')

        # get dataset
        try:
            dset = self.group[self.construct_dataset_name()]
        except KeyError:
            why = ("Dataset '" + self.construct_dataset_name() + "' not found")
            raise HDFMappingError(self.info['group path'], why=why)

        # ---- define general config values                         ----
        # BUGFIX: added the missing trailing spaces in two 'Note'
        # literals; the original concatenation produced "thefields"
        # and "andLaPD".
        self.configs[cname].update({
            'Note':
            "The 'r', 'theta', and 'phi' fields in the "
            "NI_XYZ data set are suppose to represent "
            "spherical coordinates of the probe tip with "
            "respect to the pivot point of the probe drive, "
            "but the current calculation and population of the "
            "fields is inaccurate.  For user reference, the "
            "distance between the probe drive pivot point and "
            "LaPD axis is (Lpp =) 58.771 cm.",
            'Lpp':
            58.771 * u.cm,
        })

        # ---- define motion list values                            ----
        self.configs[cname]['motion lists'] = {}

        # get sub-group names (i.e. ml names)
        _ml_names = []
        for name in self.group:
            if isinstance(self.group[name], h5py.Group):
                _ml_names.append(name)

        # a motion list group must have the attributes
        # Nx, Ny, Nz, dx, dy, dz, x0, y0, z0
        # NOTE(review): `all(...)` discards a group only when it has
        # NONE of the expected attributes; a group missing just some
        # of them is kept and warned about below -- confirm intended
        names_to_remove = []
        for name in _ml_names:
            if all(attr not in self.group[name].attrs
                   for attr in ('Nx', 'Ny', 'Nz', 'dx', 'dy', 'dz', 'x0', 'y0',
                                'z0')):
                names_to_remove.append(name)
        if bool(names_to_remove):
            for name in names_to_remove:
                _ml_names.remove(name)

        # warn if no motion lists exist
        if not bool(_ml_names):
            why = 'NI_XYZ has no identifiable motion lists'
            warn(why)

        # gather ML config values
        # - each pair is (configs key, HDF5 attribute name)
        pairs = [
            ('Nx', 'Nx'),
            ('Ny', 'Ny'),
            ('Nz', 'Nz'),
            ('dx', 'dx'),
            ('dy', 'dy'),
            ('dz', 'dz'),
            ('fan_XYZ', 'fan_XYZ'),
            ('max_ydrive_steps', 'max_ydrive_steps'),
            ('min_ydrive_steps', 'min_ydrive_steps'),
            ('max_zdrive_steps', 'max_zdrive_steps'),
            ('min_zdrive_steps', 'min_zdrive_steps'),
            ('x0', 'x0'),
            ('y0', 'y0'),
            ('z0', 'z0'),
            ('port', 'z_port'),
        ]
        for name in _ml_names:
            # initialize ML dictionary
            self.configs[cname]['motion lists'][name] = {}

            # add ML values
            for pair in pairs:
                try:
                    # get attribute value
                    val = self.group[name].attrs[pair[1]]

                    # condition value
                    if np.issubdtype(type(val), np.bytes_):
                        # - val is a np.bytes_ string
                        val = val.decode('utf-8')
                    if pair[1] == 'fan_XYZ':
                        # convert to boolean
                        if val == 'TRUE':
                            val = True
                        else:
                            val = False

                    # assign val to configs
                    self.configs[cname]['motion lists'][name][pair[0]] \
                        = val
                except KeyError:
                    # attribute missing: record None and warn, but
                    # continue with the mapping
                    self.configs[cname]['motion lists'][name][pair[0]] \
                        = None

                    why = "Motion List attribute '" + pair[1] \
                          + "' not found for ML group '" + name + "'"
                    warn(why)

        # ---- define 'dset paths'                                  ----
        self.configs[cname]['dset paths'] = (dset.name, )

        # ---- define 'shotnum'                                     ----
        # check dset for 'Shot number' field
        if 'Shot number' not in dset.dtype.names:
            why = "Dataset '" + dset.name \
                  + "' is missing 'Shot number' field"
            raise HDFMappingError(self.info['group path'], why=why)

        # initialize
        self.configs[cname]['shotnum'] = {
            'dset paths': self.configs[cname]['dset paths'],
            'dset field': ('Shot number', ),
            'shape': dset.dtype['Shot number'].shape,
            'dtype': np.int32,
        }

        # ---- define 'state values'                                ----
        # NOTE(review): `_configs` is used below while `configs` is
        # used above -- presumably `configs` is a property exposing
        # `_configs`, so both touch the same dict; verify
        self._configs[cname]['state values'] = {
            'xyz': {
                'dset paths': self._configs[cname]['dset paths'],
                'dset field': ('x', 'y', 'z'),
                'shape': (3, ),
                'dtype': np.float64
            },
        }

        # check dset for 'x', 'y' and 'z' fields
        fx = 'x' not in dset.dtype.names
        fy = 'y' not in dset.dtype.names
        fz = 'z' not in dset.dtype.names
        if fx and fy and fz:
            # all three missing is fatal
            why = "Dataset '" + dset.name \
                  + "' missing fields 'x', 'y' and 'z'"
            raise HDFMappingError(self.info['group path'], why=why)
        elif fx or fy or fz:
            # some (but not all) missing only warrants a warning
            # BUGFIX: dropped the stray leading space in the message
            mlist = [('x', fx), ('y', fy), ('z', fz)]
            missf = ', '.join([val for val, bol in mlist if bol])
            why = "Dataset '" + dset.name \
                  + "' missing field '" + missf + "'"
            warn(why)
    def _build_configs(self):
        """
        Builds the :attr:`configs` dictionary.

        Maps the three required datasets ('Magnet power supply
        currents', 'Magnetic field profile', and 'Magnetic field
        summary') into the structure consumed by ``HDFReadMSI``.

        :raises HDFMappingError: if a required dataset is missing, or
            if a dataset does not match its expected shape/fields
        """

        def _signal_shape(sig_dset, sig_name):
            """
            Validate a field-less, 2-D signal dataset whose row count
            matches the summary dataset; return its per-shot shape.
            Raises :exc:`HDFMappingError` on any mismatch.
            """
            if (sig_dset.dtype.names is None
                    and sig_dset.ndim == 2
                    and sig_dset.shape[0] == self._configs['shape'][0]):
                return (sig_dset.shape[1], )

            why = "'/" + sig_name + "' does not match expected shape"
            raise HDFMappingError(self.info['group path'], why=why)

        # look for required datasets
        for dset_name in [
                'Magnet power supply currents', 'Magnetic field profile',
                'Magnetic field summary'
        ]:
            if dset_name not in self.group:
                why = "dataset '" + dset_name + "' not found"
                raise HDFMappingError(self.info['group path'], why=why)

        # initialize general info values
        # - each pair is (configs key, HDF5 attribute name)
        pairs = [('calib tag', 'Calibration tag'),
                 ('z', 'Profile z locations')]
        for pair in pairs:
            try:
                val = self.group.attrs[pair[1]]
                if isinstance(val, (list, tuple, np.ndarray)):
                    self._configs[pair[0]] = val
                else:
                    self._configs[pair[0]] = [val]
            except KeyError:
                self._configs[pair[0]] = []
                warn("Attribute '" + pair[1] +
                     "' not found for MSI diagnostic '" + self.device_name +
                     "', continuing with mapping")

        # initialize 'shape'
        # - this is used by HDFReadMSI
        self._configs['shape'] = ()

        # initialize 'shotnum'
        self._configs['shotnum'] = {
            'dset paths': (),
            'dset field': ('Shot number', ),
            'shape': (),
            'dtype': np.int32,
        }

        # initialize 'signals'
        # - there are two signal fields
        #   1. 'magnet ps current'
        #   2. 'magnetic field'
        #
        self._configs['signals'] = {
            'magnet ps current': {
                'dset paths': (),
                'dset field': (),
                'shape': (),
                'dtype': np.float32,
            },
            'magnetic field': {
                'dset paths': (),
                'dset field': (),
                'shape': (),
                'dtype': np.float32,
            }
        }

        # initialize 'meta'
        self._configs['meta'] = {
            'shape': (),
            'timestamp': {
                'dset paths': (),
                'dset field': ('Timestamp', ),
                'shape': (),
                'dtype': np.float64,
            },
            'data valid': {
                'dset paths': (),
                'dset field': ('Data valid', ),
                'shape': (),
                'dtype': np.int8,
            },
            'peak magnetic field': {
                'dset paths': (),
                'dset field': ('Peak magnetic field', ),
                'shape': (),
                'dtype': np.float32,
            },
        }

        # ---- update configs related to 'Magnetic field summary'   ----
        # - dependent configs are:
        #   1. 'shotnum'
        #   2. all of 'meta'
        #
        dset_name = 'Magnetic field summary'
        dset = self.group[dset_name]

        # define 'shape'
        expected_fields = [
            'Shot number', 'Timestamp', 'Data valid', 'Peak magnetic field'
        ]
        if dset.ndim == 1 and \
                all(field in dset.dtype.names
                    for field in expected_fields):
            self._configs['shape'] = dset.shape
        else:
            why = "'/Magnetic field summary' does not match expected " \
                  "shape"
            raise HDFMappingError(self.info['group path'], why=why)

        # update 'shotnum'
        self._configs['shotnum']['dset paths'] = (dset.name, )
        self._configs['shotnum']['shape'] = \
            dset.dtype['Shot number'].shape

        # update 'meta/timestamp'
        self._configs['meta']['timestamp']['dset paths'] = (dset.name, )
        self._configs['meta']['timestamp']['shape'] = \
            dset.dtype['Timestamp'].shape

        # update 'meta/data valid'
        self._configs['meta']['data valid']['dset paths'] = (dset.name, )
        self._configs['meta']['data valid']['shape'] = \
            dset.dtype['Data valid'].shape

        # update 'meta/peak magnetic field'
        self._configs['meta']['peak magnetic field']['dset paths'] = \
            (dset.name,)
        self._configs['meta']['peak magnetic field']['shape'] = \
            dset.dtype['Peak magnetic field'].shape

        # ---- update configs related to                            ----
        # ---- 'Magnet power supply currents'                       ----
        # - dependent configs are:
        #   1. 'signals/magnet ps current'
        #
        dset_name = 'Magnet power supply currents'
        dset = self.group[dset_name]
        self._configs['signals']['magnet ps current']['dset paths'] = \
            (dset.name,)

        # check 'shape' (raises HDFMappingError on mismatch)
        self._configs['signals']['magnet ps current']['shape'] = \
            _signal_shape(dset, dset_name)

        # ---- update configs related to                            ----
        # ---- 'Magnetic field profile'                             ----
        # - dependent configs are:
        #   1. 'signals/magnetic field'
        #
        dset_name = 'Magnetic field profile'
        dset = self.group[dset_name]
        self._configs['signals']['magnetic field']['dset paths'] = \
            (dset.name,)

        # check 'shape' (raises HDFMappingError on mismatch)
        self._configs['signals']['magnetic field']['shape'] = \
            _signal_shape(dset, dset_name)
    def _build_configs(self):
        """
        Builds the :attr:`configs` dictionary.

        What goes into configs:

        1. num. of interferometers
        2. start times for each interferometer
        3. dt for each interferometer
        4. n_bar_L for each interferometer
        5. z location for each interferometer
        6. 'shotnum' field
           - contains mapping of HDF5 file quantity to np
           a. shape
           b. dtype
        7. 'signals' field
           - another dict where keys are the fields to be added to
             the np.array
        8. 'meta' field

        :raises HDFMappingError: if required datasets are missing,
            dataset shapes/fields are inconsistent across
            interferometers, or the found interferometer count does
            not match the 'Interferometer count' attribute
        """
        # initialize general info values
        # - pairs[0:2] are found in the main group's attributes
        # - pairs[2] corresponds to the sub-group names
        # - pairs[3:] are found in the main group's attributes (as an
        #     array) and in the sub-group attributes (elements of the
        #     main group's array)...I'm choosing to populate via the
        #     sub-group attributes to ensure one-to-one correspondence
        #     when extracting data with the HDFReadMSI class
        #
        pairs = [('n interferometer', 'Interferometer count'),
                 ('calib tag', 'Calibration tag'),
                 ('interferometer name', None), ('t0', 'Start time'),
                 ('dt', 'Timestep'), ('n_bar_L', 'n_bar_L'),
                 ('z', 'z location')]
        self._configs['interferometer name'] = []
        self._configs['t0'] = []
        self._configs['dt'] = []
        self._configs['n_bar_L'] = []
        self._configs['z'] = []
        for pair in pairs[0:2]:
            try:
                val = self.group.attrs[pair[1]]
                if isinstance(val, (list, tuple, np.ndarray)):
                    self._configs[pair[0]] = val
                else:
                    self._configs[pair[0]] = [val]
            except KeyError:
                self._configs[pair[0]] = []
                warn("Attribute '" + pair[1] +
                     "' not found for MSI diagnostic '" + self.device_name +
                     "', continuing with mapping")

        # more handling of general info value 'n interferometer'
        # - merged the two previously duplicated warn branches; the
        #   `or` short-circuit keeps the isinstance check from
        #   indexing an empty list
        pair = pairs[0]
        check_n_inter = True
        if (len(self._configs[pair[0]]) != 1
                or not isinstance(self._configs[pair[0]][0],
                                  (int, np.integer))):
            check_n_inter = False
            warn("Attribute '" + pair[1] + "' for MSI diagnostic '" +
                 self.device_name +
                 "' not an integer, continuing with mapping")

        # initialize 'shape'
        # - this is used by HDFReadMSI
        self._configs['shape'] = ()

        # initialize 'shotnum'
        self._configs['shotnum'] = {
            'dset paths': [],
            'dset field': ('Shot number', ),
            'shape': [],
            'dtype': np.int32,
        }

        # initialize 'signals'
        # - there is only one signal field named 'signal'
        self._configs['signals'] = {
            'signal': {
                'dset paths': [],
                'dset field': (),
                'shape': [],
                'dtype': np.float32,
            }
        }

        # initialize 'meta'
        self._configs['meta'] = {
            'timestamp': {
                'dset paths': [],
                'dset field': ('Timestamp', ),
                'shape': [],
                'dtype': np.float64
            },
            'data valid': {
                'dset paths': [],
                'dset field': ('Data valid', ),
                'shape': [],
                'dtype': np.int8
            },
            'peak density': {
                'dset paths': [],
                'dset field': ('Peak density', ),
                'shape': [],
                'dtype': np.float32
            },
        }
        self._configs['meta']['shape'] = () if not check_n_inter \
            else (int(self._configs['n interferometer'][0]),)

        # populate self.configs from each interferometer group
        # - all the population is done in this for-loop to ensure all
        #   lists are one-to-one
        #
        n_inter_count = 0
        sn_size = 0
        sig_size = 0
        for name in self.group:
            if isinstance(self.group[name], h5py.Group) \
                    and 'Interferometer' in name:
                # count the number of interferometers
                n_inter_count += 1

                # ensure required datasets are present
                for dset_name in [
                        'Interferometer summary list', 'Interferometer trace'
                ]:
                    if dset_name not in self.group[name]:
                        why = ("dataset '" + dset_name + "' not found " +
                               "for 'Interferometer/" + name + "'")
                        raise HDFMappingError(self.info['group path'], why=why)

                # populate general info values
                self._configs['interferometer name'].append(name)
                for pair in pairs[3::]:
                    try:
                        self._configs[pair[0]].append(
                            self.group[name].attrs[pair[1]])
                    except KeyError:
                        self._configs[pair[0]].append(None)
                        warn("Attribute '" + pair[1] +
                             "' not found for MSI diagnostic '" +
                             self.device_name + '/' + name +
                             "', continuing with mapping")

                # define values to ensure dataset sizes are consistent
                # sn_size  = number of shot numbers
                # sig_size = number of samples in interferometer trace
                #            - the way the np.array will be constructed
                #              requires all interferometer signals to
                #              have the same sample size
                # - define sn_size and ensure it's consistent among all
                #   datasets
                # - define sig_size and ensure it's consistent among all
                #   datasets
                #
                # - Enforcement of the these dimensions is done when
                #   mapping each dataset below
                #
                if n_inter_count == 1:
                    # define sn_size
                    dset_name = name + '/Interferometer summary list'
                    dset = self.group[dset_name]
                    if dset.ndim == 1:
                        sn_size = self.group[dset_name].shape[0]
                    else:
                        why = "'/Interferometer summary list' " \
                              "does not match expected shape"
                        raise HDFMappingError(self.info['group path'], why=why)

                    # define sig_size
                    dset_name = name + '/Interferometer trace'
                    dset = self.group[dset_name]
                    shape = self.group[dset_name].shape
                    if dset.dtype.names is not None:
                        # dataset has fields (it should not have fields)
                        why = "can not handle a 'signal' dataset" \
                              + "(" + dset_name + ") with fields"
                        raise HDFMappingError(self.info['group path'], why=why)
                    elif dset.ndim == 2:
                        if dset.shape[0] == sn_size:
                            sig_size = shape[1]
                        else:
                            why = "'Interferometer trace' and " \
                                  "'Interferometer summary list' do " \
                                  "not have same number of rows " \
                                  "(shot numbers)"
                            raise HDFMappingError(self.info['group path'],
                                                  why=why)
                    else:
                        # BUGFIX: corrected dataset name typo
                        # ('race' -> 'trace') in the error message
                        why = "'/Interferometer trace' does not" \
                              " match expected shape"
                        raise HDFMappingError(self.info['group path'], why=why)

                    # define 'shape'
                    self._configs['shape'] = (sn_size, )

                # -- update configs related to                      ----
                # -- 'Interferometer summary list'                  ----
                # - dependent configs are:
                #   1. 'shotnum'
                #   2. all of 'meta'
                #
                dset_name = name + '/Interferometer summary list'
                dset = self.group[dset_name]
                path = dset.name

                # check 'shape'
                expected_fields = [
                    'Shot number', 'Timestamp', 'Data valid', 'Peak density'
                ]
                if dset.shape != (sn_size, ):
                    # shape is not consistent among all datasets
                    why = "'/Interferometer summary list' shape " \
                          "is not consistent across all " \
                          "interferometers"
                    raise HDFMappingError(self.info['group path'], why=why)
                elif dset.dtype.names is None or not all(
                        field in dset.dtype.names
                        for field in expected_fields):
                    # required fields are not present
                    # BUGFIX: a field-less dataset has `names is None`;
                    # the original `in None` raised TypeError instead
                    # of HDFMappingError
                    why = "'/Interferometer summary list' does " \
                          "NOT have required fields"
                    raise HDFMappingError(self.info['group path'], why=why)

                # update 'shotnum'
                self._configs['shotnum']['dset paths'].append(path)
                self._configs['shotnum']['shape'].append(
                    dset.dtype['Shot number'].shape)

                # update 'meta/timestamp'
                self._configs['meta']['timestamp']['dset paths'].append(
                    dset.name)
                self._configs['meta']['timestamp']['shape'].append(
                    dset.dtype['Timestamp'].shape)

                # update 'meta/data valid'
                self._configs['meta']['data valid']['dset paths'].append(
                    dset.name)
                self._configs['meta']['data valid']['shape'].append(
                    dset.dtype['Data valid'].shape)

                # update 'meta/peak density'
                self._configs['meta']['peak density']['dset paths'].append(
                    dset.name)
                self._configs['meta']['peak density']['shape'].append(
                    dset.dtype['Peak density'].shape)

                # -- update configs related to                      ----
                # -- 'Interferometer trace'                         ----
                # - dependent configs are:
                #   1. 'signals/signal'
                #
                dset_name = name + '/Interferometer trace'
                dset = self.group[dset_name]

                # check 'shape'
                if dset.shape != (sn_size, sig_size):
                    # shape is not consistent among all datasets
                    # BUGFIX: added missing space ("is not")
                    why = "'/Interferometer trace' shape is " \
                          "not consistent across all " \
                          "interferometers"
                    raise HDFMappingError(self.info['group path'], why=why)
                elif dset.dtype.names is not None:
                    # dataset has fields (it should not have fields)
                    # BUGFIX: added missing space ("does not") and
                    # dropped the stray trailing space
                    why = "'/Interferometer trace' shape does " \
                          "not match expected shape"
                    raise HDFMappingError(self.info['group path'], why=why)

                # update 'signals/signal' values
                self._configs['signals']['signal']['dset paths'].append(
                    dset.name)

        # -- Post Populate Checks                                   ----
        # check 'shotnum'
        # 1. convert 'dset paths' from list to tuple
        # 2. convert 'shape' to a single tuple of shape
        self._configs['shotnum']['dset paths'] = \
            tuple(self._configs['shotnum']['dset paths'])
        sn_shapes = self._configs['shotnum']['shape']
        self._configs['shotnum']['shape'] = sn_shapes[0]

        # check 'signals' and 'meta'
        # 1. convert 'dset paths' from list to tuple
        # 2. every dataset has the same 'shape'
        for subfield in ('signals', 'meta'):
            subconfigs = self._configs[subfield]
            for field, config in subconfigs.items():
                # define shape
                if check_n_inter:
                    # 'n interferometer' was found in the HDF5 file
                    shape = (int(self._configs['n interferometer'][0]),
                             sig_size)
                else:
                    # 'n interferometer' was NOT found, rely on count
                    shape = (n_inter_count, sig_size)

                # update ['meta']['shape']
                if field == 'shape' and subfield == 'meta':
                    self._configs[subfield][field] = (shape[0], )
                    continue

                # convert ['dset paths'] to tuple
                self._configs[subfield][field]['dset paths'] = \
                    tuple(config['dset paths'])

                # ensure all fields have the same shape
                if subfield == 'signals':
                    self._configs[subfield][field]['shape'] = shape
                else:
                    shapes = self._configs[subfield][field]['shape']
                    # renamed the generator variable so it no longer
                    # shadows `shape` above
                    if all(s == shapes[0] for s in shapes):
                        self._configs[subfield][field]['shape'] = \
                            shapes[0]
                    else:
                        why = ("dataset shape for field '" + field +
                               "' is not consistent for all " +
                               "interferometers")
                        raise HDFMappingError(self.info['group path'], why=why)

        # ensure the number of found interferometers is equal to the
        # diagnostics 'Interferometer count'
        #
        if check_n_inter:
            if n_inter_count != self._configs['n interferometer'][0]:
                why = 'num. of found interferometers did not ' \
                      'match the expected num. of interferometers'
                raise HDFMappingError(self.info['group path'], why=why)
Example #4
0
    def _build_configs(self):
        """
        Builds the :attr:`configs` dictionary.

        Each sub-group of the device group is treated as one active
        configuration; the sub-group name doubles as the configuration
        name.

        :raises HDFMappingError: if there are no sub-groups to map, if
            a configuration's dataset is missing, or if the 'Waveform
            command list' attribute is absent
        """
        # a device with no sub-groups has nothing to map
        if len(self.subgroup_names) == 0:
            why = "has no mappable configurations"
            raise HDFMappingError(self._info["group path"], why=why)

        # (configs key, HDF5 attribute name) for the general info
        # values gathered from each configuration group
        attr_pairs = (
            ("IP address", "IP address"),
            ("generator device", "Generator type"),
            ("GPIB address", "GPIB address"),
            ("initial state", "Initial state"),
            ("command list", "Waveform command list"),
        )

        for name in self.subgroup_names:
            config_group = self.group[name]

            # locate the dataset associated with this configuration
            try:
                dset = self.group[self.construct_dataset_name()]
            except KeyError:
                why = (
                    f"Dataset '{self.construct_dataset_name()}' not found for "
                    f"configuration group '{name}'"
                )
                raise HDFMappingError(self._info["group path"], why=why)

            # start this configuration's mapping dict
            config = {}
            self._configs[name] = config

            # ---- define general info values                       ----
            for key, attr_name in attr_pairs:
                try:
                    val = config_group.attrs[attr_name]
                except KeyError:
                    # a missing attribute is fatal for the command
                    # list, but only warned about for everything else
                    config[key] = None
                    if key == "command list":
                        why = (
                            f"Attribute '{attr_name}' not found for configuration "
                            f"group '{name}'"
                        )
                        raise HDFMappingError(self._info["group path"], why=why)
                    warn(
                        f"Attribute '{attr_name}' not found in control device '"
                        f"{self.device_name}' configuration group '{name}'"
                        f", continuing with mapping"
                    )
                    continue

                # condition the raw attribute value
                if key == "command list":
                    # np.bytes_ blob: split line returns, then strip
                    # trailing/leading whitespace from each command
                    commands = _bytes_to_str(val).splitlines()
                    val = tuple([command.strip() for command in commands])
                elif key in ("IP address", "generator device", "initial state"):
                    # plain np.bytes_ string
                    val = _bytes_to_str(val)
                # else: 'GPIB address' arrives as np.uint32 and needs
                # no conditioning

                config[key] = val

            # ---- define 'dset paths'                              ----
            config["dset paths"] = (dset.name,)

            # ---- define 'shotnum'                                 ----
            config["shotnum"] = {
                "dset paths": config["dset paths"],
                "dset field": ("Shot number",),
                "shape": dset.dtype["Shot number"].shape,
                "dtype": np.int32,
            }

            # ---- define 'state values'                            ----
            # probe for state values, suppressing any warnings raised
            # during this initialization pass
            with warnings.catch_warnings():
                warnings.simplefilter("ignore")
                try:
                    sv_state = self._construct_state_values_dict(
                        name, self._default_re_patterns
                    )
                except KeyError:
                    sv_state = {}

            # fall back to the default dict when probing found nothing
            config["state values"] = (
                sv_state or self._default_state_values_dict(name)
            )
Example #5
0
    def _build_configs(self):
        """
        Builds the :attr:`configs` dictionary.

        :raises HDFMappingError: if a required dataset is missing, or a
            dataset does not match its expected shape/fields
        """
        # look for required datasets
        for dset_name in [
            "Magnet power supply currents",
            "Magnetic field profile",
            "Magnetic field summary",
        ]:
            if dset_name not in self.group:
                why = f"dataset '{dset_name}' not found"
                raise HDFMappingError(self.info["group path"], why=why)

        # initialize general info values
        # - values are always stored as lists/arrays so HDFReadMSI can
        #   treat scalar and array attributes uniformly
        pairs = [("calib tag", "Calibration tag"), ("z", "Profile z locations")]
        for pair in pairs:
            try:
                val = self.group.attrs[pair[1]]
                if isinstance(val, (list, tuple, np.ndarray)):
                    self._configs[pair[0]] = val
                else:
                    self._configs[pair[0]] = [val]
            except KeyError:
                # a missing info attribute is not fatal to the mapping
                self._configs[pair[0]] = []
                warn(
                    f"Attribute '{pair[1]}' not found for MSI diagnostic "
                    f"'{self.device_name}', continuing with mapping"
                )

        # initialize 'shape'
        # - this is used by HDFReadMSI
        self._configs["shape"] = ()

        # initialize 'shotnum'
        self._configs["shotnum"] = {
            "dset paths": (),
            "dset field": ("Shot number",),
            "shape": (),
            "dtype": np.int32,
        }

        # initialize 'signals'
        # - there are two signal fields
        #   1. 'magnet ps current'
        #   2. 'magnetic field'
        #
        self._configs["signals"] = {
            "magnet ps current": {
                "dset paths": (),
                "dset field": (),
                "shape": (),
                "dtype": np.float32,
            },
            "magnetic field": {
                "dset paths": (),
                "dset field": (),
                "shape": (),
                "dtype": np.float32,
            },
        }

        # initialize 'meta'
        self._configs["meta"] = {
            "shape": (),
            "timestamp": {
                "dset paths": (),
                "dset field": ("Timestamp",),
                "shape": (),
                "dtype": np.float64,
            },
            "data valid": {
                "dset paths": (),
                "dset field": ("Data valid",),
                "shape": (),
                "dtype": np.int8,
            },
            "peak magnetic field": {
                "dset paths": (),
                "dset field": ("Peak magnetic field",),
                "shape": (),
                "dtype": np.float32,
            },
        }

        # ---- update configs related to 'Magnetic field summary'   ----
        # - dependent configs are:
        #   1. 'shotnum'
        #   2. all of 'meta'
        #
        dset_name = "Magnetic field summary"
        dset = self.group[dset_name]

        # define 'shape'
        # - the summary dataset must be 1D and carry every field mapped
        #   by 'shotnum' and 'meta'
        expected_fields = [
            "Shot number",
            "Timestamp",
            "Data valid",
            "Peak magnetic field",
        ]
        if dset.ndim == 1 and all(field in dset.dtype.names for field in expected_fields):
            self._configs["shape"] = dset.shape
        else:
            why = "'/Magnetic field summary' does not match expected shape"
            raise HDFMappingError(self.info["group path"], why=why)

        # update 'shotnum'
        self._configs["shotnum"]["dset paths"] = (dset.name,)
        self._configs["shotnum"]["shape"] = dset.dtype["Shot number"].shape

        # update 'meta/timestamp'
        self._configs["meta"]["timestamp"]["dset paths"] = (dset.name,)
        self._configs["meta"]["timestamp"]["shape"] = dset.dtype["Timestamp"].shape

        # update 'meta/data valid'
        self._configs["meta"]["data valid"]["dset paths"] = (dset.name,)
        self._configs["meta"]["data valid"]["shape"] = dset.dtype["Data valid"].shape

        # update 'meta/peak magnetic field'
        self._configs["meta"]["peak magnetic field"]["dset paths"] = (dset.name,)
        self._configs["meta"]["peak magnetic field"]["shape"] = dset.dtype[
            "Peak magnetic field"
        ].shape

        # ---- update configs related to 'Magnet power supply currents' ----
        # - dependent configs are:
        #   1. 'signals/magnet ps current'
        #
        self._map_2d_signal_dataset("Magnet power supply currents", "magnet ps current")

        # ---- update configs related to 'Magnetic field profile'   ----
        # - dependent configs are:
        #   1. 'signals/magnetic field'
        #
        self._map_2d_signal_dataset("Magnetic field profile", "magnetic field")

    def _map_2d_signal_dataset(self, dset_name, signal_name):
        """
        Map the 2D signal dataset `dset_name` into
        ``configs['signals'][signal_name]``.

        :param dset_name: name of the dataset in :attr:`group`
        :param signal_name: key of the ``configs['signals']`` entry to
            update
        :raises HDFMappingError: if the dataset has named fields, is not
            2D, or its number of rows does not match the number of shot
            numbers (``configs['shape'][0]``)
        """
        dset = self.group[dset_name]
        self._configs["signals"][signal_name]["dset paths"] = (dset.name,)

        # a valid signal dataset is field-less, 2D, and has one row per
        # shot number recorded in the summary dataset
        if (
            dset.dtype.names is None
            and dset.ndim == 2
            and dset.shape[0] == self._configs["shape"][0]
        ):
            self._configs["signals"][signal_name]["shape"] = (dset.shape[1],)
        else:
            why = f"'/{dset_name}' does not match expected shape"
            raise HDFMappingError(self.info["group path"], why=why)
Exemple #6
0
    def _build_configs(self):
        """
        Builds the :attr:`configs` dictionary.

        :raises HDFMappingError: if required datasets are missing, have
            unexpected shapes/fields, or are inconsistent among the
            interferometer sub-groups
        """
        # What should be in configs
        # 1. num. of interferometers
        # 2. start times for each interferometer
        # 3. dt for each interferometer
        # 4. n_bar_L for each interferometer
        # 5. z location for each interferometer
        # 6. 'shotnum' field
        #    - contains mapping of HDF5 file quantity to np
        #    a. shape
        #    b. dtype
        # 7. 'signals' field
        #    - another dict where keys are the fields to be added to
        #      the np.array
        # 8. 'meta' field
        #

        # initialize general info values
        # - pairs[0:2] are found in the main group's attributes
        # - pairs[2] corresponds to the sub-group names
        # - pairs[3:] are found in the main group's attributes (as an
        #     array) and in the sub-group attributes (elements of the
        #     main group's array)...populating via the sub-group
        #     attributes ensures one-to-one correspondence when
        #     extracting data with the HDFReadMSI class
        #
        pairs = [
            ("n interferometer", "Interferometer count"),
            ("calib tag", "Calibration tag"),
            ("interferometer name", None),
            ("t0", "Start time"),
            ("dt", "Timestep"),
            ("n_bar_L", "n_bar_L"),
            ("z", "z location"),
        ]
        self._configs["interferometer name"] = []
        self._configs["t0"] = []
        self._configs["dt"] = []
        self._configs["n_bar_L"] = []
        self._configs["z"] = []
        for pair in pairs[0:2]:
            try:
                val = self.group.attrs[pair[1]]
                if isinstance(val, (list, tuple, np.ndarray)):
                    self._configs[pair[0]] = val
                else:
                    self._configs[pair[0]] = [val]
            except KeyError:
                self._configs[pair[0]] = []
                warn(
                    f"Attribute '{pair[1]}' not found for MSI diagnostic "
                    f"'{self.device_name}', continuing with mapping"
                )

        # more handling of general info value 'n interferometer'
        # - it must be a lone integer; if it is not, disable the
        #   cross-checks below and rely on counting the sub-groups
        #   (the two failure modes previously duplicated the same warn)
        pair = pairs[0]
        check_n_inter = True
        if len(self._configs[pair[0]]) != 1 or not isinstance(
            self._configs[pair[0]][0], (int, np.integer)
        ):
            check_n_inter = False
            warn(
                f"Attribute '{pair[1]}' for MSI diagnostic "
                f"'{self.device_name}' not an integer, continuing with mapping"
            )

        # initialize 'shape'
        # - this is used by HDFReadMSI
        self._configs["shape"] = ()

        # initialize 'shotnum'
        # - 'dset paths' and 'shape' are built as lists (one entry per
        #   interferometer) and collapsed in the post-populate checks
        self._configs["shotnum"] = {
            "dset paths": [],
            "dset field": ("Shot number",),
            "shape": [],
            "dtype": np.int32,
        }

        # initialize 'signals'
        # - there is only one signal field named 'signal'
        self._configs["signals"] = {
            "signal": {
                "dset paths": [],
                "dset field": (),
                "shape": [],
                "dtype": np.float32,
            }
        }

        # initialize 'meta'
        self._configs["meta"] = {
            "timestamp": {
                "dset paths": [],
                "dset field": ("Timestamp",),
                "shape": [],
                "dtype": np.float64,
            },
            "data valid": {
                "dset paths": [],
                "dset field": ("Data valid",),
                "shape": [],
                "dtype": np.int8,
            },
            "peak density": {
                "dset paths": [],
                "dset field": ("Peak density",),
                "shape": [],
                "dtype": np.float32,
            },
        }
        self._configs["meta"]["shape"] = (
            (int(self._configs["n interferometer"][0]),) if check_n_inter else ()
        )

        # populate self.configs from each interferometer group
        # - all the population is done in this for-loop to ensure all
        #   lists are one-to-one
        #
        n_inter_count = 0
        sn_size = 0
        sig_size = 0
        for name in self.group:
            if isinstance(self.group[name], h5py.Group) and "Interferometer" in name:
                # count the number of interferometers
                n_inter_count += 1

                # ensure required datasets are present
                for dset_name in [
                    "Interferometer summary list",
                    "Interferometer trace",
                ]:
                    if dset_name not in self.group[name]:
                        why = (
                            f"dataset '{dset_name}' not found for "
                            f"'Interferometer/{name}'"
                        )
                        raise HDFMappingError(self.info["group path"], why=why)

                # populate general info values
                self._configs["interferometer name"].append(name)
                for pair in pairs[3::]:
                    try:
                        self._configs[pair[0]].append(self.group[name].attrs[pair[1]])
                    except KeyError:
                        self._configs[pair[0]].append(None)
                        warn(
                            f"Attribute '{pair[1]}' not found for MSI diagnostic "
                            f"'{self.device_name}/{name}', continuing with mapping"
                        )

                # define values to ensure dataset sizes are consistent
                # sn_size  = number of shot numbers
                # sig_size = number of samples in interferometer trace
                #            - the way the np.array will be constructed
                #              requires all interferometer signals to
                #              have the same sample size
                # - sn_size and sig_size are defined from the FIRST
                #   interferometer group; consistency with the other
                #   interferometers is enforced when mapping each
                #   dataset below
                #
                if n_inter_count == 1:
                    # define sn_size
                    dset_name = f"{name}/Interferometer summary list"
                    dset = self.group[dset_name]
                    if dset.ndim == 1:
                        sn_size = self.group[dset_name].shape[0]
                    else:
                        why = (
                            "'/Interferometer summary list' "
                            "does not match expected shape"
                        )
                        raise HDFMappingError(self.info["group path"], why=why)

                    # define sig_size
                    dset_name = f"{name}/Interferometer trace"
                    dset = self.group[dset_name]
                    shape = self.group[dset_name].shape
                    if dset.dtype.names is not None:
                        # dataset has fields (it should not have fields)
                        why = (
                            f"can not handle a 'signal' dataset "
                            f"({dset_name}) with fields"
                        )
                        raise HDFMappingError(self.info["group path"], why=why)
                    elif dset.ndim == 2:
                        if dset.shape[0] == sn_size:
                            sig_size = shape[1]
                        else:
                            why = (
                                "'Interferometer trace' and "
                                "'Interferometer summary list' do "
                                "not have same number of rows "
                                "(shot numbers)"
                            )
                            raise HDFMappingError(self.info["group path"], why=why)
                    else:
                        # (fixed typo: message previously read
                        #  "'/Interferometer race'")
                        why = "'/Interferometer trace' does not match expected shape"
                        raise HDFMappingError(self.info["group path"], why=why)

                    # define 'shape'
                    self._configs["shape"] = (sn_size,)

                # -- update configs related to                      ----
                # -- 'Interferometer summary list'                  ----
                # - dependent configs are:
                #   1. 'shotnum'
                #   2. all of 'meta'
                #
                dset_name = f"{name}/Interferometer summary list"
                dset = self.group[dset_name]
                path = dset.name

                # check 'shape'
                expected_fields = [
                    "Shot number",
                    "Timestamp",
                    "Data valid",
                    "Peak density",
                ]
                if dset.shape != (sn_size,):
                    # shape is not consistent among all datasets
                    why = (
                        "'/Interferometer summary list' shape "
                        "is not consistent across all "
                        "interferometers"
                    )
                    raise HDFMappingError(self.info["group path"], why=why)
                elif not all(field in dset.dtype.names for field in expected_fields):
                    # required fields are not present
                    why = "'/Interferometer summary list' does NOT have required fields"
                    raise HDFMappingError(self.info["group path"], why=why)

                # update 'shotnum'
                self._configs["shotnum"]["dset paths"].append(path)
                self._configs["shotnum"]["shape"].append(
                    dset.dtype["Shot number"].shape
                )

                # update 'meta/timestamp'
                self._configs["meta"]["timestamp"]["dset paths"].append(dset.name)
                self._configs["meta"]["timestamp"]["shape"].append(
                    dset.dtype["Timestamp"].shape
                )

                # update 'meta/data valid'
                self._configs["meta"]["data valid"]["dset paths"].append(dset.name)
                self._configs["meta"]["data valid"]["shape"].append(
                    dset.dtype["Data valid"].shape
                )

                # update 'meta/peak density'
                self._configs["meta"]["peak density"]["dset paths"].append(dset.name)
                self._configs["meta"]["peak density"]["shape"].append(
                    dset.dtype["Peak density"].shape
                )

                # -- update configs related to                      ----
                # -- 'Interferometer trace'                         ----
                # - dependent configs are:
                #   1. 'signals/signal'
                #
                dset_name = f"{name}/Interferometer trace"
                dset = self.group[dset_name]

                # check 'shape'
                if dset.shape != (sn_size, sig_size):
                    # shape is not consistent among all datasets
                    # (fixed typo: message previously concatenated to
                    #  "shape isnot consistent")
                    why = (
                        "'/Interferometer trace' shape is "
                        "not consistent across all "
                        "interferometers"
                    )
                    raise HDFMappingError(self.info["group path"], why=why)
                elif dset.dtype.names is not None:
                    # dataset has fields (it should not have fields)
                    why = "'/Interferometer trace' shape does not match expected shape "
                    raise HDFMappingError(self.info["group path"], why=why)

                # update 'signals/signal' values
                self._configs["signals"]["signal"]["dset paths"].append(dset.name)

        # -- Post Populate Checks                                   ----
        # check 'shotnum'
        # 1. convert 'dset paths' from list to tuple
        # 2. convert 'shape' to a single tuple of shape
        self._configs["shotnum"]["dset paths"] = tuple(
            self._configs["shotnum"]["dset paths"]
        )
        sn_shapes = self._configs["shotnum"]["shape"]
        self._configs["shotnum"]["shape"] = sn_shapes[0]

        # check 'signals' and 'meta'
        # 1. convert 'dset paths' from list to tuple
        # 2. every dataset has the same 'shape'
        for subfield in ("signals", "meta"):
            subconfigs = self._configs[subfield]
            for field, config in subconfigs.items():
                # define expected shape: one row per interferometer,
                # sig_size samples per row
                if check_n_inter:
                    # 'n interferometer' was found in the HDF5 file
                    shape = (int(self._configs["n interferometer"][0]), sig_size)
                else:
                    # 'n interferometer' was NOT found, rely on count
                    shape = (n_inter_count, sig_size)

                # update ['meta']['shape']
                if field == "shape" and subfield == "meta":
                    self._configs[subfield][field] = (shape[0],)
                    continue

                # convert ['dset paths'] to tuple
                self._configs[subfield][field]["dset paths"] = tuple(
                    config["dset paths"]
                )

                # ensure all fields have the same shape
                if subfield == "signals":
                    self._configs[subfield][field]["shape"] = shape
                else:
                    shapes = self._configs[subfield][field]["shape"]
                    if all(ss == shapes[0] for ss in shapes):
                        self._configs[subfield][field]["shape"] = shapes[0]
                    else:
                        why = (
                            f"dataset shape for field '{field}' is not consistent "
                            f"for all interferometers"
                        )
                        raise HDFMappingError(self.info["group path"], why=why)

        # ensure the number of found interferometers is equal to the
        # diagnostics 'Interferometer count'
        #
        if check_n_inter:
            if n_inter_count != self._configs["n interferometer"][0]:
                why = (
                    "num. of found interferometers did not "
                    "match the expected num. of interferometers"
                )
                raise HDFMappingError(self.info["group path"], why=why)
Exemple #7
0
    def _find_adc_connections(
        self, adc_name: str, config_name: str
    ) -> Tuple[Tuple[int, Tuple[int, ...], Dict[str, Any]], ...]:
        """
        Determines active connections on the adc.

        :param adc_name: name of the analog-digital-converter
        :param config_name: digitizer configuration name

        :return:

            Tuple of 3-element tuples where the 1st element of the
            nested tuple represents a connected *board* number, the 2nd
            element is a tuple of connected *channel* numbers for the
            *board*, and the 3rd element is a dictionary of adc setup
            values (*bit*, *clock rate*, etc.).

        On determination of adc connections, the meta-info dict will
        also be populated with:

        .. csv-table::
            :header: "Key", "Description"
            :widths: 20, 60

            "::

                'clock rate'
            ", "
            clock rate of the digitizer's analog-digital-converter
            "
            "::

                'shot average (software)'
            ", "
            number of shots intended to be averaged over
            "
            "::

                'sample average (hardware)'
            ", "
            number of data samples average together
            "
        """
        # retrieve the HDF5 group holding this digitizer configuration
        # and whether the configuration is the active one
        config_path = self.configs[config_name]["config group path"]
        config_group = self.group.get(config_path)
        active = self.configs[config_name]["active"]

        # initialize conn
        # conn = list of connections
        #
        conn = []

        # define _helpers
        # - maps each supported adc to the regex used to recognize its
        #   configuration sub-groups and extract their index
        if adc_name not in ("SIS 3302", "SIS 3305"):  # pragma: no cover
            # this should never happen
            warn(f"Invalid adc name '{adc_name}'")
            return ()
        _helpers = {
            "SIS 3302": {
                "short": "3302",
                "re": r"SIS crate 3302 configurations\[(?P<INDEX>\d+)\]",
            },
            "SIS 3305": {
                "short": "3305",
                "re": r"SIS crate 3305 configurations\[(?P<INDEX>\d+)\]",
            },
        }

        # get slot numbers and configuration indices
        slots = config_group.attrs["SIS crate slot numbers"]  # type: np.ndarray
        indices = config_group.attrs["SIS crate config indices"]  # type: np.ndarray

        # ensure slots and indices are 1D arrays of the same size
        if slots.ndim != 1 or indices.ndim != 1:
            raise HDFMappingError(
                self.info["group path"],
                "HDF5 structure unexpected...Defined slots and "
                "configuration indices are not 1D arrays.",
            )
        elif slots.size != indices.size:
            raise HDFMappingError(
                self.info["group path"],
                "HDF5 structure unexpected...Defined slots and "
                "configuration indices are not the same size.",
            )

        # ensure defined slots are unique
        if np.unique(slots).size != slots.size:
            raise HDFMappingError(
                self.info["group path"],
                "HDF5 structure unexpected...defined slot numbers are not unique",
            )

        # Build tuple (slot, config index, board, adc)
        # - build a tuple that pairs the adc name (adc), adc slot
        #   number (slot), configuration group index (index), and
        #   board number (brd)
        #
        adc_pairs = []
        for slot, index in zip(slots, indices):
            # NOTE(review): slot 3 is always skipped — presumably not a
            # digitizer board slot (e.g. crate controller); confirm
            # against `slot_info` definition
            if slot != 3:
                try:
                    # slot_info maps a slot number to (board, adc name)
                    brd, adc = self.slot_info[slot]
                    adc_pairs.append((slot, index, brd, adc))
                except KeyError:
                    why = (
                        f"HDF5 structure unexpected...defined slot number {slot} "
                        f"is unexpected...not adding to `configs` mapping"
                    )
                    warn(why)

        # Ensure the same configuration index is not assign to multiple
        # slots for the same adc
        # - fatal for an active config, otherwise warn and bail out
        for slot, index, brd, adc in adc_pairs:
            for ss, ii, bb, aa in adc_pairs:
                if ii == index and aa == adc and ss != slot:
                    why = (
                        "The same configuration index is assigned "
                        "to multiple slots of the same adc."
                    )
                    if active:
                        raise HDFMappingError(self.info["group path"], why=why)
                    else:
                        why += "...config not active so not adding to mapping"
                        warn(why)
                        return ()

        # gather adc configuration groups
        # - gnames collects (sub-group name, parsed config index) pairs
        gnames = []
        for name in config_group:
            _match = re.fullmatch(_helpers[adc_name]["re"], name)
            if bool(_match):
                gnames.append((name, int(_match.group("INDEX"))))

        # Determine connected (brd, ch) combinations
        for name, config_index in gnames:
            # find board number
            # - a board is matched by adc name and configuration index
            brd = None
            for slot, index, board, adc in adc_pairs:
                if adc_name == adc and config_index == index:
                    brd = board
                    break

            # ensure board number was found
            if brd is None:
                why = (
                    f"Board not found since group name determined "
                    f"`config_index` {config_index} not defined in top-level "
                    f"configuration group"
                )
                warn(why)
                continue

            # find connected channels
            # - channel numbers are parsed from the 'Enabled N' style
            #   attribute names of the configuration sub-group
            chs = []
            if adc_name == "SIS 3302":
                _patterns = (r"Enabled\s(?P<CH>\d+)",)
            else:
                # SIS 3305
                _patterns = (
                    r"FPGA 1 Enabled\s(?P<CH>\d+)",
                    r"FPGA 2 Enabled\s(?P<CH>\d+)",
                )
            for key, val in config_group[name].attrs.items():
                # the attribute value is a byte-string flag, b"TRUE"
                # marks an enabled channel
                if "Enabled" in key and val == b"TRUE":
                    ch = None
                    for pat in _patterns:
                        _match = re.fullmatch(pat, key)
                        if bool(_match):
                            ch = int(_match.group("CH"))
                            if "FPGA 2" in pat:
                                # FPGA 2 channels are offset by 4 so the
                                # two FPGAs yield distinct channel numbers
                                ch += 4
                            break

                    if ch is not None:
                        chs.append(ch)

            # ensure chs is not NULL
            if len(chs) == 0:
                why = (
                    f"HDF5 structure unexpected...'{config_name}/{name}' does "
                    f"not define any valid channel numbers...not adding to "
                    f"`configs` dict"
                )
                warn(why)

                # skip adding to conn list
                continue

            # determine shot averaging
            # - values of 0 or 1 are normalized to None (no averaging)
            shot_ave = None
            if "Shot averaging (software)" in config_group[name].attrs:
                shot_ave = config_group[name].attrs["Shot averaging (software)"]
                if shot_ave in (0, 1):
                    shot_ave = None

            # determine sample averaging
            sample_ave = None
            if adc_name == "SIS 3305":
                # the 'SIS 3305' adc does NOT support sample averaging
                pass
            else:
                # SIS 3302
                # - the HDF5 attribute is the power to 2
                # - So, a hardware sample of 5 actually means the number
                #   of points sampled is 2^5
                if "Sample averaging (hardware)" in config_group[name].attrs:
                    sample_ave = config_group[name].attrs["Sample averaging (hardware)"]
                    if sample_ave == 0:
                        sample_ave = None
                    else:
                        sample_ave = 2 ** sample_ave

            # determine clock rate
            if adc_name == "SIS 3305":
                # has different clock rate modes
                # - mode -1 (unknown/undefined) maps to a clock rate of
                #   None
                try:
                    cr_mode = config_group[name].attrs["Channel mode"]
                    cr_mode = int(cr_mode)
                except (KeyError, ValueError):
                    why = (
                        f"HDF5 structure unexpected...'{config_name}/{name}' does "
                        f"not define a clock rate mode...setting to None in the "
                        f"`configs` dict"
                    )
                    warn(why)
                    cr_mode = -1
                if cr_mode == 0:
                    cr = u.Quantity(1.25, unit="GHz")
                elif cr_mode == 1:
                    cr = u.Quantity(2.5, unit="GHz")
                elif cr_mode == 2:
                    cr = u.Quantity(5.0, unit="GHz")
                else:
                    cr = None
            else:
                # 'SIS 3302' has one clock rate mode
                cr = u.Quantity(100.0, unit="MHz")

            # build subconn tuple with connected board, channels, and
            # acquisition parameters
            subconn = (
                brd,
                tuple(chs),
                {
                    "clock rate": cr,
                    "shot average (software)": shot_ave,
                    "sample average (hardware)": sample_ave,
                },
            )

            # add to all connections list
            conn.append(subconn)

        return tuple(conn)
Exemple #8
0
    def _build_configs(self):
        """Builds the :attr:`configs` dictionary."""
        # look for required datasets
        for dset_name in [
                'Cathode-anode voltage', 'Discharge current',
                'Discharge summary'
        ]:
            if dset_name not in self.group:
                why = "dataset '" + dset_name + "' not found "
                raise HDFMappingError(self.info['group path'], why=why)

        # initialize general info values
        pairs = [('current conversion factor', 'Current conversion factor'),
                 ('voltage conversion factor', 'Voltage conversion factor'),
                 ('t0', 'Start time'), ('dt', 'Timestep')]
        for pair in pairs:
            try:
                self._configs[pair[0]] = [self.group.attrs[pair[1]]]
            except KeyError:
                self._configs[pair[0]] = []
                warn("Attribute '" + pair[1] +
                     "' not found for MSI diagnostic '" + self.device_name +
                     "', continuing with mapping")

        # initialize 'shape'
        # - this is used by HDFReadMSI
        self._configs['shape'] = ()

        # initialize 'shotnum'
        self._configs['shotnum'] = {
            'dset paths': (),
            'dset field': ('Shot number', ),
            'shape': (),
            'dtype': np.int32,
        }

        # initialize 'signals'
        # - there are two signal fields
        #   1. 'voltage'
        #   2. 'current'
        #
        self._configs['signals'] = {
            'voltage': {
                'dset paths': (),
                'dset field': (),
                'shape': (),
                'dtype': np.float32,
            },
            'current': {
                'dset paths': (),
                'dset field': (),
                'shape': (),
                'dtype': np.float32,
            }
        }

        # initialize 'meta'
        self._configs['meta'] = {
            'shape': (),
            'timestamp': {
                'dset paths': (),
                'dset field': ('Timestamp', ),
                'shape': (),
                'dtype': np.float64,
            },
            'data valid': {
                'dset paths': (),
                'dset field': ('Data valid', ),
                'shape': (),
                'dtype': np.int8,
            },
            'pulse length': {
                'dset paths': (),
                'dset field': ('Pulse length', ),
                'shape': (),
                'dtype': np.float32,
            },
            'peak current': {
                'dset paths': (),
                'dset field': ('Peak current', ),
                'shape': (),
                'dtype': np.float32,
            },
            'bank voltage': {
                'dset paths': (),
                'dset field': ('Bank voltage', ),
                'shape': (),
                'dtype': np.float32,
            },
        }

        # ---- update configs related to 'Discharge summary'        ----
        # - dependent configs are:
        #   1. 'shape'
        #   2. 'shotnum'
        #   3. all of 'meta'
        #
        dset_name = 'Discharge summary'
        dset = self.group[dset_name]

        # define 'shape'
        expected_fields = [
            'Shot number', 'Timestamp', 'Data valid', 'Pulse length',
            'Peak current', 'Bank voltage'
        ]
        if dset.ndim == 1 and \
                all(field in dset.dtype.names
                    for field in expected_fields):
            self._configs['shape'] = dset.shape
        else:
            why = "'/Discharge summary' does not match expected shape"
            raise HDFMappingError(self.info['group path'], why=why)

        # update 'shotnum'
        self._configs['shotnum']['dset paths'] = (dset.name, )
        self._configs['shotnum']['shape'] = \
            dset.dtype['Shot number'].shape

        # update 'meta/timestamp'
        self._configs['meta']['timestamp']['dset paths'] = (dset.name, )
        self._configs['meta']['timestamp']['shape'] = \
            dset.dtype['Timestamp'].shape

        # update 'meta/data valid'
        self._configs['meta']['data valid']['dset paths'] = (dset.name, )
        self._configs['meta']['data valid']['shape'] = \
            dset.dtype['Data valid'].shape

        # update 'meta/pulse length'
        self._configs['meta']['pulse length']['dset paths'] = \
            (dset.name,)
        self._configs['meta']['pulse length']['shape'] = \
            dset.dtype['Pulse length'].shape

        # update 'meta/peak current'
        self._configs['meta']['peak current']['dset paths'] = \
            (dset.name,)
        self._configs['meta']['peak current']['shape'] = \
            dset.dtype['Peak current'].shape

        # update 'meta/bank voltage'
        self._configs['meta']['bank voltage']['dset paths'] = \
            (dset.name,)
        self._configs['meta']['bank voltage']['shape'] = \
            dset.dtype['Bank voltage'].shape

        # ---- update configs related to 'Cathode-anode voltage'   ----
        # - dependent configs are:
        #   1. 'signals/voltage'
        #
        dset_name = 'Cathode-anode voltage'
        dset = self.group[dset_name]
        self._configs['signals']['voltage']['dset paths'] = (dset.name, )

        # check 'shape'
        _build_success = True
        if dset.dtype.names is not None:
            # dataset has fields (it should not have fields)
            _build_success = False
        elif dset.ndim == 2:
            if dset.shape[0] == self._configs['shape'][0]:
                self._configs['signals']['voltage']['shape'] = \
                    (dset.shape[1],)
            else:
                _build_success = False
        else:
            _build_success = False
        if not _build_success:
            why = "'/Cathode-anode voltage' does not match expected " \
                  "shape"
            raise HDFMappingError(self.info['group path'], why=why)

        # update configs related to 'Discharge current'             ----
        # - dependent configs are:
        #   1. 'signals/current'
        #
        dset_name = 'Discharge current'
        dset = self.group[dset_name]
        self._configs['signals']['current']['dset paths'] = (dset.name, )

        # check 'shape'
        _build_success = True
        if dset.dtype.names is not None:
            # dataset has fields (it should not have fields)
            _build_success = False
        elif dset.ndim == 2:
            if dset.shape[0] == self._configs['shape'][0]:
                self._configs['signals']['current']['shape'] = \
                    (dset.shape[1],)
            else:
                _build_success = False
        else:
            _build_success = False
        if not _build_success:
            why = "'/Discharge current' does not match expected shape"
            raise HDFMappingError(self.info['group path'], why=why)
Exemple #9
0
    def _build_configs(self):
        """Builds the :attr:`configs` dictionary."""
        # build order:
        #  1. gather motion-list definitions from the sub-groups
        #  2. gather probe-list definitions from the sub-groups
        #  3. construct one configuration per probe (receptacle)
        #
        # TODO: HOW TO ADD MOTION LIST TO DICT
        # - right now, the dataset has to be read which has the
        #   potential for creating long mapping times
        # - this is probably best left to HDFReadControls
        #

        # gather 'motion list' and 'probe list' definitions
        _motion_lists = {}
        _probe_lists = {}
        for sg_name in self.subgroup_names:
            ml_stuff = self._analyze_motionlist(sg_name)
            if ml_stuff:
                # sub-group defines a motion list
                _motion_lists[ml_stuff['name']] = ml_stuff['config']
                continue
            pl_stuff = self._analyze_probelist(sg_name)
            if pl_stuff:
                # sub-group defines a probe list
                _probe_lists[pl_stuff['name']] = pl_stuff['config']

        # a mapping is impossible without at least one Probe List group
        if not _probe_lists:
            why = 'has no mappable configurations (Probe List groups)'
            raise HDFMappingError(self._info['group path'], why=why)

        # build configuration dictionaries
        # - the receptacle number doubles as the configuration name,
        #   since each probe is one-to-one with a receptacle
        #
        for pname, probe_config in _probe_lists.items():
            # define configuration name and seed its dict
            config_name = probe_config['receptacle']
            config = self._configs[config_name] = {}

            # ---- define general info values                       ----
            # - must be done before the dataset lookup, since
            #   construct_dataset_name() reads the _configs dict
            #
            config['motion lists'] = _motion_lists
            config['probe'] = probe_config
            config['receptacle'] = config['probe']['receptacle']

            # ---- get configuration dataset                        ----
            try:
                dset = self.group[self.construct_dataset_name(config_name)]
            except (KeyError, ValueError):
                # KeyError: the dataset was not found
                # ValueError: the dataset name was not properly
                #             constructed
                #
                why = ("Dataset for configuration "
                       + "'{}'".format(pname)
                       + "  could not be determined or found")
                raise HDFMappingError(self._info['group path'], why=why)

            # ---- define 'dset paths'                              ----
            dpaths = (dset.name,)
            config['dset paths'] = dpaths

            # ---- define 'shotnum'                                 ----
            config['shotnum'] = {
                'dset paths': dpaths,
                'dset field': ('Shot number',),
                'shape': dset.dtype['Shot number'].shape,
                'dtype': np.int32,
            }

            # ---- define 'state values'                            ----
            config['state values'] = {
                'xyz': {
                    'dset paths': dpaths,
                    'dset field': ('x', 'y', 'z'),
                    'shape': (3,),
                    'dtype': np.float64
                },
                'ptip_rot_theta': {
                    'dset paths': dpaths,
                    'dset field': ('theta',),
                    'shape': (),
                    'dtype': np.float64
                },
                'ptip_rot_phi': {
                    'dset paths': dpaths,
                    'dset field': ('phi',),
                    'shape': (),
                    'dtype': np.float64
                },
            }
Exemple #10
0
    def _build_configs(self):
        """
        Builds the :attr:`configs` dictionary.

        Scans the control group's sub-groups for motion-list and
        probe-list definitions, then builds one configuration entry
        per probe, keyed by the probe's receptacle number.

        :raises HDFMappingError: if no Probe List groups are found, or
            if a configuration's dataset can not be determined/found
        """
        # build order:
        #  1. build a local motion list dictionary
        #  2. build a local probe list dictionary
        #  3. build configs dict
        #
        # TODO: HOW TO ADD MOTION LIST TO DICT
        # - right now, the dataset has to be read which has the
        #   potential for creating long mapping times
        # - this is probably best left to HDFReadControls
        #

        # build 'motion list' and 'probe list'
        # - each sub-group is analyzed as a motion list first; only on
        #   failure is it analyzed as a probe list
        _motion_lists = {}
        _probe_lists = {}
        for name in self.subgroup_names:
            ml_stuff = self._analyze_motionlist(name)
            if bool(ml_stuff):
                # build 'motion list'
                _motion_lists[ml_stuff["name"]] = ml_stuff["config"]
            else:
                pl_stuff = self._analyze_probelist(name)
                if bool(pl_stuff):
                    # build 'probe list'
                    # - NOTE(review): keyed by "probe-id" here, whereas a
                    #   sibling variant of this method keys by "name" —
                    #   presumably intentional; confirm against
                    #   _analyze_probelist()
                    _probe_lists[pl_stuff["probe-id"]] = pl_stuff["config"]

        # ensure a PL item (config group) is found
        if len(_probe_lists) == 0:
            why = "has no mappable configurations (Probe List groups)"
            raise HDFMappingError(self._info["group path"], why=why)

        # build configuration dictionaries
        # - the receptacle number is the config_name
        # - each probe is one-to-one with receptacle number
        #
        for pname in _probe_lists:
            # define configuration name
            config_name = _probe_lists[pname]["receptacle"]

            # initialize _configs
            self._configs[config_name] = {}

            # ---- define general info values                       ----
            # - this has to be done before getting the dataset since
            #   the _configs dist is used by construct_dataset_name()
            #
            # add motion list info
            self._configs[config_name]["motion lists"] = _motion_lists

            # add probe info
            self._configs[config_name]["probe"] = _probe_lists[pname]

            # add 'receptacle'
            self._configs[config_name]["receptacle"] = self._configs[
                config_name]["probe"]["receptacle"]

            # ---- get configuration dataset                        ----
            try:
                dset_name = self.construct_dataset_name(config_name)
                dset = self.group[dset_name]
            except (KeyError, ValueError):
                # KeyError: the dataset was not found
                # ValueError: the dataset name was not properly
                #             constructed
                #
                why = (f"Dataset for configuration '{pname}' could not be "
                       f"determined or found.")
                raise HDFMappingError(self._info["group path"], why=why)

            # ---- define 'dset paths'                              ----
            self._configs[config_name]["dset paths"] = (dset.name, )

            # ---- define 'shotnum'                                 ----
            # initialize
            self._configs[config_name]["shotnum"] = {
                "dset paths": self._configs[config_name]["dset paths"],
                "dset field": ("Shot number", ),
                "shape": dset.dtype["Shot number"].shape,
                "dtype": np.int32,
            }

            # ---- define 'state values'                            ----
            # - 'xyz' is the probe position; theta/phi are probe-tip
            #   rotation angles
            self._configs[config_name]["state values"] = {
                "xyz": {
                    "dset paths": self._configs[config_name]["dset paths"],
                    "dset field": ("x", "y", "z"),
                    "shape": (3, ),
                    "dtype": np.float64,
                },
                "ptip_rot_theta": {
                    "dset paths": self._configs[config_name]["dset paths"],
                    "dset field": ("theta", ),
                    "shape": (),
                    "dtype": np.float64,
                },
                "ptip_rot_phi": {
                    "dset paths": self._configs[config_name]["dset paths"],
                    "dset field": ("phi", ),
                    "shape": (),
                    "dtype": np.float64,
                },
            }
Exemple #11
0
    def _build_configs(self):
        """
        Builds the :attr:`configs` dictionary.

        Maps the MSI heater group: verifies the 'Heater summary'
        dataset exists, records the calibration-tag attribute, and
        defines the 'shape', 'shotnum', 'signals', and 'meta' entries
        consumed by HDFReadMSI.

        :raises HDFMappingError: if a required dataset is missing or
            does not have the expected shape/fields
        """
        # look for required datasets
        for dset_name in ["Heater summary"]:
            if dset_name not in self.group:
                why = f"dataset '{dset_name}' not found"
                raise HDFMappingError(self.info["group path"], why=why)

        # initialize general info values
        # - each pair is (configs key, HDF5 group attribute name)
        pairs = [("calib tag", "Calibration tag")]
        for pair in pairs:
            try:
                self._configs[pair[0]] = [self.group.attrs[pair[1]]]
            except KeyError:
                # attribute is optional; mapping continues with an
                # empty placeholder
                self._configs[pair[0]] = []
                warn(f"Attribute '{pair[1]}' not found for MSI diagnostic "
                     f"'{self.device_name}', continuing with mapping")

        # initialize 'shape'
        # - this is used by HDFReadMSI
        self._configs["shape"] = ()

        # initialize 'shotnum'
        self._configs["shotnum"] = {
            "dset paths": (),
            "dset field": ("Shot number", ),
            "shape": (),
            "dtype": np.int32,
        }

        # initialize 'signals'
        # - there are NO signal fields
        #
        self._configs["signals"] = {}

        # initialize 'meta'
        # - 'dset paths' and 'shape' entries are placeholders filled in
        #   below once the dataset is inspected
        self._configs["meta"] = {
            "shape": (),
            "timestamp": {
                "dset paths": (),
                "dset field": ("Timestamp", ),
                "shape": (),
                "dtype": np.float64,
            },
            "data valid": {
                "dset paths": (),
                "dset field": ("Data valid", ),
                "shape": (),
                "dtype": np.int8,
            },
            "current": {
                "dset paths": (),
                "dset field": ("Heater current", ),
                "shape": (),
                "dtype": np.float32,
            },
            "voltage": {
                "dset paths": (),
                "dset field": ("Heater voltage", ),
                "shape": (),
                "dtype": np.float32,
            },
            "temperature": {
                "dset paths": (),
                "dset field": ("Heater temperature", ),
                "shape": (),
                "dtype": np.float32,
            },
        }

        # ---- update configs related to 'Heater summary'           ----
        # - dependent configs are:
        #   1. 'shape'
        #   2. 'shotnum'
        #   3. all of 'meta'
        #
        dset_name = "Heater summary"
        dset = self.group[dset_name]

        # define 'shape'
        expected_fields = [
            "Shot number",
            "Timestamp",
            "Data valid",
            "Heater current",
            "Heater voltage",
            "Heater temperature",
        ]
        if dset.ndim == 1 and all(field in dset.dtype.names
                                  for field in expected_fields):
            self._configs["shape"] = dset.shape
        else:
            why = "'/Heater summary' does not match expected shape"
            raise HDFMappingError(self.info["group path"], why=why)

        # update 'shotnum'
        self._configs["shotnum"]["dset paths"] = (dset.name, )
        self._configs["shotnum"]["shape"] = dset.dtype["Shot number"].shape

        # update 'meta/timestamp'
        self._configs["meta"]["timestamp"]["dset paths"] = (dset.name, )
        self._configs["meta"]["timestamp"]["shape"] = dset.dtype[
            "Timestamp"].shape

        # update 'meta/data valid'
        self._configs["meta"]["data valid"]["dset paths"] = (dset.name, )
        self._configs["meta"]["data valid"]["shape"] = dset.dtype[
            "Data valid"].shape

        # update 'meta/current'
        self._configs["meta"]["current"]["dset paths"] = (dset.name, )
        self._configs["meta"]["current"]["shape"] = dset.dtype[
            "Heater current"].shape

        # update 'meta/voltage'
        self._configs["meta"]["voltage"]["dset paths"] = (dset.name, )
        self._configs["meta"]["voltage"]["shape"] = dset.dtype[
            "Heater voltage"].shape

        # update 'meta/temperature'
        self._configs["meta"]["temperature"]["dset paths"] = (dset.name, )
        self._configs["meta"]["temperature"]["shape"] = dset.dtype[
            "Heater temperature"].shape
Exemple #12
0
    def _build_configs(self):
        """
        Builds the :attr:`configs` dictionary.

        Maps the MSI gas-pressure group: verifies the required
        datasets exist, records relevant group attributes, and defines
        the 'shape', 'shotnum', 'signals', and 'meta' entries consumed
        by HDFReadMSI.

        :raises HDFMappingError: if a required dataset is missing or
            does not have the expected shape/fields
        """
        # look for required datasets
        for dset_name in ["Gas pressure summary", "RGA partial pressures"]:
            if dset_name not in self.group:
                why = f"dataset '{dset_name}' not found"
                raise HDFMappingError(self.info["group path"], why=why)

        # initialize general info values
        # - each pair is (configs key, HDF5 group attribute name)
        pairs = [
            ("RGA AMUs", "RGA AMUs"),
            ("ion gauge calib tag", "Ion gauge calibration tag"),
            ("RGA calib tag", "RGA calibration tag"),
        ]
        for pair in pairs:
            try:
                val = self.group.attrs[pair[1]]
                if isinstance(val, (list, tuple, np.ndarray)):
                    self._configs[pair[0]] = val
                else:
                    # scalar attributes are wrapped in a list for a
                    # uniform interface
                    self._configs[pair[0]] = [val]
            except KeyError:
                # attribute is optional; mapping continues with an
                # empty placeholder
                self._configs[pair[0]] = []
                warn(f"Attribute '{pair[1]}' not found for MSI diagnostic "
                     f"'{self.device_name}', continuing with mapping")

        # initialize 'shape'
        # - this is used by HDFReadMSI
        self._configs["shape"] = ()

        # initialize 'shotnum'
        self._configs["shotnum"] = {
            "dset paths": (),
            "dset field": ("Shot number", ),
            "shape": (),
            "dtype": np.int32,
        }

        # initialize 'signals'
        # - there is only one signal fields
        #   1. 'partial pressures'
        #
        self._configs["signals"] = {
            "partial pressures": {
                "dset paths": (),
                "dset field": (),
                "shape": (),
                "dtype": np.float32,
            },
        }

        # initialize 'meta'
        # - 'dset paths' and 'shape' entries are placeholders (empty
        #   tuples) filled in below once the dataset is inspected
        self._configs["meta"] = {
            "shape": (),
            "timestamp": {
                "dset paths": (),
                "dset field": ("Timestamp", ),
                "shape": (),
                "dtype": np.float64,
            },
            "data valid - ion gauge": {
                "dset paths": (),
                "dset field": ("Ion gauge data valid", ),
                "shape": (),
                "dtype": np.int8,
            },
            "data valid - RGA": {
                "dset paths": (),
                "dset field": ("RGA data valid", ),
                "shape": (),
                "dtype": np.int8,
            },
            "fill pressure": {
                "dset paths": (),
                "dset field": ("Fill pressure", ),
                "shape": (),
                "dtype": np.float32,
            },
            "peak AMU": {
                "dset paths": (),
                "dset field": ("Peak AMU", ),
                "shape": (),
                "dtype": np.float32,
            },
        }

        # ---- update configs related to 'Gas pressure summary'     ----
        # - dependent configs are:
        #   1. 'shape'
        #   2. 'shotnum'
        #   3. all of 'meta'
        #
        dset_name = "Gas pressure summary"
        dset = self.group[dset_name]

        # define 'shape'
        expected_fields = [
            "Shot number",
            "Timestamp",
            "Ion gauge data valid",
            "RGA data valid",
            "Fill pressure",
            "Peak AMU",
        ]
        if dset.ndim == 1 and all(field in dset.dtype.names
                                  for field in expected_fields):
            self._configs["shape"] = dset.shape
        else:
            why = "'/Gas pressure summary' does not match expected shape"
            raise HDFMappingError(self.info["group path"], why=why)

        # update 'shotnum'
        self._configs["shotnum"]["dset paths"] = (dset.name, )
        self._configs["shotnum"]["shape"] = dset.dtype["Shot number"].shape

        # update 'meta/timestamp'
        self._configs["meta"]["timestamp"]["dset paths"] = (dset.name, )
        self._configs["meta"]["timestamp"]["shape"] = dset.dtype[
            "Timestamp"].shape

        # update 'meta/data valid - ion gauge'
        self._configs["meta"]["data valid - ion gauge"]["dset paths"] = (
            dset.name, )
        self._configs["meta"]["data valid - ion gauge"]["shape"] = dset.dtype[
            "Ion gauge data valid"].shape

        # update 'meta/data valid - RGA'
        self._configs["meta"]["data valid - RGA"]["dset paths"] = (dset.name, )
        self._configs["meta"]["data valid - RGA"]["shape"] = dset.dtype[
            "RGA data valid"].shape

        # update 'meta/fill pressure'
        self._configs["meta"]["fill pressure"]["dset paths"] = (dset.name, )
        self._configs["meta"]["fill pressure"]["shape"] = dset.dtype[
            "Fill pressure"].shape

        # update 'meta/peak AMU'
        self._configs["meta"]["peak AMU"]["dset paths"] = (dset.name, )
        self._configs["meta"]["peak AMU"]["shape"] = dset.dtype[
            "Peak AMU"].shape

        # ---- update configs related to 'RGA partial pressures'   ----
        # - dependent configs are:
        #   1. 'signals/partial pressures'
        #
        dset_name = "RGA partial pressures"
        dset = self.group[dset_name]
        self._configs["signals"]["partial pressures"]["dset paths"] = (
            dset.name, )

        # check 'shape'
        # - expects an un-fielded 2-D dataset whose first axis matches
        #   the number of shots in 'Gas pressure summary'
        _build_success = True
        if dset.dtype.names is not None:
            # dataset has fields (it should not have fields)
            _build_success = False
        elif dset.ndim == 2:
            if dset.shape[0] == self._configs["shape"][0]:
                self._configs["signals"]["partial pressures"]["shape"] = (
                    dset.shape[1], )
            else:
                _build_success = False
        else:
            _build_success = False
        if not _build_success:
            why = "'/RGA partial pressures' does not match expected shape"
            raise HDFMappingError(self.info["group path"], why=why)
Exemple #13
0
    def _find_adc_connections(
        self, adc_name: str, config_group: h5py.Group
    ) -> Tuple[Tuple[int, Tuple[int, ...], Dict[str, Any]], ...]:
        """
        Determines active connections on the adc.

        :param adc_name: name of the analog-digital-converter
        :param config_group: HDF5 group object of the configuration
            group

        :return:

            Tuple of 3-element tuples where the 1st element of the
            nested tuple represents a connected *board* number, the 2nd
            element is a tuple of connected *channel* numbers for the
            *board*, and the 3rd element is a dictionary of adc setup
            values (*bit*, *clock rate*, etc.).
        """
        config_name = self._parse_config_name(
            os.path.basename(config_group.name))
        active = self.deduce_config_active_status(config_name)

        # initialize conn, brd, and chs
        # conn = list of connections
        # brd  = board number
        # chs  = list of connect channels of board brd
        #
        conn = []

        # Determine connected (brd, ch) combinations
        # scan thru board groups
        for board in config_group:
            # Is it a board group?
            if not bool(re.fullmatch(r'Boards\[\d+\]', board)):
                warn("'{}' does not match expected ".format(board) +
                     "board group name...not adding to mapping")
                continue

            # get board number
            brd_group = config_group[board]
            try:
                brd = brd_group.attrs['Board']
            except KeyError:
                raise HDFMappingError(
                    self.info['group path'],
                    "board number attribute 'Board' missing")

            # ensure brd is an int
            if not isinstance(brd, (int, np.integer)):
                warn("Board number is not an integer")
                continue
            elif brd < 0:
                warn("Board number is less than 0.")
                continue

            # ensure there's no duplicate board numbers
            if brd in [sconn[0] for sconn in conn]:
                why = ("HDF5 structure unexpected..." +
                       "'{}'".format(config_group.name) +
                       " defines duplicate board numbers")

                # error if active, else warn
                if active:
                    raise HDFMappingError(self.info['group path'], why=why)
                else:
                    warn(why)

                    # skip adding to conn list
                    continue

            # scan thru channel groups
            chs = []
            for ch_key in brd_group:
                # Is it a channel group?
                # - report the channel key (not the board name) when the
                #   group name does not match
                if not bool(re.fullmatch(r'Channels\[\d+\]', ch_key)):
                    warn("'{}' does not match expected ".format(ch_key) +
                         "channel group name...not adding to mapping")
                    continue

                # get channel number
                ch_group = brd_group[ch_key]
                try:
                    ch = ch_group.attrs['Channel']
                except KeyError:
                    raise HDFMappingError(
                        self.info['group path'],
                        "Channel number attribute 'Channel' missing")

                # ensure ch is an int
                if not isinstance(ch, (int, np.integer)):
                    warn("Channel number is not an integer")
                    continue
                elif ch < 0:
                    warn("Channel number is less than 0.")
                    continue

                # define list of channels
                chs.append(ch)

            # ensure connected channels are unique
            if len(chs) != len(set(chs)):
                why = ("HDF5 structure unexpected..." +
                       "'{}'".format(brd_group.name) +
                       " does not define a unique set of channel " +
                       "numbers...not adding to `configs` dict")
                warn(why)

                # skip adding to conn list
                continue

            # ensure chs is not NULL
            if len(chs) == 0:
                why = ("HDF5 structure unexpected..." +
                       "'{}'".format(brd_group.name) +
                       " does not define any valid channel " +
                       "numbers...not adding to `configs` dict")
                warn(why)

                # skip adding to conn list
                continue

            # build subconn tuple with connected board, channels, and
            # acquisition parameters
            # - 'bit' and 'clock rate' are placeholders to be filled in
            #   by later mapping passes
            subconn = (brd, tuple(chs), {
                'bit': None,
                'clock rate': (None, 'MHz')
            })

            # add to all connections list
            conn.append(subconn)

        return tuple(conn)
Exemple #14
0
    def _build_configs(self):
        """
        Builds the :attr:`configs` dictionary.

        Scans the digitizer group for configuration sub-groups, builds
        one configuration entry per sub-group (marking it active or
        inactive), and seeds each configuration's 'SIS 3301' adc info.

        :raises HDFMappingError: if no configurations are found, no
            configuration is active, or an active configuration has no
            connected boards/channels
        """
        # collect names of datasets and sub-groups
        subgroup_names = []
        dataset_names = []
        for key in self.group.keys():
            if isinstance(self.group[key], h5py.Dataset):
                dataset_names.append(key)
            if isinstance(self.group[key], h5py.Group):
                subgroup_names.append(key)

        # build self.configs
        for name in subgroup_names:
            # determine configuration name
            config_name = self._parse_config_name(name)

            # populate
            if bool(config_name):
                # initialize configuration name in the config dict
                # - add 'config group path'
                self._configs[config_name] = {
                    'config group path': self.group[name].name,
                }

                # determine if config is active
                self._configs[config_name]['active'] = \
                    self.deduce_config_active_status(config_name)

                # assign active adc's to the configuration
                self._configs[config_name]['adc'] = \
                    self._find_active_adcs(self.group[name])

                # define 'shotnum' entry
                #
                # Note:
                #   The original dataset shot number field was named
                #   'Shot'.  At some point (mid- to late- 00's) this
                #   field was renamed to 'Shot number'.
                #
                #   When the header dataset is reviewed by
                #   `_adc_info_second_pass()` the field name will be
                #   changed when appropriate.
                #
                self._configs[config_name]['shotnum'] = {
                    'dset field': ('Shot', ),
                    'shape': (),
                    'dtype': np.uint32,
                }

                # initialize adc info
                self._configs[config_name]['SIS 3301'] = \
                    self._adc_info_first_pass('SIS 3301',
                                              self.group[name])

                # update adc info with 'nshotnum' and 'nt'
                # - `construct_dataset_name` needs adc info to be seeded
                # - the following updates depend on
                #   construct_dataset_name
                #
                if self._configs[config_name]['active']:
                    self._configs[config_name]['SIS 3301'] = \
                        self._adc_info_second_pass(config_name,
                                                   'SIS 3301')
                else:
                    # inactive configs get sentinel values since their
                    # datasets are never read
                    for conn in self._configs[config_name]['SIS 3301']:
                        conn[2].update({
                            'nshotnum': -1,
                            'nt': -1,
                        })

        # -- raise HDFMappingErrors                                 ----
        # no configurations found
        if not bool(self._configs):
            why = "there are no mappable configurations"
            raise HDFMappingError(self.info['group path'], why=why)

        # ensure there are active configs
        if len(self.active_configs) == 0:
            raise HDFMappingError(self.info['group path'],
                                  "there are no active configurations")

        # ensure active configs are not NULL
        for config_name in self.active_configs:
            config = self.configs[config_name]
            if len(config['SIS 3301']) == 0:
                raise HDFMappingError(
                    self.info['group path'],
                    "active configuration '{}'".format(config_name) +
                    " has no connected board and channels")
Exemple #15
0
    def _build_configs(self):
        """
        Build the :attr:`configs` dictionary for the NI_XYZ probe drive.

        Assumptions:

        1. Only one NI_XYZ drive was ever built, so there will always
           be exactly one configuration, named ``'config01'``.
        2. There is only one dataset ever created, ``'Run time list'``.
        3. There can be multiple motion lists; each sub-group of the
           device group is the configuration for a different motion
           list, and the sub-group name is the motion list name.

        :raises HDFMappingError: if the main dataset is missing, or if
            the dataset lacks the ``'Shot number'`` field or all three
            of the ``'x'``, ``'y'``, ``'z'`` fields
        """
        # initialize the single configuration
        cname = "config01"
        self._configs[cname] = {}

        # warn when no motion list groups exist
        if len(self.subgroup_names) == 0:
            warn(
                f"{self.info['group path']}: no defining motion list groups exist"
            )

        # get the main dataset
        try:
            dset = self.group[self.construct_dataset_name()]
        except KeyError as err:
            why = f"Dataset '{self.construct_dataset_name()}' not found"
            raise HDFMappingError(self.info["group path"], why=why) from err

        # ---- define general config values                         ----
        # BUGFIX: the original literal was missing spaces at two line
        # joins ("thefields", "andLaPD") and read "suppose to"
        self._configs[cname].update({
            "Note": (
                "The 'r', 'theta', and 'phi' fields in the "
                "NI_XYZ data set are supposed to represent "
                "spherical coordinates of the probe tip with "
                "respect to the pivot point of the probe drive, "
                "but the current calculation and population of the "
                "fields is inaccurate.  For user reference, the "
                "distance between the probe drive pivot point and "
                "LaPD axis is (Lpp =) 58.771 cm."
            ),
            # distance between the probe drive pivot point and LaPD axis
            "Lpp": 58.771 * u.cm,
        })

        # ---- define motion list values                            ----
        self._configs[cname]["motion lists"] = {}

        # gather sub-group names (i.e. motion list names)
        _ml_names = [
            name for name in self.group
            if isinstance(self.group[name], h5py.Group)
        ]

        # a valid motion list group defines at least one of the
        # attributes Nx, Ny, Nz, dx, dy, dz, x0, y0, z0
        _required_attrs = (
            "Nx", "Ny", "Nz", "dx", "dy", "dz", "x0", "y0", "z0")
        _ml_names = [
            name for name in _ml_names
            if any(attr in self.group[name].attrs
                   for attr in _required_attrs)
        ]

        # warn if no motion lists exist
        if not _ml_names:
            warn("NI_XYZ has no identifiable motion lists")

        # (configs key, group attribute name) pairs to gather for each
        # motion list
        pairs = [
            ("Nx", "Nx"),
            ("Ny", "Ny"),
            ("Nz", "Nz"),
            ("dx", "dx"),
            ("dy", "dy"),
            ("dz", "dz"),
            ("fan_XYZ", "fan_XYZ"),
            ("max_ydrive_steps", "max_ydrive_steps"),
            ("min_ydrive_steps", "min_ydrive_steps"),
            ("max_zdrive_steps", "max_zdrive_steps"),
            ("min_zdrive_steps", "min_zdrive_steps"),
            ("x0", "x0"),
            ("y0", "y0"),
            ("z0", "z0"),
            ("port", "z_port"),
        ]
        for name in _ml_names:
            # initialize motion list dictionary
            ml_config = {}
            self._configs[cname]["motion lists"][name] = ml_config

            # add motion list values
            for ckey, attr_name in pairs:
                try:
                    # get attribute value
                    val = self.group[name].attrs[attr_name]

                    # decode np.bytes_ strings
                    if np.issubdtype(type(val), np.bytes_):
                        val = _bytes_to_str(val)
                    if attr_name == "fan_XYZ":
                        # convert 'TRUE'/'FALSE' string to boolean
                        val = val == "TRUE"

                    ml_config[ckey] = val
                except KeyError:
                    # attribute missing -> record None and warn
                    ml_config[ckey] = None
                    warn(
                        f"Motion List attribute '{attr_name}' not found for "
                        f"ML group '{name}'"
                    )

        # ---- define 'dset paths'                                  ----
        self._configs[cname]["dset paths"] = (dset.name,)

        # ---- define 'shotnum'                                     ----
        # the dataset must have a 'Shot number' field
        if "Shot number" not in dset.dtype.names:
            why = f"Dataset '{dset.name}' is missing 'Shot number' field"
            raise HDFMappingError(self.info["group path"], why=why)

        self._configs[cname]["shotnum"] = {
            "dset paths": self._configs[cname]["dset paths"],
            "dset field": ("Shot number",),
            "shape": dset.dtype["Shot number"].shape,
            "dtype": np.int32,
        }

        # ---- define 'state values'                                ----
        self._configs[cname]["state values"] = {
            "xyz": {
                "dset paths": self._configs[cname]["dset paths"],
                "dset field": ("x", "y", "z"),
                "shape": (3,),
                "dtype": np.float64,
            },
        }

        # check dset for 'x', 'y' and 'z' fields
        # - all three missing is fatal, a partial set only warns
        fx = "x" not in dset.dtype.names
        fy = "y" not in dset.dtype.names
        fz = "z" not in dset.dtype.names
        if fx and fy and fz:
            why = f"Dataset '{dset.name}' missing fields 'x', 'y' and 'z'"
            raise HDFMappingError(self.info["group path"], why=why)
        elif fx or fy or fz:
            missf = ", ".join(
                field
                for field, missing in (("x", fx), ("y", fy), ("z", fz))
                if missing
            )
            warn(f"Dataset '{dset.name}' missing field '{missf}'")
Exemple #16
0
    def _build_configs(self):
        """
        Build the :attr:`configs` dictionary.

        Maps each sub-group of the digitizer group to a configuration,
        determines whether that configuration is active, and gathers the
        adc (analog-digital converter) connection info in two passes
        (seed first, then fill ``'nshotnum'``/``'nt'``).

        :raises HDFMappingError: if no mappable or active configurations
            are found, or if an active configuration has no active adc's
            or no mapped adc connections
        """
        # collect names of sub-groups (datasets are not config groups)
        subgroup_names = []
        for key in self.group.keys():
            if isinstance(self.group[key], h5py.Group):
                subgroup_names.append(key)

        # build self._configs
        for name in subgroup_names:
            # determine configuration name
            # - a falsy result means the sub-group is not a config group
            config_name = self._parse_config_name(name)

            # populate
            if bool(config_name):
                # initialize configuration name in the config dict
                # - add 'config group path'
                self._configs[config_name] = {
                    "config group path": self.group[name].name,
                }

                # determine if config is active
                self._configs[config_name]["active"] = self.deduce_config_active_status(
                    config_name
                )

                # assign active adc's to the configuration
                self._configs[config_name]["adc"] = self._find_active_adcs(
                    self.group[name]
                )

                # define 'shotnum' entry
                self._configs[config_name]["shotnum"] = {
                    "dset field": ("Shot number",),
                    "shape": (),
                    "dtype": np.uint32,
                }

                # initialize (seed) adc info
                for adc in self._configs[config_name]["adc"]:
                    self._configs[config_name][adc] = self._adc_info_first_pass(
                        config_name, adc
                    )

                # update adc info with 'nshotnum' and 'nt'
                # - `construct_dataset_name` needs adc info to be seeded
                #   first, which is why this is a separate second pass
                for adc in self._configs[config_name]["adc"]:
                    if self._configs[config_name]["active"]:
                        self._configs[config_name][adc] = self._adc_info_second_pass(
                            config_name, adc
                        )
                    else:
                        # inactive configs get sentinel values
                        for conn in self._configs[config_name][adc]:
                            conn[2].update(
                                {
                                    "nshotnum": -1,
                                    "nt": -1,
                                }
                            )

        # -- raise HDFMappingErrors                                 ----
        # no configurations found
        if not bool(self._configs):
            why = "there are no mappable configurations"
            raise HDFMappingError(self.info["group path"], why=why)

        # ensure there are active configs
        # BUGFIX: message previously read "there are not active ..."
        if len(self.active_configs) == 0:
            raise HDFMappingError(
                self.info["group path"], "there are no active configurations"
            )

        # ensure active configs are not NULL
        for config_name in self.active_configs:  # pragma: no branch
            config = self.configs[config_name]
            if len(config["adc"]) == 0:
                raise HDFMappingError(
                    self.info["group path"],
                    f"active configuration '{config_name}' has no active adc's",
                )

            # drop adc's that ended up with no mapped connections
            adcs = list(config["adc"])
            for adc in config["adc"]:  # pragma: no branch
                if len(config[adc]) == 0:  # pragma: no branch
                    del config[adc]
                    adcs.remove(adc)
            if len(adcs) == 0:
                raise HDFMappingError(
                    self.info["group path"],
                    f"active configuration '{config_name}' has no mapped "
                    "connections for any adc",
                )
            config["adc"] = tuple(adcs)
Exemple #17
0
    def _build_configs(self):
        """Builds the :attr:`configs` dictionary."""
        # nothing to map if the device defines no configuration groups
        if len(self.subgroup_names) == 0:
            raise HDFMappingError(self._info['group path'],
                                  why='has no mappable configurations')

        # Every sub-group is treated as a unique, active configuration
        # of the control device; the sub-group name is the
        # configuration name.
        for name in self.subgroup_names:
            cong = self.group[name]

            # grab the dataset backing this configuration
            try:
                dset = self.group[self.construct_dataset_name()]
            except KeyError:
                why = (f"Dataset '{self.construct_dataset_name()}' not found "
                       f"for configuration group '{name}'")
                raise HDFMappingError(self._info['group path'], why=why)

            # initialize _configs
            self._configs[name] = {}

            # ---- define general info values                       ----
            # (configs key, group attribute name) pairs to gather
            attr_pairs = (
                ('IP address', 'IP address'),
                ('generator device', 'Generator type'),
                ('GPIB address', 'GPIB address'),
                ('initial state', 'Initial state'),
                ('command list', 'Waveform command list'),
            )
            for ckey, attr_name in attr_pairs:
                try:
                    # get attribute value
                    value = cong.attrs[attr_name]

                    # condition value
                    if ckey == 'command list':
                        # - np.bytes_ string -> str, split on line
                        #   breaks, strip surrounding whitespace
                        entries = value.decode('utf-8').splitlines()
                        value = tuple(entry.strip() for entry in entries)
                    elif ckey in ('IP address', 'generator device',
                                  'initial state'):
                        # np.bytes_ string -> str
                        value = value.decode('utf-8')
                    # else: 'GPIB address' is np.uint32, no conditioning

                    # assign value to _configs
                    self._configs[name][ckey] = value
                except KeyError:
                    self._configs[name][ckey] = None
                    if ckey == 'command list':
                        # a missing command list is fatal to the mapping
                        why = (f"Attribute '{attr_name}' not found for "
                               f"configuration group '{name}'")
                        raise HDFMappingError(self._info['group path'],
                                              why=why)
                    warn(f"Attribute '{attr_name}' not found in control "
                         f"device '{self.device_name}' configuration group "
                         f"'{name}', continuing with mapping")

            # ---- define 'dset paths'                              ----
            self._configs[name]['dset paths'] = (dset.name,)

            # ---- define 'shotnum'                                 ----
            self._configs[name]['shotnum'] = {
                'dset paths': self._configs[name]['dset paths'],
                'dset field': ('Shot number',),
                'shape': dset.dtype['Shot number'].shape,
                'dtype': np.int32
            }

            # ---- define 'state values'                            ----
            # trial construction; suppress warnings it may emit
            with warnings.catch_warnings():
                warnings.simplefilter("ignore")
                try:
                    sv_state = self._construct_state_values_dict(
                        name, self._default_re_patterns)
                except KeyError:
                    sv_state = {}

            # fall back to the default dict when construction failed
            if bool(sv_state):
                self._configs[name]['state values'] = sv_state
            else:
                self._configs[name]['state values'] = \
                    self._default_state_values_dict(name)
Exemple #18
0
    def _build_configs(self):
        """
        Build the :attr:`configs` dictionary for the NI_XZ probe drive.

        Assumptions:

        1. Only one NI_XZ drive was ever built, so there will always
           be exactly one configuration, named ``'config01'``.
        2. There is only one dataset ever created, ``'Run time list'``.
        3. There can be multiple motion lists; each sub-group of the
           device group is the configuration for a different motion
           list, and the sub-group name is the motion list name.

        :raises HDFMappingError: if the main dataset is missing, or if
            the dataset lacks the ``'Shot number'`` field or both the
            ``'x'`` and ``'z'`` fields
        """
        # initialize the single configuration
        cname = "config01"
        self._configs[cname] = {}

        # warn when no motion list groups exist
        if len(self.subgroup_names) == 0:
            warn(
                f"{self.info['group path']}: no defining motion list groups exist"
            )

        # get the main dataset
        try:
            dset = self.group[self.construct_dataset_name()]
        except KeyError as err:
            why = f"Dataset '{self.construct_dataset_name()}' not found"
            raise HDFMappingError(self.info["group path"], why=why) from err

        # ---- define general config values                         ----
        # none exist

        # ---- define motion list values                            ----
        self._configs[cname]["motion lists"] = {}

        # gather sub-group names (i.e. motion list names)
        _ml_names = [
            name for name in self.group
            if isinstance(self.group[name], h5py.Group)
        ]

        # a valid motion list group defines at least one of the
        # attributes Nx, Nz, dx, dz, x0, z0
        # - BUGFIX: the required attribute is 'Nz', not 'Ny' -- the
        #   NI_XZ drive has no y-axis (see the gathered pairs below)
        _required_attrs = ("Nx", "Nz", "dx", "dz", "x0", "z0")
        _ml_names = [
            name for name in _ml_names
            if any(attr in self.group[name].attrs
                   for attr in _required_attrs)
        ]

        # warn if no motion lists exist
        if not _ml_names:
            warn("NI_XZ has no identifiable motion lists")

        # (configs key, group attribute name) pairs to gather for each
        # motion list
        pairs = [
            ("Nx", "Nx"),
            ("Nz", "Nz"),
            ("dx", "dx"),
            ("dz", "dz"),
            ("fan_XZ", "fan_XZ"),
            ("max_zdrive_steps", "max_zdrive_steps"),
            ("min_zdrive_steps", "min_zdrive_steps"),
            ("x0", "x0"),
            ("z0", "z0"),
            ("port", "z_port"),
        ]
        for name in _ml_names:
            # initialize motion list dictionary
            ml_config = {}
            self._configs[cname]["motion lists"][name] = ml_config

            # add motion list values
            for ckey, attr_name in pairs:
                try:
                    # get attribute value
                    val = self.group[name].attrs[attr_name]

                    # decode np.bytes_ strings
                    if np.issubdtype(type(val), np.bytes_):
                        val = _bytes_to_str(val)
                    if attr_name == "fan_XZ":
                        # convert 'TRUE'/'FALSE' string to boolean
                        val = val == "TRUE"

                    ml_config[ckey] = val
                except KeyError:
                    # attribute missing -> record None and warn
                    ml_config[ckey] = None
                    warn(
                        f"Motion List attribute '{attr_name}' not found for "
                        f"ML group '{name}'"
                    )

        # ---- define 'dset paths'                                  ----
        self._configs[cname]["dset paths"] = (dset.name,)

        # ---- define 'shotnum'                                     ----
        # the dataset must have a 'Shot number' field
        if "Shot number" not in dset.dtype.names:
            why = f"Dataset '{dset.name}' is missing 'Shot number' field"
            raise HDFMappingError(self.info["group path"], why=why)

        self._configs[cname]["shotnum"] = {
            "dset paths": self._configs[cname]["dset paths"],
            "dset field": ("Shot number",),
            "shape": dset.dtype["Shot number"].shape,
            "dtype": np.int32,
        }

        # ---- define 'state values'                                ----
        self._configs[cname]["state values"] = {
            "xyz": {
                "dset paths": self._configs[cname]["dset paths"],
                # NOTE(review): the empty middle field presumably keeps
                # the unused y-slot of the 3-element 'xyz' state empty
                # -- confirm against the state-value reader
                "dset field": ("x", "", "z"),
                "shape": (3,),
                "dtype": np.float64,
            },
        }

        # check dset for 'x' and 'z' fields
        # - both missing is fatal, one missing only warns
        fx = "x" not in dset.dtype.names
        fz = "z" not in dset.dtype.names
        if fx and fz:
            why = f"Dataset '{dset.name}' missing both field 'x' and 'z'"
            raise HDFMappingError(self.info["group path"], why=why)
        elif fx or fz:
            missf = "x" if fx else "z"
            warn(f"Dataset '{dset.name}' missing field '{missf}'")