Example no. 1
    def __transferData(self, chan):
        ''' Pulls the raw waveform data for one channel from the scope.

            Args:
                chan (int): channel to transfer (one channel at a time)

            Returns:
                list: raw voltage values (Volts) as returned by the ``CURV?`` query

            Todo:
                Make this a binary transfer to go even faster
        '''
        chStr = 'CH' + str(chan)
        self.setConfigParam('DATA:ENCDG', 'ASCII')
        self.setConfigParam('DATA:SOURCE', chStr)
        self.open()
        try:
            voltRaw = self.mbSession.query_ascii_values('CURV?')
        except pyvisa.VisaIOError as err:
            logger.error('Problem during query_ascii_values(\'CURV?\')')
            try:
                self.close()
            except pyvisa.VisaIOError:
                logger.error('Failed to close! %s', self.address)
            raise err
        self.close()
        return voltRaw
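
The same ASCII curve-transfer pattern, as a standalone sketch using pyvisa directly; the VISA address and channel are placeholders, and the Tektronix-style commands are the ones used in the method above.

import pyvisa

rm = pyvisa.ResourceManager()
scope = rm.open_resource('GPIB0::1::INSTR')   # placeholder address
try:
    scope.write('DATA:ENCDG ASCII')           # ASCII transfer (binary would be faster)
    scope.write('DATA:SOURCE CH1')            # one channel at a time
    volt_raw = scope.query_ascii_values('CURV?')
finally:
    scope.close()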
Example no. 2
    def get_all_gpib_id(self, use_cached=True):
        """ Queries the host for all connected GPIB instruments, and
        queries their identities with ``instrID()``.

        Warning: This might cause your instrument to lock into remote mode.

        Args:
            use_cached (bool): query only if not cached, default True

        Returns:
            dict: dictionary with gpib addresses as keys and \
                identity strings as values.
        """
        gpib_resources = self.list_gpib_resources_info(use_cached=use_cached)
        if self.__cached_gpib_instrument_list is None:
            use_cached = False
        if use_cached:
            return self.__cached_gpib_instrument_list
        else:
            gpib_instrument_list = dict()
            logger.debug("Caching GPIB instrument list in %s", self)
            for gpib_address in gpib_resources.keys():
                visa_object = VISAObject(gpib_address, tempSess=True)
                try:
                    instr_id = visa_object.instrID()
                    gpib_instrument_list[gpib_address] = instr_id
                except pyvisa.VisaIOError as err:
                    logger.error(err)
            self.__cached_gpib_instrument_list = gpib_instrument_list
            return gpib_instrument_list
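
A hypothetical usage sketch, assuming `host` is an instance of the class that defines get_all_gpib_id(); the variable names are illustrative.

ids = host.get_all_gpib_id(use_cached=False)   # force a fresh query of the bus
for address, identity in ids.items():
    print(address, '->', identity)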
Example no. 3
def connection_present(connection=connection,
                       connections=self.lab.connections):
    if connection in connections:
        return True
    else:
        logger.error("Connection %s is not compatible with the lab.",
                     connection)
        return False
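
The same membership test, isolated as a runnable sketch with plain dictionaries standing in for instrument objects; all names are made up for illustration.

# connections is a list of ({instr: port}, {instr: port}) pairs
connections = [({'laser': 'out'}, {'scope': 'CH1'})]
candidate = ({'laser': 'out'}, {'scope': 'CH1'})
print('present' if candidate in connections else 'missing')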
Example no. 4
    def updateConnections(self, *connections):
        """ Updates connections between instruments and devices.

        A connection is a tuple with a pair of one-entry dictionaries, as such:

        .. code-block:: python

            conn = ({instr1: port1}, {instr2: port2})

        The code assumes that there can only be one connection per port.
        This method performs the following actions:

            1. verifies that `port` is one of `instr.ports`. Otherwise raises
               a ``RuntimeError``.
            2. deletes any connection in ``lab.connections`` that has
               either ``{instr1: port1}`` or ``{instr2: port2}``, and
               logs the deleted connection as a warning.
            3. adds the new connection.

        Args:
            connections (tuple(dict)): one or more connections to update
        """

        # Verify if ports are valid, otherwise do nothing.
        for connection in connections:
            for k1, v1 in connection.items():
                if v1 not in k1.ports:
                    logger.error("Port '%s' is not in '%s: %s'", v1, k1,
                                 k1.ports)
                    raise RuntimeError("Port '{}' is not in '{}: {}'".format(
                        v1, k1, k1.ports))

        # Remove old conflicting connections
        def check_if_port_is_not_connected(connection, k1, v1):
            for k2, v2 in connection.items():
                if (k1, v1) == (k2, v2):
                    logger.warning("Deleting existing connection %s.",
                                   connection)
                    return False
            return True

        for connection in connections:
            for k1, v1 in connection.items():
                self.connections[:] = [
                    x for x in self.connections
                    if check_if_port_is_not_connected(x, k1, v1)
                ]

        # Add new connections
        for connection in connections:
            if connection not in self.connections:
                self.connections.append(connection)
            else:
                logger.warning("Connection already exists: %s", connection)
        return True
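
A hypothetical call, assuming `lab` is the object that defines updateConnections and that `laser`/`scope` are instrument objects whose ports contain the named ports; all names are placeholders.

# laser.ports contains 'out'; scope.ports contains 'CH1'
conn = ({laser: 'out'}, {scope: 'CH1'})
lab.updateConnections(conn)   # removes any stale connection on those ports, then adds this one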
Example no. 5
def assertValidPlotType(plType, dims=None, swpClass=None):
    if plType not in availablePlots(dims, swpClass):
        errStr = ['Invalid plot type.']
        errStr.append(f'This sweep is a {dims}-dimensional {swpClass.__name__}.')
        if plType not in availablePlots():
            errStr.append(f'{plType} is not a valid plot type at all.')
        else:
            errStr.append(f'{plType} is not a valid plot type for this kind of sweep.')
        errStr.append('Available plots are: {}'.format(', '.join(availablePlots(dims, swpClass))))
        logger.error('\n'.join(errStr))
        raise KeyError(plType)
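
A hedged usage sketch: callers validate the requested plot type before dispatching to the plotting code. `mySweep` and the plot name are illustrative.

assertValidPlotType('curves', dims=2, swpClass=type(mySweep))   # raises KeyError if invalid
mySweep.plot('curves')                                          # hypothetical plot call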
Example no. 6
    def saveState(self, fname=None, save_backup=True):
        """ Saves the current lab, together with all its dependencies,
        to a JSON file.

        But first, it checks whether the file has the same hash as the
        previously loaded one. If the file is not found, this check is skipped.

        If the labstate was created from scratch, save with ``_saveState()``.

        Args:
            fname (str or Path): file path to save
            save_backup (bool): saves a backup just in case, defaults to True.

        Raises:
            OSError: if there is any problem saving the file.
        """
        if fname is None:
            fname = self.filename
        try:
            loaded_lab = LabState.loadState(fname)
        except FileNotFoundError:
            logger.debug("File not found: %s. Saving for the first time.",
                         fname)
            self._saveState(fname, save_backup=False)
            return
        except JSONDecodeError:
            if os.stat(fname).st_size == 0:
                logger.warning("%s is empty. Saving for the first time.",
                               _filename)
                self._saveState(fname, save_backup=False)
                return
            else:
                raise

        if not self.__sha256__:
            logger.debug(
                "Attempting to compare fabricated labstate vs. preloaded one.")
            self.__sha256__ = self.__toJSON()["__sha256__"]
            logger.debug("self.__sha256__: %s", self.__sha256__)

        if loaded_lab == self:
            logger.debug("Detected no changes in labstate. Nothing to do.")
            return

        if loaded_lab.__sha256__ == self.__sha256__:
            self._saveState(fname, save_backup)
        else:
            logger.error(
                "%s's hash does not match with the one loaded in memory. Aborting save.",
                fname)
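
A minimal sketch of the hash-guard idea used above, with illustrative names rather than the library's API: hash the serialized state at load time and refuse to overwrite the file if its digest no longer matches.

import hashlib
import json

def sha256_of_state(state_dict):
    payload = json.dumps(state_dict, sort_keys=True).encode('utf-8')
    return hashlib.sha256(payload).hexdigest()

digest_at_load = sha256_of_state({'instruments': []})    # captured when the file was loaded
digest_on_disk = sha256_of_state({'instruments': []})    # recomputed from the file before saving
if digest_at_load == digest_on_disk:
    print('safe to overwrite')
else:
    print('file changed on disk; aborting save')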
Example no. 7
def test_JSONpickleableHard(hardFile):
    loaded = SomethingWithHardStuff.load(hardFile)
    original = SomethingWithHardStuff()

    for arrAttr in ['wArr', 'xArr', 'yArr', 'zArr']:
        if not np.all(getattr(original, arrAttr) == getattr(loaded, arrAttr)):
            logger.error(arrAttr)
            assert False
    assert original.aSpectrum == loaded.aSpectrum

    for funAttr in ['f_inModule', 'f_lambda', 'f_bound']:
        assert getattr(original, funAttr)(10) == getattr(loaded, funAttr)(10)
    # assert original.f_unbound(loaded, 10) == loaded.f_unbound(loaded, 10)
    assert np.all(original.f_library(10) == loaded.f_library(10))
Example no. 8
def init_module(module):
    # do something that imports this module again
    empty_lab = False
    try:
        module.lab = module.LabState.loadState(_filename)
    except OSError as e:
        logger.error("%s: %s.", e.__class__.__name__, e)
        empty_lab = True
    except JSONDecodeError as e:
        if os.stat(_filename).st_size == 0:
            logger.warning("%s is empty.", _filename)
        else:
            logger.error("%s: %s is corrupted. %s.", e.__class__.__name__, _filename, e)
        empty_lab = True

    if empty_lab:
        logger.warning("Starting fresh new LabState(). "
                       "Save for the first time with lab._saveState()")
        module.lab = module.LabState()
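
Such an initializer is typically run once at import time on the module itself; a sketch, assuming `_filename` and `LabState` are defined at module level as above.

import sys

# at the bottom of the module that defines LabState and _filename
init_module(sys.modules[__name__])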
Example no. 9
    def gather(self, soakTime=None, autoSave=False, returnToStart=False):  # pylint: disable=arguments-differ
        ''' Perform the sweep

            Args:
                soakTime (None, float): wait this many seconds at the first point to let things settle
                autoSave (bool): save data on completion, if savefile is specified
                returnToStart (bool): If True, actuates everything to the first point after the sweep completes

            Returns:
                None
        '''
        # Initialize builders that start off with None grids
        if self.data is None:
            # oldData = None
            self.data = OrderedDict()
        else:
            # oldData = self.data.copy()
            for dKeySrc in (self.actuate, self.measure, self.parse):
                for dKey in dKeySrc.keys():
                    try:
                        del self.data[dKey]
                    except KeyError:
                        pass
        try:
            swpName = 'Generic sweep in ' + ', '.join(self.actuate.keys())
            prog = io.ProgressWriter(swpName, self.swpShape, **self.monitorOptions)

            # Soak at the first point
            if soakTime is not None:
                logger.debug('Soaking for %s seconds.', soakTime)
                for actuObj in self.actuate.values():
                    actuObj.function(actuObj.domain[0])
                time.sleep(soakTime)

            for index in np.ndindex(self.swpShape):
                pointData = OrderedDict()  # Everything that will be measured *at this index*

                for statKey, statMat in self.static.items():
                    pointData[statKey] = statMat[index]

                # Do the actuation, storing domain args and return values (if present)
                for iDim, actu in enumerate(self.actuate.items()):
                    actuKey, actuObj = actu
                    if actuObj.domain is None:
                        x = None
                    else:
                        x = actuObj.domain[index[iDim]]
                        pointData[actuKey] = x
                    if iDim == self.actuDims - 1 or index[iDim + 1] == 0 or actuObj.doOnEveryPoint:
                        y = actuObj.function(x)  # The actual function call occurs here
                        if y is not None:
                            pointData[actuKey + '-return'] = y

                # Do the measurement, store return values
                for measKey, measFun in self.measure.items():
                    pointData[measKey] = measFun()
                    # print('   Meas', measKey, ':', pointData[measKey])

                # Parse and store
                for parseKey, parseFun in self.parse.items():
                    try:
                        pointData[parseKey] = parseFun(pointData)
                    except KeyError as err:
                        if err.args[0] in self.parse:
                            print('Parsing out of order.',
                                  'Parser', parseKey, 'depends on parser', err,
                                  'but is being executed first')
                        raise err

                # Insert point data into the full matrix data builder
                # On the first go through, initialize array of correct datatype
                for k, v in pointData.items():
                    if all(i == 0 for i in index):
                        if np.isscalar(v):
                            self.data[k] = np.zeros(self.swpShape, dtype=float)
                        else:
                            self.data[k] = np.empty(self.swpShape, dtype=object)
                    self.data[k][index] = v

                # Plotting during the sweep
                if self.monitorOptions['livePlot']:
                    if all(i == 0 for i in index):
                        axArr = None
                    axArr = self.plot(axArr=axArr, index=index)
                    flatIndex = np.ravel_multi_index(index, self.swpShape)
                    if flatIndex % self.monitorOptions['plotEvery'] == 0:
                        display.display(plt.gcf())
                        display.clear_output(wait=True)
                # Progress report
                prog.update()
            # End of the main loop

        except Exception as err:
            logger.error('Error while sweeping. Keeping data. %s', err)
            raise

        if returnToStart:
            for actuObj in self.actuate.values():
                actuObj.function(actuObj.domain[0])

        if autoSave:
            self.save()
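
The core pattern of the main loop above, isolated as a runnable sketch: walk every index of the sweep grid, actuate and measure at each point, and lazily allocate the full result matrices on the first point. All names and the toy "measurement" are illustrative.

import numpy as np
from collections import OrderedDict

swp_shape = (3, 4)
domain = (np.linspace(0, 1, 3), np.linspace(0, 2, 4))
data = OrderedDict()

for index in np.ndindex(swp_shape):
    point = OrderedDict()
    point['x0'] = domain[0][index[0]]           # actuation value along the first axis
    point['x1'] = domain[1][index[1]]           # actuation value along the second axis
    point['meas'] = point['x0'] * point['x1']   # stand-in for a real measurement call
    for k, v in point.items():
        if all(i == 0 for i in index):          # first point: allocate full result matrices
            data[k] = np.zeros(swp_shape, dtype=float)
        data[k][index] = v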