def calibrate_grating(speed=3000, x_min=0 * u.mm, x_max=None, nx=10):
    spec = instrument(**bristol_params)
    # prepare data arrays
    if x_max is None:
        x_max = (sm.limit_switch_2_pos -
                 sm.limit_switch_1_pos) * sm.travel_per_microstep

    x_comm = np.linspace(
        x_min.to(u.mm).magnitude,
        x_max.to(u.mm).magnitude, nx) * u.mm
    lm = np.empty(len(x_comm)) * u.nm
    # collect data
    for xind, x in enumerate(x_comm):
        print('Acquiring wavelength {} of {}...'.format(xind + 1, len(x_comm)))
        sm.go_and_wait(x, speed=speed)
        lm[xind] = spec.get_wavelength()
        print('...found to be {:7.3f} nm'.format(lm[xind].to(u.nm).magnitude))
    # close the spectrometer connection to avoid errors
    spec.close()

    # generate interpolation function
    grating_tuning_model = interp1d(lm, x_comm)

    # save data
    timestamp_str = datetime.strftime(datetime.now(), '%Y_%m_%d_%H_%M_%S')
    fname = 'IPG_SFTL_grating_calibration_' + timestamp_str + '.txt'
    np.savetxt(path.normpath(path.join(grating_calibration_save_dir, fname)),
               np.stack((x_comm.magnitude, lm.magnitude)))

    return x_comm, lm, grating_tuning_model
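# --- usage sketch (not part of the original script) ---
# Drive the grating to a hypothetical 1550 nm target using the model returned
# above, assuming interp1d works on the bare magnitudes of the arrays built
# there (wavelength in nm -> position in mm), so the result is converted back
# to a unitful position before moving.
x_comm, lm, grating_tuning_model = calibrate_grating(nx=20)
x_target = float(grating_tuning_model(1550.0)) * u.mm
sm.go_and_wait(x_target, speed=3000)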
 def connect(self):
     paramsets = list_instruments()
     self.ccs = instrument(paramsets[0])
     log.info('CCS200/M Spectrometer CONNECTED')
     time.sleep(0.1)
     self.stage = ThorlabsStageWithStepMotors()
     self.stage.set_stage()
Example #3
        def switch_cam(event):
            nonlocal cam, which_cam
            cam.close()

            which_cam = not which_cam

            cam = instrument(names[which_cam])
            cam.start_live_video(framerate=10 * u.hertz)
Example #4
    def open_camera(self, serial=False):
        """Connect to a uc480 camera."""
        print('Attempting to connect to the camera...')
        if serial:
            print("Serial number: %s" % serial)
            self.camera = instrument(serial=serial)  # specified camera
        else:
            print("Available instruments:")
            print(list_instruments())
            self.camera = instrument('uc480')  # default camera

        # set the camera gui buttons
        self.reset_gui_with_camera()
        print('Camera connection successful.\n')
        self.find_cameras()

        # set camera window title
        self.image_widget.setWindowTitle(self.camera.serial +
                                         ' = uc480 camera serial no.')

        # set camera width and height labels
        self.CameraWidthLabel.setText(str(self.camera.max_width))
        self.CameraHeightLabel.setText(str(self.camera.max_height))

        # determine which camera parameters can be set
        try:
            self.camera.gamma = int(self.GammaNumberBox.value())
            self.set_gamma = True
        except:
            print("WARNING: Can't set gamma.\n")
            self.set_gamma = False
        try:
            self.camera.auto_whitebalance = self.AutoWhitebalanceCheckBox.checkState()
            self.set_whitebalance = True
        except:
            print("WARNING: Can't set auto_whitebalance.\n")
            self.set_whitebalance = False

        # initialise the attributes dictionary
        self.attributes = dict()

        # take first image
        self.take_image()
Example #5
 def selected_camera(self):
     self.master.wait_window(self.master)
     if self.found_devices and self.success:
         idx = self.options.index(self.var.get())
         if str(self.paramsets[idx]) not in opened_instruments:
             opened_instruments[str(self.paramsets[idx])] = instrument(
                 self.paramsets[idx])
         return opened_instruments[str(self.paramsets[idx])]
     else:
         return None
    def __init__(self, instr_name):
        super(DriverNI_DAQ, self).__init__(instr_name)

        print "(DriverVISA.__init__('{0}'))".format(instr_name)
        ### Note: instr_name is not really used in this driver yet

        try:
            self.Resource = instrumental.instrument('NIDAQ')
        except:
            print "!!! (DriverNI_DAQ) 'instrumental.instrument('NIDAQ')' couldn't be called."

        self.physicalChannel = "Dev1/ai2"
        self.taskHandle = xxx_TaskHandle(0)
def get_spectrum(plot=True, save=True):
    spec = instrument(**bristol_params)
    lm, psd = spec.get_spectrum()
    spec.close()
    lm = lm.to(u.um)
    timestamp_str = datetime.strftime(datetime.now(), '%Y_%m_%d_%H_%M_%S')
    fname = 'IPG_SFTL_spectrum_' + timestamp_str + '.txt'
    if save:
        np.savetxt(path.normpath(path.join(spectra_save_dir, fname)),
                   np.stack((lm.magnitude, psd)))
    if plot:
        fig = plt.figure(figsize=(12, 12))
        ax = fig.add_subplot(111)
        ax.plot(lm, psd, 'C3')
        ax.grid()
        ax.set_xlabel(r'$\lambda$ [$\mu$m]')
        ax.set_ylabel('PSD [dBm/spectral bin]')
        if save:
            ax.set_title('data saved to file:\n' + fname)
        fig.tight_layout()
        plt.show()
    return lm, psd
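# Usage sketch (not in the original script): grab the spectrum without plotting
# or saving and report the peak wavelength; assumes bristol_params and np are
# defined as in the surrounding snippets.
lm, psd = get_spectrum(plot=False, save=False)
print('peak at {:.4f}'.format(lm[np.argmax(psd)]))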
def InitializeInstruments():
    """
    Initializes the camera and rotators to the desired names.
    TODO: Figure out how to set the camera to 'quantview' mode.

    Parameters
    ----------
    none
    
    Returns
    -------
    cam : object
        Named pyvcam camera object.
    A : object
        Named Instrumental instrument object.
    B : object
        Named Instrumental instrument object.
    C : object
        Named Instrumental instrument object.

    """
    pvc.init_pvcam()  # Initialize PVCAM
    cam = next(Camera.detect_camera())  # Use generator to find first camera
    cam.open()  # Open the camera.
    if cam.is_open:
        print("Camera open")
    else:
        print("Error: camera not found")
    try:
        A = instrument('A')  # try/except is used here to handle
    except:  # a bug in instrumental that requires
        A = instrument('A')  # this line to be run twice
    print("A.serial = " + A.serial)
    try:
        B = instrument('B')
    except:
        B = instrument('B')
    print("B.serial = " + B.serial)
    try:
        C = instrument('C')
    except:
        C = instrument('C')
    print("C.serial = " + C.serial)

    return cam, A, B, C
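# The try/except-twice pattern above (a workaround for an Instrumental bug that
# makes the first instrument() call fail) is repeated for each rotator. A small
# helper -- a sketch, not part of the original code -- can factor it out:
def _instrument_with_retry(name):
    """Open an Instrumental instrument, retrying once if the first call fails."""
    try:
        return instrument(name)
    except Exception:
        return instrument(name)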
 def __enter__(self):
     spec = instrument(**bristol_params)
     self.inst = spec
     return spec
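 def __exit__(self, exc_type, exc_value, traceback):
     # Sketch of the matching exit method (not shown in the original snippet);
     # assumes the Bristol driver exposes close(), as in the other spectrometer
     # snippets in this collection. Returning None lets exceptions propagate.
     self.inst.close()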
Example #10
# -*- coding: utf-8 -*-
from instrumental import instrument, Q_
from instrumental.tools import DataSession
from numpy import sqrt

# Fix for Python 2
try:
    input = raw_input
except NameError:
    pass

# Set up scope and measurements
scope = instrument('SCOPE_C')
scope.set_measurement_params(1, 'amplitude', channel=2)
scope.set_measurement_params(2, 'amplitude', channel=4)
scope.set_measurement_params(3, 'max', channel=3)
scope.set_measurement_params(4, 'mean', channel=3)

scope.enable_measurement_stats()
scope.set_measurement_nsamps(256)

# Create data-taking session
ds = DataSession('Session')

for i in range(3):
    meas = {}
    meas['poling'] = Q_(int(input('Please enter the poling region number: ')))
    meas['temp'] = Q_(float(input('Please enter the crystal temp in C: ')),
                      'degC')

    stats_ch2 = scope.read_measurement_stats(1)
Example #11
from instrumental import instrument, u
daq = instrument(nidaq_devname='Dev1')


def dim_matches(q1, q2):
    return q1.dimensionality == q2.dimensionality


def test_AI_read():
    val = daq.ai0.read()
    assert dim_matches(val, u.V)


def test_AI_read_array():
    ai = daq.ai0
    data = ai.read(n_samples=10, fsamp='1kHz')
    assert data[ai.path].shape == (10, )
    assert data['t'].shape == (10, )
    assert dim_matches(data[ai.path], u.V)
    assert dim_matches(data['t'], u.s)
Example #12
 def connect(self):
     paramsets = list_instruments()
     self.ccs = instrument(paramsets[0])
     time.sleep(0.1)
Example #13
def pytest_runtest_setup(item):
    global current_instrument
    inst_key = inst_key_from_item(item)
    params = instruments[inst_key]
    current_instrument = instrument(params)
def get_wavelength():
    spec = instrument(**bristol_params)
    lm = spec.get_wavelength()
    spec.close()
    return lm
Example #15
    hbox.addWidget(button)
    hbox.addWidget(btn_grab)

    # Assign layouts to widgets
    main_area.setLayout(vbox)
    button_area.setLayout(hbox)
    scroll_area.setLayout(QVBoxLayout())

    # Attach some child widgets directly
    win.setCentralWidget(main_area)

    return app, win, button, btn_grab, scroll_area


if __name__ == '__main__':
    cam = instrument('pxCam')  # Replace with your camera's alias

    with cam:
        app, win, ssbutton, btn_grab, scroll_area = create_window()
        camview = gui.CameraView(cam)
        scroll_area.setWidget(camview)

        ssbutton.running = False
        def start_stop():
            if not ssbutton.running:
                camview.start_video()
                ssbutton.setText("Stop Video")
                ssbutton.running = True
            else:
                camview.stop_video()
                ssbutton.setText("Start Video")
Example #16
def _run_cam(which_cam, verbose=False):

    names = ['ThorCam', 'ChamberCam']  # False, True
    ## https://instrumental-lib.readthedocs.io/en/stable/uc480-cameras.html ##
    cam = instrument(names[which_cam])

    ## Cam Live Stream ##
    cam.start_live_video(framerate=10 * u.hertz)
    exp_t = cam._get_exposure()

    ## Create Figure ##
    fig = plt.figure()
    ax1 = fig.add_subplot(1, 1, 1)

    ## Animation Frame ##
    def animate(i):
        if cam.wait_for_frame():
            im = cam.latest_frame()
            ax1.clear()
            ax1.imshow(im)

    ## Button: Automatic Exposure Adjustment ##
    def find_exposure(event):
        fix_exposure(cam, set_exposure, verbose)

    ## Button: Intensity Feedback ##
    def stabilize(event):  # Wrapper for Intensity Feedback function.
        im = cam.latest_frame()
        print(analyze_image(which_cam, im, 12, 1, True))
        # stabilize_intensity(which_cam, cam, verbose)

    def snapshot(event):
        im = cam.latest_frame()
        guess_image(which_cam, im, 12)

    def switch_cam(event):
        nonlocal cam, which_cam
        cam.close()

        which_cam = not which_cam

        cam = instrument(names[which_cam])
        cam.start_live_video(framerate=10 * u.hertz)

    # ## Button: Pause ##
    # def playback(event):
    #     if playback.running:
    #         spcm_dwSetParam_i32(self.hCard, SPC_M2CMD, M2CMD_CARD_STOP)
    #         playback.running = 0
    #     else:
    #         spcm_dwSetParam_i32(self.hCard, SPC_M2CMD, M2CMD_CARD_START | M2CMD_CARD_ENABLETRIGGER)
    #         playback.running = 1

    # playback.running = 1

    ## Slider: Exposure ##
    def adjust_exposure(exp_t):
        cam._set_exposure(exp_t * u.milliseconds)

    ## Button Construction ##
    axspos = plt.axes([0.56, 0.0, 0.13, 0.05])
    axstab = plt.axes([0.7, 0.0, 0.1, 0.05])
    # axstop = plt.axes([0.81, 0.0, 0.12, 0.05])
    axplot = plt.axes([0.81, 0.0, 0.09, 0.05])  ### !
    axswch = plt.axes([0.91, 0.0, 0.09, 0.05])
    axspar = plt.axes([0.14, 0.9, 0.73, 0.05])

    correct_exposure = Button(axspos, 'AutoExpose')
    stabilize_button = Button(axstab, 'Stabilize')
    # pause_play = Button(axstop, 'Pause/Play')
    plot_snapshot = Button(axplot, 'Plot')
    switch_cameras = Button(axswch, 'Switch')
    set_exposure = Slider(axspar,
                          'Exposure',
                          valmin=0.1,
                          valmax=MAX_EXP,
                          valinit=exp_t.magnitude)

    correct_exposure.on_clicked(find_exposure)
    stabilize_button.on_clicked(stabilize)
    # pause_play.on_clicked(playback)
    plot_snapshot.on_clicked(snapshot)
    switch_cameras.on_clicked(switch_cam)
    set_exposure.on_changed(adjust_exposure)

    ## Begin Animation ##
    _ = animation.FuncAnimation(fig, animate, interval=100)
    plt.show()
    plt.close(fig)
Example #17
#0) Runs on python 3.6.3 (does not work on python 3.7)
#1) install through setup.py https://github.com/mabuchilab/Instrumental
#2) needs pyvisa, cffi (messy installation perhaps depending on OS)
# [email protected]

import visa
rm = visa.ResourceManager()
rm.list_resources()

from instrumental import instrument, list_instruments
from instrumental.drivers.spectrometers.thorlabs_ccs import CCS

paramsets = list_instruments()
paramsets

ccs = instrument(paramsets[0])
ccs

ccs.get_device_info()

ccs.get_integration_time()

ccs.set_integration_time(integration_time='0.01 seconds', stop_scan=True)

ccs.stop_scan()

ccs.stop_and_clear()

ccs.reset()

ccs.start_single_scan()
 def ConnectCamera(self):
     self.cam = instrument(self.insts[0])
     print(self.cam)
Example #19
#!/usr/bin/env python3

%pylab inline
from PIL import Image, ImageOps
from io import BytesIO
import IPython.display
import time
from IPython.display import clear_output
from instrumental import instrument, list_instruments

paramsets = list_instruments()
for index, paramset in enumerate(paramsets):
    print(index, paramset['classname'])

thorcam = instrument(paramsets[0])
thorcam.set_auto_exposure(True)

def showarray(a, fmt='jpeg'):
    f = BytesIO()
    ImageOps.autocontrast(Image.fromarray(a)).save(f, fmt)
    IPython.display.display(IPython.display.Image(data=f.getvalue()))

try:
    while True:
        # Capture frame-by-frame
        t1 = time.time()
        #thorcam.wait_for_frame()
        frame = thorcam.grab_image()
        # Convert the image from OpenCV BGR format to matplotlib RGB format
        # to display the image
        #frame = cv2.cvtColor(frame, cv2.COLOR_BGR2RGB)
Example #20
fig, ax = plt.subplots()

slm.enable_blazed()

fps_ = 60

pmillis = int(round(time.time() * 1000))

num_of_frames = 0

inst = list_instruments()

print(inst)

cam = instrument(inst[0])

cam.master_gain = 1
cam.gain_boost = False

#cam.start_live_video(framerate = '60 hertz', exposure_time='0.05 millisecond')

current_coeffs = [0, 0, 0, 0, 0, 0, 0]  #-0.025, -0.5, 0, -.04, 0.1055, 0.1055, -0.04]
#[0.1338858,  -0.04361271,  0.0405758, -0.54168096, -0.05750595,
# -0.06092065,  0.27694177,  0.06299513, -0.14471439,  0.05366629,  0.09215891,
# -0.34178976,  0.39306562,  0.20678763,  0.08826728,  0.02828799, -0.02992248,
# -0.10104176, -0.07010867, -0.02751629]
#slm.set_zernike_coeffs([0, 0, 0, -0.3, 0, 0, -0.2, 0.25, 0.25, -0.2, 0, 0, 0], [0.75])
tmp = [0, 0, 0]
tmp.extend(current_coeffs)
#     sys.path.append(temp_mon_dir)

from xantrex import xhr_write  # control function for Xantrex XHR 300V supply via LabJack
import ipg_sftl_lib as ipg
from experiment_utilities import print_statusline
from sample_mount_temp_control import set_temp_and_wait, get_meas_temp, get_set_temp, set_set_temp
from plotting_imports import plt

# initialize IPG SFTL
ipg.init()

# connect to scopes
#scope0 = instrument(module='scopes.tektronix', classname='MSO_DPO_4000', visa_address=scope0_address) # DPO2024 above microscope, 4ch, 200MHz, 1.25M samples 1GS/s, USB
#scope1 = instrument(module='scopes.tektronix', classname='TDS_3000', visa_address='TCPIP0::171.64.84.205::INSTR') # TDS 3032 above laser, 2ch, 300MHz, 10k samples @ 2.5GS/s, ethernet
scope2 = instrument(module='scopes.tektronix',
                    classname='TDS_600',
                    visa_address='GPIB0::19::INSTR'
                    )  # TDS 654C, 4ch, 500MHz, 15k samples @ 5GS/s, GPIB
scope3 = instrument(
    module='scopes.tektronix',
    classname='TDS_3000',
    visa_address='TCPIP0::171.64.86.67::INSTR'
)  # TDS 3034B above microscope, borrowed from nate (what a guy, right?), 4ch, 300MHz, 10k samples @ 2.5GS/s, ethernet

# connect to NI DAQ for analog outputs on remote DAQ-server
daq = instrument(DAQ_name, server=DAQ_server)

## connect to function generators
# Tektronix AFG3102 (x2)
afg0_visa_address = 'TCPIP0::171.64.85.99::INSTR'  # Tektronix AFG3102, closer to Nate's side of lab, connected via ethernet, static IP, hostname ml-tek-afg-0.stanford.edu
afg1_visa_address = 'TCPIP0::171.64.85.108::INSTR'  # Tektronix AFG3102, closer to David's desk along wall, connected via ethernet, static IP, hostname ml-tek-afg-1.stanford.edu
# afg0 = instrument({'visa_address':afg0_visa_address,'module':'funcgenerators.tektronix'}) #need to specify classname (1/26/2019); potential error due usage of an older version of Instrumental.
Example #22
# -*- coding: utf-8 -*-
from instrumental import instrument, Q_
from instrumental.tools import DataSession
from numpy import sqrt

# Fix for Python 2
try: input = raw_input
except NameError: pass

# Set up scope and measurements
scope = instrument('SCOPE_C')
scope.set_measurement_params(1, 'amplitude', channel=2)
scope.set_measurement_params(2, 'amplitude', channel=4)
scope.set_measurement_params(3, 'max', channel=3)
scope.set_measurement_params(4, 'mean', channel=3)

scope.enable_measurement_stats()
scope.set_measurement_nsamps(256)

# Create data-taking session
ds = DataSession('Session')

for i in range(3):
    meas = {}
    meas['poling'] = Q_(int(input('Please enter the poling region number: ')))
    meas['temp'] = Q_(float(input('Please enter the crystal temp in C: ')), 'degC')

    stats_ch2 = scope.read_measurement_stats(1)
    stats_ch4 = scope.read_measurement_stats(2)

    meas['reference'] = stats_ch2['mean']
Example #23
# Dependencies: Instrumental-lib by Mabuchi Lab
# Documentation located at http://instrumental-lib.readthedocs.io/en/stable/index.html
# Can be installed via pip or by cloning the github package, see the above site.

# import matplotlib to be able to display the captured image.
from instrumental import instrument
from matplotlib import pyplot
from instrumental.drivers.cameras import uc480
import cv2
import time
paramsets = uc480.list_instruments()
# Assuming only one camera device is connected, the camera we want to connect
# to will be the only one in the instrument list.

camera = instrument(paramsets[0])

#initialize live camera feed
camera.start_live_video()
#this loop updates the image being shown
while camera.wait_for_frame():
    frame = camera.latest_frame()
    cv2.imshow('frame', frame)
    #wait 100 ms
    cv2.waitKey(100)
    # check whether the window was closed; if so, exit the loop to stop refreshing the image
    #without this the loop will automatically open a new window
    if cv2.getWindowProperty('frame', cv2.WND_PROP_VISIBLE) < 1: break
# I downloaded the Instrumental source files from GitHub; they are at C:\Users\ecestudent\Downloads\Instrumental-master
cv2.destroyAllWindows()
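# If more than one uc480 camera is attached, the single-device assumption above
# no longer holds. A sketch of connecting to a specific camera instead, reusing
# the instrument(serial=...) form shown in the earlier uc480 example (the serial
# number below is a placeholder):
# camera = instrument(serial='4102856484')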
Example #24
import numpy as np
from instrumental import instrument, u, list_instruments
import time
from datetime import datetime
import pickle
import pprint
import matplotlib.pyplot as plt
from scipy.optimize import curve_fit
import os
from statistics import stdev
from tqdm import tqdm
from scipy.interpolate import interp1d
import nidaqmx
import pyvisa  # used below for the power meter and MaiTai serial connections
#%% initialize
try:
    C = instrument('C')
except:
    C = instrument('C')
print("C.serial = " + C.serial)
#C.offset = Offset*u.degree

if __name__ == '__main__':
    daq = instrument('daq')
    rm = pyvisa.ResourceManager()
    Pmeter = rm.open_resource('ASRL3::INSTR')
    MaiTai = rm.open_resource('ASRL1::INSTR')

#%%


def Shutter(op):
# from instrumental import Q_, u, instrument
# from scipy.optimize import curve_fit
# import matplotlib.pyplot as plt
# from datetime import datetime
#
# # open oscilloscope
# scope = instrument('DPO2024')
#
# def lorentzian(x,x0,gamma):
#     return ( gamma / ( 2 * np.pi ) ) / ( ( x - x0 )**2 + ( gamma / 2.0 )**2 )
#
# def transmission_trace(ch=2,f_sb=10*u.GHz,plot=True,save=False,t_bg_min_ind=0,t_bg_max_ind=1000,delta_t_min=100*u.usec):
#     t,V = scope.get_data(channel=ch)
#     dt = t[1]-t[0]
#     ind_max = np.argmax(V)
#     t_max = t[ind_max]
#     V_bg = np.mean(V[t_bg_min_ind:t_bg_max_ind])
#     V_norm = (V-V_bg).magnitude / np.max((V-V_bg).magnitude)

import numpy as np
import matplotlib.pyplot as plt
from instrumental import Q_, u, instrument
from datetime import datetime
import cavity_trace_fitting as fitting
scope = instrument('DPO2024')
t, V = scope.get_data(channel=2)
ind0 = 420000
ind1 = 780000
params = fitting.guided_trace_fit(t[ind0:ind1], V[ind0:ind1], 10 * u.GHz)
#plt.plot(t[ind0:ind1],V[ind0:ind1]); plt.show()
Example #26
    hbox.addWidget(button)
    hbox.addWidget(btn_grab)

    # Assign layouts to widgets
    main_area.setLayout(vbox)
    button_area.setLayout(hbox)
    scroll_area.setLayout(QVBoxLayout())

    # Attach some child widgets directly
    win.setCentralWidget(main_area)

    return app, win, button, btn_grab, scroll_area


if __name__ == '__main__':
    cam = instrument('pxCam')  # Replace with your camera's alias

    with cam:
        app, win, ssbutton, btn_grab, scroll_area = create_window()
        camview = gui.CameraView(cam)
        scroll_area.setWidget(camview)

        ssbutton.running = False
        def start_stop():
            if not ssbutton.running:
                camview.start_video()
                ssbutton.setText("Stop Video")
                ssbutton.running = True
            else:
                camview.stop_video()
                ssbutton.setText("Start Video")
                         filling_values=np.nan,
                         unpack=True)
temp_cov = Q_(data_cov[0, :], u.degC)
lm_cov = data_cov[1:, :] * u.nm
LM_cov = np.array([34, 34.8, 35.5, 35.8, 35.97]) * u.um  # poling periods of MSHG2600-1.0-40 PPLN crystal

## import my own SHG calibration data
current_poling_region = 1  # should be fixed when we have a stage to move the PPLN crystal between poling regions.
###############################################

### Open instruments
#spec = instrument(**bristol_params) can't do this here or you get tons of error messages
sm = instrument(module='motion.USMC',
                classname='USMC',
                id=0,
                version=b'2504',
                serial=b'0000000000006302')
#oc = instrument('OC')
oc = instrument(module='tempcontrollers.covesion',
                classname='OC',
                visa_address='ASRL4::INSTR')
#pwrmtr = instrument({'visa_address':pwrmtr_visa_address,'module':'powermeters.thorlabs'})

### function definitions

print_statusline = covesion.print_statusline

## define a decorator function to allow me to keep the Bristol open throughout
## functions defined here rather than opening and closing it over and over
## again.
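# A minimal sketch of such a decorator (not the original implementation, which
# is omitted here): open the Bristol lazily, once, and pass the same open
# instance to every decorated function. Assumes bristol_params is defined as in
# the other spectrometer snippets.
_bristol = None

def with_bristol(func):
    def wrapper(*args, **kwargs):
        global _bristol
        if _bristol is None:
            _bristol = instrument(**bristol_params)
        return func(_bristol, *args, **kwargs)
    return wrapper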
Example #28
    def _run_cam(self, which_cam=None):
        """ Fires up the camera stream (ThorLabs UC480)

        Parameters
        ----------
        which_cam : bool
            Chooses between displaying the Pre- or Post- chamber ThorCams.
            Passing nothing returns a camera object for silent use (not displaying).

        Returns
        -------
        :obj:`instrumental.drivers.cameras.uc480`, None
            Only returns if no selection for `which_cam` is made.

        See Also
        --------
        `Camera Driver Documentation <https://instrumental-lib.readthedocs.io/en/stable/uc480-cameras.html>`_

        :doc:`Guide to GUI & camera use <../how-to/gui>`

        Notes
        -----
        .. todo:: Integrate button for saving optimized waveforms.
        """
        names = ['ThorCam', 'ChamberCam']  # First one is default for stabilize_intensity(wav)
        # fall back to the first (default) camera when which_cam is None (silent, no-display use)
        cam = instrument(names[0 if which_cam is None else which_cam])

        ## Cam Live Stream ##
        cam.start_live_video(framerate=10 * u.hertz)

        ## No-Display mode ##
        if which_cam is None:
            return cam

        ## Create Figure ##
        fig = plt.figure()
        ax1 = fig.add_subplot(1, 1, 1)

        ## Animation Frame ##
        def animate(i):
            if cam.wait_for_frame():
                im = cam.latest_frame()
                ax1.clear()
                if which_cam:
                    im = im[300:501, 300:501]
                ax1.imshow(im)

        ## Button: Automatic Exposure Adjustment ##
        def find_exposure(event):
            fix_exposure(cam, set_exposure)

        ## Button: Intensity Feedback ##
        def stabilize(event):  # Wrapper for Intensity Feedback function.
            self.stabilize_intensity(self.Wave, which_cam, cam)

        def snapshot(event):
            im = cam.latest_frame()
            plot_image(which_cam, im, 12, guess=True)

        def switch_cam(event):
            nonlocal cam, which_cam
            cam.close()

            which_cam = not which_cam

            cam = instrument(names[which_cam])
            cam.start_live_video(framerate=10 * u.hertz)

        ## Slider: Exposure ##
        def adjust_exposure(exp_t):
            cam._set_exposure(exp_t * u.milliseconds)

        ## Button Construction ##
        correct_exposure = Button(plt.axes([0.56, 0.0, 0.13, 0.05]),
                                  'AutoExpose')
        stabilize_button = Button(plt.axes([0.7, 0.0, 0.1, 0.05]), 'Stabilize')
        plot_snapshot = Button(plt.axes([0.81, 0.0, 0.09, 0.05]), 'Plot')
        switch_cameras = Button(plt.axes([0.91, 0.0, 0.09, 0.05]), 'Switch')
        set_exposure = Slider(plt.axes([0.14, 0.9, 0.73, 0.05]), 'Exposure',
                              0.1, MAX_EXP, 20)

        correct_exposure.on_clicked(find_exposure)
        stabilize_button.on_clicked(stabilize)
        plot_snapshot.on_clicked(snapshot)
        switch_cameras.on_clicked(switch_cam)
        set_exposure.on_changed(adjust_exposure)

        ## Begin Animation ##
        _ = animation.FuncAnimation(fig, animate, interval=100)
        plt.show()
        cam.close()
        plt.close(fig)
        self._error_check()
Example #29
    def _run_cam(self, cam_name, verbose=False):
        """ Fires up the camera stream (ThorLabs UC480),
            then plots frames at a modifiable framerate in a Figure.
            Additionally, sets up special button functionality on the Figure.

        """
        ## https://instrumental-lib.readthedocs.io/en/stable/uc480-cameras.html ##
        ## ^^LOOK HERE^^ for driver documentation ##

        ## If you have problems here ##
        ## then see above doc &      ##
        ## Y:\E6\Software\Python\Instrument Control\ThorLabs UC480\cam_control.py ##
        if cam_name == 'ChamberCam':
            cam = instrument(cam_name)
        else:
            cam = instrument('ThorCam')

        ## Cam Live Stream ##
        cam.start_live_video(framerate=10 * u.hertz)
        exp_t = cam._get_exposure()

        ## Create Figure ##
        fig = plt.figure()
        ax1 = fig.add_subplot(1, 1, 1)

        ## Animation Frame ##
        def animate(i):
            if cam.wait_for_frame():
                im = cam.latest_frame()
                ax1.clear()
                ax1.imshow(im)

        ## Button: Automatic Exposure Adjustment ##
        def find_exposure(event):
            fix_exposure(cam, set_exposure, verbose)

        ## Button: Intensity Feedback ##
        def stabilize(event):  # Wrapper for Intensity Feedback function.
            self.stabilize_intensity(cam, verbose)

        ## Button: Pause ##
        def playback(event):
            if playback.running:
                spcm_dwSetParam_i32(self.hCard, SPC_M2CMD, M2CMD_CARD_STOP)
                playback.running = 0
            else:
                spcm_dwSetParam_i32(self.hCard, SPC_M2CMD, M2CMD_CARD_START | M2CMD_CARD_ENABLETRIGGER)
                playback.running = 1
        playback.running = 1

        ## Slider: Exposure ##
        def adjust_exposure(exp_t):
            cam._set_exposure(exp_t * u.milliseconds)

        ## Button Construction ##
        axspos = plt.axes([0.56, 0.0, 0.13, 0.05])
        axstab = plt.axes([0.7,  0.0, 0.1,  0.05])
        axstop = plt.axes([0.81, 0.0, 0.12, 0.05])
        axspar = plt.axes([0.14, 0.9, 0.73, 0.05])
        correct_exposure = Button(axspos, 'AutoExpose')
        stabilize_button = Button(axstab, 'Stabilize')
        pause_play       = Button(axstop, 'Pause/Play')
        set_exposure     = Slider(axspar, 'Exposure', valmin=0.1, valmax=80, valinit=exp_t.magnitude)
        correct_exposure.on_clicked(find_exposure)
        stabilize_button.on_clicked(stabilize)
        pause_play.on_clicked(playback)
        set_exposure.on_changed(adjust_exposure)

        ## Begin Animation ##
        _ = animation.FuncAnimation(fig, animate, interval=100)
        plt.show()
        plt.close(fig)
        self._error_check()