Example #1
def log(message,
        from_class=None,
        from_object=None,
        create_datafile=False,
        assert_datafile=False,
        level='info'):
    """Add a message to the NPLab log, stored in the current datafile.

        This function will put a message in the nplab_log group in the root of
        the current datafile (i.e. the HDF5 file returned by
        `nplab.current_datafile()`).  It is automatically timestamped and named.

        @param: from_class: The class (or a string containing it) relating to
        the message.  Automatically filled in if from_object is supplied.
        @param: from_object: The object originating the log message.  We save a
        string representing the object's ID (allows us to distinguish between
        concurrent instances).
        @param: create_datafile: By default, log messages are discarded before
        the datafile exists - specifying True here will attempt to create a
        new datafile (which may involve popping up a GUI).
        @param: assert_datafile: Set to true to raise an exception if there is
        no current datafile.
        @param: level: The importance level attached to the log entry; the
        default is 'info'.  The other options are 'debug', 'warn' (i.e.
        warning), 'error', and 'critical'.

        Note that if you are calling this from an `Instrument` subclass you
        should consider using `self.log()` which automatically fills in the
        object and class fields.
        """
    try:
        if hasattr(from_object, '_logger'):
            getattr(from_object._logger, level)(message)
        df = nplab.current_datafile(create_if_none=create_datafile,
                                    create_if_closed=create_datafile)
        logs = df.require_group("nplab_log")
        logs.attrs['log_group'] = True
        dset = logs.create_dataset("entry_%d",
                                   data=np.string_(message),
                                   timestamp=True)
        #save the object and class if supplied.
        if from_object is not None:
            dset.attrs.create("object", np.string_("%x" % id(from_object)))
            dset.attrs['log_dset'] = True
            dset.attrs['level'] = level
            if from_class is None:
                #extract the class of the object if it's not specified
                try:
                    from_class = from_object.__class__
                except:
                    pass
        if from_class is not None:
            dset.attrs.create("class", np.string_(from_class))

    except Exception as e:
        #            print "Couldn't log to file: " + message
        #            print 'due to error', e
        if assert_datafile:
            print("Error saving log message - raising exception.")
            raise e
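A minimal usage sketch for the logging call above, based on its docstring and the test examples further down; the filename and the 'MyStage' class name are illustrative only:

import nplab

# A datafile must exist first, otherwise log messages are silently discarded
# (or pass create_datafile=True / assert_datafile=True to change that).
nplab.datafile.set_current("log_demo.h5", mode="w")   # illustrative filename

nplab.log("scan started")                             # defaults to level='info'
nplab.log("stage did not respond",
          level="warn",
          from_class="MyStage",                       # hypothetical class name
          assert_datafile=True)                       # raise if no datafile is open
nplab.current_datafile().close()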
Example #2
def test_saving(capsys, tmpdir):
    # test the auto-saving capabilities
    a = InstrumentA.get_instance()  #should create/get a valid instance
    nplab.datafile.set_current(str(tmpdir.join("temp.h5")), mode="w")
    df = nplab.current_datafile()
    for i in range(10):
        a.create_data_group('test',
                            attrs={
                                'creator': 'instrumentA',
                                'serial': i
                            })
    assert df['InstrumentA/test_9'].attrs.get(
        'serial') == 9, "data saving didn't work as expected"

    # test the bundled metadata is correctly saved
    data = a.bundle_metadata(np.zeros(100))
    d = a.create_dataset("test_bundled_metadata",
                         data=a.bundle_metadata(np.zeros(100)))
    for k, v in instrumentA_default_metadata.items():
        assert v is None or d.attrs[k] == v

    out, err = capsys.readouterr()  #make sure this is clear
    d = a.create_dataset("test_bundled_metadata_bad",
                         data=a.bundle_metadata(
                             np.zeros(100), property_names=['bad_property']))
    assert "object" in d.attrs[
        'bad_property'], "Fallback to str() failed for bad metadata"
    out, err = capsys.readouterr()
    assert "Warning, metadata bad_property" in out, "Didn't get warning about bad_property: \n{}".format(
        out)

    df.close()
Example #3
    def __init__(self, spec, shutter, experiment, parent=None):
        super(DumbIrradiationExperiment_Gui, self).__init__(parent)
        #Load ui code
        uic.loadUi('DumbIrradiationExperimentGui.ui', self)

        #grabbing the current HDF5 file and initiating the data_browser
        self.data_file = nplab.current_datafile()
        self.data_file_tab = self.replace_widget(self.DataBrowser_tab_layout,
                                                 self.DataBrowser_widget,
                                                 self.data_file.get_qt_ui())

        #setup spectrometer tab gui and widget
        self.spectrometer = spec
        self.Spectrometer_widget = self.replace_widget(
            self.Spectrometer_Layout, self.Spectrometer_widget,
            self.spectrometer.get_qt_ui(display_only=True))
        self.spectrometer_tab = self.replace_widget(
            self.Spectrometer_tab_Layout, self.Spectrometer_tab_widget,
            self.spectrometer.get_qt_ui())

        #Setting up stepper and Lockin widget
        # Display
        self.Experiment = experiment
        self.Experiment_controls_widget = self.replace_widget(
            self.Main_layout, self.Experiment_controls_widget,
            self.Experiment.get_qt_ui())
        #Shutter control widget
        self.shutter = shutter
        self.StageControls_widget = self.replace_widget(
            self.Main_layout, self.shutter_controls_widget,
            self.shutter.get_qt_ui())
Example #4
 def get_root_data_folder(cls):
     """Return a sensibly-named data folder in the default file."""
     if nplab.datafile._use_current_group:
         if nplab.datafile._current_group is not None:
             return nplab.datafile._current_group
     f = nplab.current_datafile()
     return f.require_group(cls.__name__)
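An illustrative call of the method above, assuming `InstrumentA` (as used in the test examples here) inherits from the `Instrument` class that provides `get_root_data_folder`:

import nplab

nplab.datafile.set_current("demo.h5", mode="w")   # illustrative filename
folder = InstrumentA.get_root_data_folder()       # require_group("InstrumentA") on the current file
print(folder.name)                                # expected to be /InstrumentA
nplab.current_datafile().close()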
Example #5
def sequential_shack_hartmann(slm, snapshot_fn, spot, N, overlap=0.0, other_spots=[], pause=False, save=True):
    """Scan a spot over the SLM, recording intensities as we go"""
    results = ArrayWithAttrs(np.zeros((N, N, 3))) # For each position, find X,Y,I
    results.attrs['spot'] = spot
    results.attrs['n_apertures'] = N
    results.attrs['overlap'] = overlap
    results.attrs['other_spots'] = other_spots
    results.attrs['pause_between_spots'] = pause
    if pause:
        app = nplab.utils.gui.get_qt_app()
    for i in range(N):
        for j in range(N):
            slm.make_spots([spot + [float(i+0.5-N/2.0)/N, float(j+0.5-N/2.0)/N, 
                                (0.5 + overlap/2.0)/N, 0]] + other_spots)
            hdr = snapshot_fn()
            results[i,j,2] = hdr.sum()
            results[i,j,:2] = centroid(hdr)
            if pause:
                input("spot %d, %d (hit enter for next)" % (i, j))
                app.processEvents()
            print('.', end='')
    if save:
        dset = nplab.current_datafile().create_dataset("sequential_shack_hartmann_%d", data=results)
        return dset
    else:
        return results
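A hedged usage sketch for the function above; `slm`, `cam`, and the contents of `spot` are placeholders for whatever SLM object, camera, and leading spot parameters your setup's `make_spots()` actually expects:

# 'slm' must provide make_spots() and 'cam' a colour image; both are assumed here.
snapshot = lambda: cam.color_image()[:, :, 2]     # blue channel, as in the focus_stack example below
spot = [0.0, 0.0]                                 # hypothetical leading spot parameters
results = sequential_shack_hartmann(slm, snapshot, spot, N=8,
                                    overlap=0.1, save=False)  # save=False returns the array directly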
Example #6
 def new_hdf5_file(self):
     """Open a new HDF5 file and its databrowser GUI."""
     # close the datafile
     if self.df: self.df.close()
     # open new datafile
     self.df = nplab.current_datafile()
     # close the databrowser gui
     if self.df_gui: self.df_gui.close()
     # open new databrowser gui
     self.df_gui = self.df.show_gui(blocking=False)
     # update the file name on the camera gui
     self.FilePathLineEdit.setText(self.df.filename)
     print()
Example #7
def test_logging_from_instrument(tmpdir):
    nplab.datafile.set_current(str(tmpdir.join("temp.h5")))
    df = nplab.current_datafile()
    assert df, "Error creating datafile!"

    instr = InstrumentA()

    instr.do_something()

    entry = df['nplab_log'].numbered_items("entry")[-1]
    assert entry.value == "doing something"
    assert entry.attrs.get('creation_timestamp') is not None
    assert entry.attrs.get('object') is not None
    assert entry.attrs.get('class') is not None

    df.close()
Example #8
def test_long_log(tmpdir):
    nplab.datafile.set_current(str(tmpdir.join("temp_long.h5")))
    df = nplab.current_datafile()
    assert df, "Error creating datafile!"

    instr = InstrumentA()

    N = 1000
    for i in range(N):
        instr.do_something()
        print(i)
    #assert len(df['nplab_log'].keys()) == N #SLOW!
    assert df['nplab_log/entry_%d' % (N - 1)], "Last log entry was missing!"
    with pytest.raises(KeyError):
        df['nplab_log/entry_%d' % N]  #zero-indexed - this shouldn't exist!

    df.close()
Example #9
def test_logging(tmpdir):
    nplab.datafile.set_current(str(tmpdir.join("temp.h5")))
    df = nplab.current_datafile()
    assert df, "Error creating datafile!"
    print(df)

    nplab.log("test log message", assert_datafile=True)  #make a log message
    df.flush()  #make sure the message makes it to the file...

    print(list(df['nplab_log'].keys()))
    entry = df['nplab_log'].numbered_items("entry")[-1]
    assert entry.value == "test log message"
    print(list(entry.attrs.keys()))
    assert entry.attrs.get('creation_timestamp') is not None

    nplab.log("test log message 2")  #make a log message
    assert len(df['nplab_log'].numbered_items("entry")) == 2

    df.close()
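A short sketch of the auto-numbered dataset convention the tests above rely on ("entry_%d" plus `numbered_items`); the group and file names here are illustrative:

import nplab

nplab.datafile.set_current("numbering_demo.h5", mode="w")  # illustrative filename
df = nplab.current_datafile()
g = df.require_group("demo_group")                         # illustrative group name
for value in range(3):
    g.create_dataset("entry_%d", data=value)               # becomes entry_0, entry_1, entry_2
print([d.name for d in g.numbered_items("entry")])
df.close()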
Example #10
def focus_stack(N, dz, snap=None):
    """Shift the focus (using Zernike modes) and acquire images."""
    if snap is None:
        global cam
        snap = lambda: cam.color_image()[:,:,2]
    img = snap()
    focus_stack = ArrayWithAttrs(np.zeros((N,)+img.shape, dtype=img.dtype))
    for i, d in enumerate(np.linspace(-dz,dz,N)):
        z = zernike_coefficients.copy()
        z[1] += d
        slm.zernike_coefficients = z
        focus_stack[i,:,:] = snap()
    slm.zernike_coefficients=zernike_coefficients
    plt.figure()
    plt.imshow(focus_stack[:,240,:],aspect="auto")
    focus_stack.attrs["dz"]=dz
    focus_stack.attrs["zernike_coefficients"]=zernike_coefficients
    dset = nplab.current_datafile().create_dataset("zstack_%d",data=focus_stack)
    return dset
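An illustrative call of focus_stack above; it relies on the module-level `cam`, `slm`, and `zernike_coefficients` globals used inside the function, and on an open nplab datafile for the auto-numbered "zstack_%d" dataset:

import nplab

nplab.datafile.set_current("focus_demo.h5", mode="w")  # illustrative filename
dset = focus_stack(11, 1.0)                            # 11 frames, defocus swept from -1.0 to +1.0
print(dset.name, dset.shape)
nplab.current_datafile().close()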
Example #11
def focus_stack(N, dz, snap=None):
    """Shift the focus (using Zernike modes) and acquire images."""
    if snap is None:
        global cam
        snap = lambda: cam.color_image()[:, :, 2]
    img = snap()
    focus_stack = ArrayWithAttrs(np.zeros((N, ) + img.shape, dtype=img.dtype))
    for i, d in enumerate(np.linspace(-dz, dz, N)):
        z = zernike_coefficients.copy()
        z[1] += d
        slm.zernike_coefficients = z
        focus_stack[i, :, :] = snap()
    slm.zernike_coefficients = zernike_coefficients
    plt.figure()
    plt.imshow(focus_stack[:, 240, :], aspect="auto")
    focus_stack.attrs["dz"] = dz
    focus_stack.attrs["zernike_coefficients"] = zernike_coefficients
    dset = nplab.current_datafile().create_dataset("zstack_%d",
                                                   data=focus_stack)
    return dset
Example #12
def sequential_shack_hartmann(slm,
                              snapshot_fn,
                              spot,
                              N,
                              overlap=0.0,
                              other_spots=[],
                              pause=False,
                              save=True):
    """Scan a spot over the SLM, recording intensities as we go"""
    results = ArrayWithAttrs(np.zeros(
        (N, N, 3)))  # For each position, find X,Y,I
    results.attrs['spot'] = spot
    results.attrs['n_apertures'] = N
    results.attrs['overlap'] = overlap
    results.attrs['other_spots'] = other_spots
    results.attrs['pause_between_spots'] = pause
    if pause:
        app = nplab.utils.gui.get_qt_app()
    for i in range(N):
        for j in range(N):
            slm.make_spots([
                spot + [
                    float(i + 0.5 - N / 2.0) / N,
                    float(j + 0.5 - N / 2.0) / N, (0.5 + overlap / 2.0) / N, 0
                ]
            ] + other_spots)
            hdr = snapshot_fn()
            results[i, j, 2] = hdr.sum()
            results[i, j, :2] = centroid(hdr)
            if pause:
                input("spot %d, %d (hit enter for next)" % (i, j))
                app.processEvents()
            print('.', end='')
    if save:
        dset = nplab.current_datafile().create_dataset(
            "sequential_shack_hartmann_%d", data=results)
        return dset
    else:
        return results


Example #13
if __name__ == '__main__':
    from nplab.instrument.spectrometer import DummySpectrometer
    from nplab.instrument.shutter import DummyShutter

    spectrometer = DummySpectrometer()
    shutter = DummyShutter()

    experiment = DumbIrradiationExperiment()

    df = nplab.current_datafile()

    #    show_guis([spectrometer, shutter, experiment, df])
    app = get_qt_app()
    gui = DumbIrradiationExperiment_Gui(spec=spectrometer,
                                        shutter=shutter,
                                        experiment=experiment)
    gui.show()
Example #14
import nplab
from mpl_toolkits.mplot3d import Axes3D
import matplotlib.pyplot as plt
import matplotlib as mpl
from mpl_toolkits.mplot3d import proj3d
import numpy as np


nplab.datafile.set_current(r"C:\Users\a-amb\OneDrive - University Of "
                           r"Cambridge\tests.hdf5", mode="r")
df = nplab.current_datafile()
ax = plt.axes(projection='3d')

for i in range(32):
    ser1 = df['AdaptiveHillWalk/AdaptiveHillWalk_54/brightness_{}'.format(i)][:, ...]
    length = ser1[:, 0].size
    colours = np.ones(length)
    ax.scatter(ser1[:, 1], ser1[:, 2], ser1[:, 3],
               s=ser1[:, 4] / 10**(ser1[:, 8] / 10) * 300000,
               label='{}'.format(ser1[0, 0]))

plt.xlabel('x')
plt.ylabel('y')

plt.legend()
plt.axis('equal')
plt.show()
print """
--------------------------------------------------------
Welcome to the general analysis code for HDF5 files produced by
the Ocean Optics Spectrometer
---------------------------------------------------------
"""
raw_input("Press any key to select HDF5 file >> ")

os.chdir(
    r"""C:\Users\alexc\OneDrive - University Of Cambridge\Documents\PhD Project\Projects"""
)

if nplab.datafile._current_datafile is not None:
    nplab.datafile._current_datafile.close()
data_f = nplab.current_datafile(mode='r')
fname = data_f.filename

questions = [
    "Would you like to use default plot settings?",
    """Should we plot spectra taken over a range of time ('y') or with
             certain names ('n')?""",
    "OK, would you like to filter noise from the data? y/n",
    """OK, would you like to output an image with highlighted 
             nanoparticle for each spectrum?""",
    "Would you like to complete a gaussian fit on your data?",
    "Would you like to plot you data with line gradients?",
    "Would you like to plot the peak positions too?"
]

answers = [True] * len(questions)