Example #1
class DumbOpenCVCameraWithTimelapse(nplab.instrument.camera.opencv.OpenCVCamera):
    timelapse_n = DumbNotifiedProperty(5)
    timelapse_dt = DumbNotifiedProperty(1.0)

    def take_timelapse_foreground(self,
                                  data_group=None,
                                  n=None,
                                  dt=None,
                                  update_progress=lambda p: p):
        if data_group is None:
            data_group = self.create_data_group("timelapse_%d")
        if n is None:
            n = self.timelapse_n
        if dt is None:
            dt = self.timelapse_dt
        g = data_group
        for i in range(n):
            update_progress(i)
            time.sleep(dt)
            print("acquiring image {}".format(i))
            g.create_dataset("image_%d", data=self.color_image())

    def take_timelapse(self):
        run_function_modally(self.take_timelapse_foreground,
                             progress_maximum=self.timelapse_n,
                             data_group=self.create_data_group("timelapse_%d"),
                             n=self.timelapse_n,
                             dt=self.timelapse_dt)

    def get_control_widget(self):
        "Get a Qt widget with the camera's controls (but no image display)"
        return TimelapseCameraControlWidget(self)
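For reference, the foreground method above can also be called directly from a script, without the modal progress dialog. This is a rough sketch only; the OpenCV device index argument and an already-open nplab data file are assumptions, not part of the example above.

cam = DumbOpenCVCameraWithTimelapse(0)       # 0 = OpenCV device index (assumed constructor argument)
cam.take_timelapse_foreground(n=3, dt=0.5)   # update_progress defaults to a no-op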
Example #2
class foo(object):
    a = DumbNotifiedProperty()
    b = DumbNotifiedProperty(10)
    @NotifiedProperty
    def c(self):
        return 99
    @c.setter
    def c(self, val):
        print("discarding {0}".format(val))
Example #3
class OpenCVCameraWithTimelapse(nplab.instrument.camera.opencv.OpenCVCamera):
    timelapse_n = DumbNotifiedProperty(5)
    timelapse_dt = DumbNotifiedProperty(1.0)

    def take_timelapse(self, n=None, dt=None):
        if n is None:
            n = self.timelapse_n
        if dt is None:
            dt = self.timelapse_dt
        print("starting timelapse with n:{}, dt:{}".format(n, dt))
        e = AcquireTimelapse()
        e.camera = self
        e.run_modally(n=n, dt=dt)
        print("function has finished")

    def get_control_widget(self):
        "Get a Qt widget with the camera's controls (but no image display)"
        return TimelapseCameraControlWidget(self)
Example #4
class CameraWithLocationControlUI(QtWidgets.QWidget):
    """The control box for a CameraWithLocation"""
    calibration_distance = DumbNotifiedProperty(0)
    def __init__(self, cwl):
        super(CameraWithLocationControlUI, self).__init__()
        self.cwl = cwl
        cc = QuickControlBox("Settings")
        cc.add_doublespinbox("calibration_distance")
        cc.add_button("calibrate_xy_gui", "Calibrate XY")
        cc.auto_connect_by_name(self)
        self.calibration_controls = cc

        fc = QuickControlBox("Autofocus")
        fc.add_doublespinbox("af_step_size")
        fc.add_spinbox("af_steps")
        fc.add_button("autofocus_gui", "Autofocus")
        fc.add_button("quick_autofocus_gui", "Quick Autofocus")
        fc.auto_connect_by_name(self.cwl)
        self.focus_controls = fc

        l = QtWidgets.QHBoxLayout()
        l.addWidget(cc)
        l.addWidget(fc)
        self.setLayout(l)

    def calibrate_xy_gui(self):
        """Run an XY calibration, with a progress bar in the foreground"""
        step = None if self.calibration_distance <= 0 else float(self.calibration_distance)
        run_function_modally(self.cwl.calibrate_xy,
                             progress_maximum=7,
                             step=step)
Example #5
class Spectrometers(Instrument):  # class header reconstructed from the super() call below; base class assumed
    filename = DumbNotifiedProperty('spectra')  # class-level attribute, not a local in __init__

    def __init__(self, spectrometer_list):
        assert False not in [isinstance(s, Spectrometer) for s in spectrometer_list], \
            'an invalid spectrometer was supplied'
        super(Spectrometers, self).__init__()
        self.spectrometers = spectrometer_list
        self.num_spectrometers = len(spectrometer_list)
        self._pool = ThreadPool(processes=self.num_spectrometers)
        self._wavelengths = None
Example #6
class DumbIrradiationExperiment(Experiment):
    """An example experiment that opens and closes a shutter, and takes spectra."""
    irradiation_time = DumbNotifiedProperty(1.0)
    wait_time = DumbNotifiedProperty(0.5)
    log_to_console = True

    def __init__(self):
        super(DumbIrradiationExperiment, self).__init__()

        self.shutter = Shutter.get_instance()
        self.spectrometer = Spectrometer.get_instance()

    def run(self):
        try:
            dg = self.create_data_group("irradiation_%d")
            while True:
                self.log("opening shutter")
                self.shutter.open_shutter()
                self.wait_or_stop(self.irradiation_time)
                self.shutter.close_shutter()
                self.log("closed shutter")
                self.wait_or_stop(self.wait_time)
                spectrum = self.spectrometer.read_spectrum(
                    bundle_metadata=True)
                dg.create_dataset("spectrum_%d", data=spectrum)
        except ExperimentStopped:
            pass  #don't raise an error if we just clicked "stop"
        finally:
            self.shutter.close_shutter(
            )  #close the shutter, important if we abort

    def get_qt_ui(self):
        """Return a user interface for the experiment"""
        gb = QuickControlBox("Irradiation Experiment")
        gb.add_doublespinbox("irradiation_time")
        gb.add_doublespinbox("wait_time")
        gb.add_button("start")
        gb.add_button("stop")
        gb.auto_connect_by_name(self)
        return gb
Example #7
class ThorPM100(ThorlabsPM100, Instrument):
    num_averages = DumbNotifiedProperty()

    def __init__(self,
                 address='USB0::0x1313::0x8072::P2004571::0::INSTR',
                 num_averages=100,
                 calibration=None):
        """A quick wrapper to create a GUI and bring nplab features to the Thorlabs PM100 power meter."""
        rm = visa.ResourceManager()
        instr = rm.open_resource(address)
        super(ThorPM100, self).__init__(instr)
        self.num_averages = num_averages
        self.ui = None
        if calibration is None:
            self.calibration = 1.0
        else:
            self.calibration = calibration

    def read_average(self, num_averages=None):
        """A quick averaging tool for the PM100 power meter."""
        if num_averages is None:
            num_averages = self.num_averages
        values = []
        for i in range(num_averages):
            values.append(self.read)  # 'read' is a property on ThorlabsPM100, not a method
        return np.average(values)

    def read_average_power(self):
        """Return the average power including a calibration """
        return self.read_average() * self.calibration

    average_power = NotifiedProperty(read_average_power)

    def update_power(self):
        """Update the power in the gui """
        self.ui.controls['Power'].setText(str(self.average_power))

    def get_qt_ui(self):
        if self.ui is None:
            self.ui = ThorlabsPM100_widget(self)
        return self.ui

    def set_wavelength(self, wavelength):
        # the class subclasses ThorlabsPM100 directly, so use self.sense (there is no self.power_meter)
        self.sense.correction.wavelength = wavelength

    def get_wavelength(self):
        return self.sense.correction.wavelength

    wavelength = NotifiedProperty(get_wavelength, set_wavelength)
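A brief usage sketch for the wrapper above; the VISA address is simply the constructor's default, and a real meter will enumerate differently.

pm = ThorPM100()                 # uses the default VISA address from __init__
pm.wavelength = 633              # NotifiedProperty -> set_wavelength(633)
print(pm.read_average(10))       # average of 10 readings
print(pm.read_average_power())   # read_average() scaled by the calibration factor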
Example #8
class PowerMeter(Instrument):
    '''
    Brings basic nplab functionality, and a GUI with a live mode, to a power meter.
    The minimum you need to do to subclass this is override the read_power method.
    '''
    live = DumbNotifiedProperty(False)

    def __init__(self):
        Instrument.__init__(self)

    def read_power(self):
        raise NotImplementedError

    @property
    def power(self):
        return self.read_power()

    def get_qt_ui(self):
        return PowerMeterUI(self)
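As the docstring says, a working subclass only needs to override read_power. A minimal sketch with simulated readings (the noise source is purely illustrative):

import random

class SimulatedPowerMeter(PowerMeter):
    def read_power(self):
        # stand-in for a real hardware query, in watts
        return 1e-3 * (1 + random.uniform(-0.01, 0.01))

pm = SimulatedPowerMeter()
print(pm.power)   # the 'power' property calls read_power()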
Example #9
class CameraControlWidget(QtWidgets.QWidget, UiTools):
    """Controls for a camera (these are the really generic ones)"""
    def __init__(self, camera, auto_connect=True):
        assert isinstance(camera, Camera), "instrument must be a Camera"
        #TODO: better checking (e.g. assert camera has color_image, gray_image methods)
        super(CameraControlWidget, self).__init__()
        self.camera = camera
        self.load_ui_from_file(__file__, "camera_controls_generic.ui")
        if auto_connect:
            self.auto_connect_by_name(controlled_object=self.camera,
                                      verbose=False)

    def snapshot(self):
        """Take a new snapshot and display it."""
        self.camera.raw_image(update_latest_frame=True)

    def save_to_data_file(self):
        self.camera.save_raw_image(
            attrs={'description': self.description_lineedit.text()})

    def save_jpeg(self):
        cur_img = self.camera.color_image()
        fname = QtWidgets.QFileDialog.getSaveFileName(
            caption="Select JPEG filename",
            directory=os.path.join(
                os.getcwd(),
                datetime.date.today().strftime("%Y-%m-%d.jpg")),
            filter="Images (*.jpg *.jpeg)",
        )
        j = Image.fromarray(cur_img)
        j.save(fname)

    def edit_camera_parameters(self):
        """Pop up a camera parameters dialog box."""
        self.camera_parameters_widget = self.camera.get_parameters_widget()
        self.camera_parameters_widget.show()

    description = DumbNotifiedProperty("Description...")

    def __del__(self):
        pass
Example #10
class Dummyflipper(Flipper):
    """A stub class to simulate a flipper"""
    _open = DumbNotifiedProperty(False)

    def __init__(self):
        """Create a dummy flipper object"""
        self._open = False
        super(Dummyflipper, self).__init__()

    def toggle(self):
        """toggle the state of the flipper"""
        self._open = not self._open

    def get_state(self):
        """Return the state of the flipper, a string reading 'open' or 'closed'"""
        return "Open" if self._open else "Closed"

    def set_state(self, value):
        """Set the state of the flipper (to open or closed)"""
        if isinstance(value, str):
            self._open = (value.lower() == "open")
        elif isinstance(value, bool):
            self._open = value
Example #11
class Experiment(Instrument):
    """A class representing an experimental protocol.
    
    This base class is a subclass of Instrument, so it provides all the GUI
    code and data management that instruments have.  It's also got an
    improved logging mechanism, designed for use as a status display, and some
    template methods for running a long experiment in the background.
    """
    
    latest_data = DumbNotifiedProperty(doc="The last dataset/group we acquired")
    log_messages = DumbNotifiedProperty(doc="Log messages from the latest run")
    log_to_console = False
    experiment_can_be_safely_aborted = False # set to true if you want to suppress warnings about ExperimentStopped
    
    def __init__(self):
        """Create an instance of the Experiment class"""
        super(Experiment, self).__init__()
        self._stop_event = threading.Event()
        self._finished_event = threading.Event()
        self._experiment_thread = None
        self.log_messages = ""

    def prepare_to_run(self, *args, **kwargs):
        """This method is always run in the foreground thread before run()

        Use this method if you might need to pop up a GUI, for example.  The
        most common use of this would be to create a data group or to ensure
        the current data file exists - doing that in run() could give rise
        to nasty threading problems.  By default, it does nothing.

        The arguments are passed through from start() to here, so you should
        either use or ignore them as appropriate.  These are the same args
        as are passed to run(), so if one of the two functions requires an
        argument you should make sure the other won't fail if the same
        argument is passed to it (simple rule: accept *args, **kwargs in
        both, in addition to any arguments you might have).
        """
        pass

    def run(self, *args, **kwargs):
        """This method should be the meat of the experiment (must be overridden).
        
        This is where your experiment code goes.  Note that you should use
        `self.wait_or_stop()` to pause your experiment between readings, to
        allow the background thread to be stopped if necessary.
        
        If you set `self.latest_data`, this may be used to display your
        results in real time.  You can also use `self.log()` to output text
        describing the experiment's progress; this may be picked up and
        displayed graphically or in the console.

        The arguments are passed through from start() to here, so you should
        either use or ignore them as appropriate.  These are the same args
        as are passed to prepare_to_run(), so if one of the two functions
        requires an argument you should make sure the other won't fail if the
        same argument is passed to it (simple rule: accept *args, **kwargs in
        both, in addition to any arguments you might have).
        """
        raise NotImplementedError("The run() method of an Experiment must be overridden!")
        
    def wait_or_stop(self, timeout, raise_exception=True):
        """Wait for the specified time in seconds.  Stop if requested.
        
        This waits for a given time, unless the experiment has been manually 
        stopped, in which case it will terminate the thread by raising an
        ExperimentStopped exception.  You should call this whenever your
        experiment is in a state that would be OK to stop, such as between
        readings.
        
        If raise_exception is False, it will simply return False when the
        experiment should stop.  This is appropriate if you want to use it in a
        while loop, e.g. ``while self.wait_or_stop(10,raise_exception=False):``
        
        You may want to explicitly handle the ExperimentStopped exception to
        close down cleanly.
        """
        if self._stop_event.wait(timeout):
            if raise_exception:
                raise ExperimentStopped()
            return False
        return True

    @background_action
    @locked_action
    def run_in_background(self, *args, **kwargs):
        """Run the experiment in a background thread.
        
        This is important in order to keep the GUI responsive.
        """
        self.log_messages = ""
        self._stop_event.clear()
        self._finished_event.clear()
        self.run(*args, **kwargs)
        self._finished_event.set()
        
    def start(self, *args, **kwargs):
        """Start the experiment running in a background thread.  See run_in_background."""
        assert not self.running, "Can't start the experiment when it is already running!"
        self.prepare_to_run(*args, **kwargs)
        self._experiment_thread = self.run_in_background(*args, **kwargs)
        
    def stop(self, join=False):
        """Stop the experiment running, if supported.  May take a little while."""
        self._stop_event.set()
        if join:
            try:
                self._experiment_thread.join()
            except ExperimentStopped as e:
                if not self.experiment_can_be_safely_aborted:
                    raise e

    @property
    def running(self):
        """Whether the experiment is currently running in the background."""
        return background_actions_running(self)
    
    def log(self, message):
        """Log a message to the current HDF5 file and to the experiment's history"""
        self.log_messages += message + "\n"
        if self.log_to_console:
            print(message)
        super(Experiment, self).log(message)
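To make the template methods above concrete, this is roughly how a subclass is driven from a script; MyExperiment is a placeholder for any concrete subclass (for instance the DumbIrradiationExperiment in Example #6).

import time

exp = MyExperiment()        # placeholder subclass that implements run()
exp.start()                 # prepare_to_run() runs here, then run() in a background thread
time.sleep(10)              # let it acquire for a while
exp.stop(join=True)         # sets the stop event; wait_or_stop() raises ExperimentStopped
print(exp.log_messages)     # everything passed to self.log() during the run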
Example #12
class Image_Filter_box(Instrument):
    threshold = DumbNotifiedProperty()
    bin_fac = DumbNotifiedProperty()
    min_size = DumbNotifiedProperty()
    max_size = DumbNotifiedProperty()
    bilat_height = DumbNotifiedProperty()
    bilat_size = DumbNotifiedProperty()
    morph_kernel_size = DumbNotifiedProperty()
    show_particles = DumbNotifiedProperty()
    return_original_with_particles = DumbNotifiedProperty()

    def __init__(self,
                 threshold=40,
                 bin_fac=4,
                 min_size=2,
                 max_size=6,
                 bilat_size=3,
                 bilat_height=40,
                 morph_kernel_size=3):
        super(Image_Filter_box, self).__init__()  # register with the Instrument machinery (assumed to be needed)
        self.threshold = threshold
        self.bin_fac = bin_fac
        self.min_size = min_size
        self.max_size = max_size
        self.bilat_size = bilat_size
        self.bilat_height = bilat_height
        self.morph_kernel_size = morph_kernel_size
        self.filter_options = [
            'None', 'STBOC_with_size_filter', 'strided_rescale',
            'StrBiThresOpen'
        ]
        self.show_particles = False
        self.return_original_with_particles = False
        self.current_filter_index = 0
        self.update_functions = []

    def current_filter(self, image):
        if self.current_filter_proxy is None:
            return image
        else:
            return self.current_filter_proxy(image)

    def set_current_filter_index(self, filter_index):
        filter_name = self.filter_options[filter_index]
        self._filter_index = filter_index
        if filter_name == 'None':
            self.current_filter_proxy = None
        else:
            self.current_filter_proxy = getattr(self, filter_name)
        self._current_filter_str = filter_name

    def get_current_filter_index(self):
        return self._filter_index

    current_filter_index = NotifiedProperty(fget=get_current_filter_index,
                                            fset=set_current_filter_index)

    def STBOC_with_size_filter(self, g, return_centers=False):
        try:
            return STBOC_with_size_filter(
                g,
                bin_fac=self.bin_fac,
                bilat_size=self.bilat_size,
                bilat_height=self.bilat_height,
                threshold=self.threshold,
                min_size=self.min_size,
                max_size=self.max_size,
                morph_kernel_size=self.morph_kernel_size,
                show_particles=self.show_particles,
                return_original_with_particles=self.
                return_original_with_particles,
                return_centers=return_centers)
        except Exception as e:
            self.log('Image processing has failed due to: ' + str(e),
                     level='WARN')

    def strided_rescale(self, g):
        try:
            return strided_rescale(g, bin_fac=self.bin_fac)
        except Exception as e:
            self.log('Image processing has failed due to: ' + str(e),
                     level='WARN')

    def StrBiThresOpen(self, g):
        try:
            return StrBiThresOpen(g,
                                  bin_fac=self.bin_fac,
                                  bilat_size=self.bilat_size,
                                  bilat_height=self.bilat_height,
                                  threshold=self.threshold,
                                  morph_kernel_size=self.morph_kernel_size)
        except Exception as e:
            self.log('Image processing has failed due to: ' + str(e),
                     level='WARN')

    def connect_function_to_property_changes(self, function):
        """Call 'function' whenever any notified property of this class changes."""
        for variable_name in vars(self.__class__):
            attr = getattr(self.__class__, variable_name)
            if type(attr) in (DumbNotifiedProperty, NotifiedProperty):
                self.update_functions.append(function)
                register_for_property_changes(self, variable_name,
                                              self.update_functions[-1])

    def get_qt_ui(self):
        return Camera_filter_Control_ui(self)
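A sketch of how the change-notification hook above might be used; it assumes (as in the sketch after Example #2) that registered callbacks receive the new property value.

filters = Image_Filter_box()
filters.connect_function_to_property_changes(
    lambda value: print("a filter setting changed: {}".format(value)))
filters.threshold = 55             # DumbNotifiedProperty assignment fires the callback
filters.current_filter_index = 1   # the NotifiedProperty setter fires it too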
Example #13
class Camera(Instrument):
    """Generic class for representing cameras.
    
    This should always be subclassed in order to make a useful instrument.
    
    The minimum you should do is alter raw_snapshot to acquire and return a
    frame from the camera.  All other acquisition functions can come from that.
    If your camera also supports video streaming (for live view, for example)
    you should also override the live_view property to start and stop the stream.
    """

    video_priority = DumbNotifiedProperty(False)
    """Set video_priority to True to avoid disturbing the video stream when
    taking images.  raw_snapshot may ignore the setting, but get_image and by
    extension rgb_image and gray_image will honour it."""
    filename = DumbNotifiedProperty('snapshot_%d')

    parameters = None

    filter_function = None
    """This function is run on the image before it's displayed in live view.  
    It should accept, and return, an RGB image as its argument."""
    def __init__(self):
        super(Camera, self).__init__()
        self.acquisition_lock = threading.Lock()
        self._latest_frame_update_condition = threading.Condition()
        self._live_view = False
        self._frame_counter = 0
        # Ensure camera parameters get saved in the metadata.  You may want to override this in subclasses
        # to remove junk (e.g. if some of the parameters are meaningless)
        #        self.metadata_property_names = self.metadata_property_names + tuple(self.camera_parameter_names())
        self.metadata_property_names = tuple(
            self.metadata_property_names) + tuple(
                self.camera_parameter_names())
#       self.filename = 'snapshot_%d'

    def __del__(self):
        self.close()
#        super(Camera,self).__del__() #apparently not...?

    def close(self):
        """Stop communication with the camera and allow it to be re-used.
        
        override in subclass if you want to shut down hardware."""
        self.live_view = False

    def get_next_frame(self,
                       timeout=60,
                       discard_frames=0,
                       assert_live_view=True,
                       raw=True):
        """Wait for the next frame to arrive and return it.
        
        This function is mostly intended for acquiring frames from a video
        stream that's running in live view - it returns a fresh frame without
        interrupting the video.  If called with timeout=None when live view is
        false, it may take a very long time to return.
        
        @param: timeout: Maximum length of time to wait for a new frame.  None
        waits forever, but this may be a bad idea (could hang your script).
        @param: discard_frames: Wait for this many new frames before returning
        one.  This option is useful if the camera buffers frames, so you must
        wait for several frames to be acquired before getting a "fresh" one.
        The default setting of 0 means the first new frame that arrives is
        returned.
        @param: assert_live_view: If True (default) raise an assertion error if
        live view is not enabled - this function is intended only to be used
        when that is the case.
        @param: raw: The default (True) returns a raw frame - False returns the
        frame after processing by the filter function if any.
        """
        if assert_live_view:
            assert self.live_view, """Can't wait for the next frame if live view is not enabled!"""
        with self._latest_frame_update_condition:
            # We use the Condition object to block until a new frame appears
            # However we need to check that a new frame has actually been taken
            # so we use the frame counter.
            # NB the current implementation may be vulnerable to dropped frames
            # which will probably cause a timeout error.
            # Checking for frame_counter being >= target_frame is vulnerable to
            # overflow.
            target_frame = self._frame_counter + 1 + discard_frames
            expiry_time = time.time() + timeout
            while self._frame_counter != target_frame and time.time(
            ) < expiry_time:
                self._latest_frame_update_condition.wait(
                    timeout)  #wait for a new frame
            if time.time() >= expiry_time:
                raise IOError(
                    "Timed out waiting for a fresh frame from the video stream."
                )
            if raw:
                return self.latest_raw_frame
            else:
                return self.latest_frame

    def raw_snapshot(self):
        """Take a snapshot and return it.  No filtering or conversion."""
        raise NotImplementedError("Cameras must subclass raw_snapshot!")
        # Subclasses should return a (success, frame) tuple, e.g.
        # return True, np.zeros((640, 480, 3), dtype=np.uint8)

    def get_image(self):
        print "Warning: get_image is deprecated, use raw_image() instead."
        return self.raw_image()

    def raw_image(self, bundle_metadata=False, update_latest_frame=False):
        """Take an image from the camera, respecting video priority.
        
        If live view is enabled and video_priority is true, return the next
        frame in the video stream.  Otherwise, return a specially-acquired
        image from raw_snapshot.
        """
        frame = None
        if self.live_view and self.video_priority:
            frame = self.get_next_frame(raw=True)
        else:
            status, frame = self.raw_snapshot()
        if update_latest_frame:
            self.latest_raw_frame = frame
        # return it as an ArrayWithAttrs including self.metadata, if requested
        return self.bundle_metadata(frame, bundle_metadata)

    def color_image(self, **kwargs):
        """Get a colour image (bypass filtering, etc.)
        
        Additional keyword arguments are passed to raw_image."""
        frame = self.raw_image(**kwargs)
        try:
            assert frame.shape[2] == 3
            return frame
        except:
            try:
                assert len(frame.shape) == 2
                gray_frame = np.dstack(
                    (frame, ) * 3)  # stack into NxMx3 to turn gray into colour
                if hasattr(frame, "attrs"):
                    return ArrayWithAttrs(gray_frame, attrs=frame.attrs)
                else:
                    return gray_frame
            except:
                raise Exception(
                    "Couldn't convert the camera's raw image to colour.")

    def gray_image(self, **kwargs):
        """Get a colour image (bypass filtering, etc.)
        
        Additional keyword arguments are passed to raw_image."""
        frame = self.raw_image(**kwargs)
        try:
            assert len(frame.shape) == 2
            return frame
        except:
            try:
                assert frame.shape[2] == 3
                return np.mean(frame, axis=2, dtype=frame.dtype)
            except:
                raise Exception(
                    "Couldn't convert the camera's raw image to grayscale.")

    def save_raw_image(self, update_latest_frame=True, attrs={}):
        """Save an image to the default place in the default HDF5 file."""
        d = self.create_dataset(self.filename,
                                data=self.raw_image(
                                    bundle_metadata=True,
                                    update_latest_frame=update_latest_frame))
        d.attrs.update(attrs)

    _latest_raw_frame = None

    @NotifiedProperty
    def latest_raw_frame(self):
        """The last frame acquired by the camera.  
        
        This property is particularly useful when
        live_view is enabled.  This is before processing by any filter function
        that may be in effect.  May be NxMx3 or NxM for monochrome.  To get a
        fresh frame, use raw_image().  Setting this property will update any
        preview widgets that are in use."""
        return self._latest_raw_frame

    @latest_raw_frame.setter
    def latest_raw_frame(self, frame):
        """Set the latest raw frame, and update the preview widget if any."""
        with self._latest_frame_update_condition:
            self._latest_raw_frame = frame
            self._frame_counter += 1
            self._latest_frame_update_condition.notify_all()

        # TODO: use the NotifiedProperty to do this with less code?
        if self._preview_widgets is not None:
            for w in self._preview_widgets:
                try:
                    w.update_image(self.latest_frame)
                except Exception as e:
                    print "something went wrong updating the preview widget"
                    print e

    @property
    def latest_frame(self):
        """The last frame acquired (in live view/from GUI), after filtering."""
        if self.filter_function is not None:
            return self.filter_function(self.latest_raw_frame)
        else:
            return self.latest_raw_frame

    def update_latest_frame(self, frame=None):
        """Take a new frame and store it as the "latest frame"
        
        Returns the image as displayed, including filters, etc.
        This should rarely be used - raw_image, color_image and gray_image are
        the preferred way of acquiring data.  If you supply an image, it will
        use that image as if it was the most recent colour image to be 
        acquired.
        
        Unless you need the filtered image, you should probably use 
        raw_image, color_image or gray_image.
        """
        if frame is None:
            frame = self.color_image()
        if frame is not None:
            self.latest_raw_frame = frame

            return self.latest_frame
        else:
            print "Failed to get an image from the camera"

    def camera_parameter_names(self):
        """Return a list of names of parameters that may be set/read.
        
        This will list the names of all the members of this class that are 
        `CameraParameter`s - you should define one of these for each of the 
        properties of the camera you'd like to expose.
        
        If you need to support dynamic properties, I suggest you use a class
        factory, and add CameraParameters at runtime.  You could do this from
        within the class, but that's a courageous move.
        
        If you need more sophisticated control, I suggest subclassing
        `CameraParameter`, though I can't currently see how that would help...
        """
        # first, identify all the CameraParameter properties we've got
        p_list = []
        for p in dir(self.__class__):
            try:
                if isinstance(getattr(self.__class__, p), CameraParameter):
                    getattr(self, p)
                    p_list.append(p)
            except:
                delattr(self.__class__, p)
                pass
        return p_list

#     return [p for p in dir(self.__class__)
#              if isinstance(getattr(self.__class__, p), CameraParameter)]

    def get_camera_parameter(self, parameter_name):
        """Return the named property from the camera"""
        raise NotImplementedError(
            "You must override get_camera_parameter to use it")

    def set_camera_parameter(self, parameter_name, value):
        """Return the named property from the camera"""
        raise NotImplementedError(
            "You must override set_camera_parameter to use it")

    _live_view = False

    @NotifiedProperty
    def live_view(self):
        """Whether the camera is currently streaming and displaying video"""
        return self._live_view

    @live_view.setter
    def live_view(self, live_view):
        """Turn live view on and off.
        
        This is used to start and stop streaming of the camera feed.  The
        default implementation just repeatedly takes snapshots, but subclasses
        are encouraged to override that behaviour by starting/stopping a stream
        and using a callback function to update self.latest_raw_frame."""
        if live_view:
            if self._live_view:
                return  # do nothing if it's going already.
            print("starting live view thread")
            try:
                self._frame_counter = 0
                self._live_view_stop_event = threading.Event()
                self._live_view_thread = threading.Thread(
                    target=self._live_view_function)
                self._live_view_thread.start()
                self._live_view = True
            except AttributeError as e:  #if any of the attributes aren't there
                print "Error:", e
        else:
            if not self._live_view:
                return  # do nothing if it's not running.
            print "stopping live view thread"
            try:
                self._live_view_stop_event.set()
                self._live_view_thread.join()
                del (self._live_view_stop_event, self._live_view_thread)
                self._live_view = False
            except AttributeError:
                raise Exception(
                    "Tried to stop live view but it doesn't appear to be running!"
                )

    def _live_view_function(self):
        """This function should only EVER be executed by _live_view_changed.
        
        Loop until the event tells us to stop, constantly taking snapshots.
        Ideally you should override live_view to start and stop streaming
        from the camera, using a callback function to update latest_raw_frame.
        """
        while not self._live_view_stop_event.wait(timeout=0.1):
            success, frame = self.raw_snapshot()
            self.update_latest_frame(frame)

    legacy_click_callback = None

    def set_legacy_click_callback(self, function):
        """Set a function to be called when the image is clicked.
        
        Warning: this is only for compatibility with old code and will be removed
        once camera_stage_mapper is updated!
        """
        self.legacy_click_callback = function
        if self._preview_widgets is not None:
            for w in self._preview_widgets:
                w.add_legacy_click_callback(self.legacy_click_callback)

    _preview_widgets = None

    def get_preview_widget(self):
        """A Qt Widget that can be used as a viewfinder for the camera.
        
        In live mode, this is continuously updated.  It's also updated whenever
        a snapshot is taken using update_latest_frame.  Currently this returns
        a single widget instance - in future it might be able to generate (and
        keep updated) multiple widgets."""
        if self._preview_widgets is None:
            self._preview_widgets = WeakSet()
        new_widget = CameraPreviewWidget()
        self._preview_widgets.add(new_widget)
        if self.legacy_click_callback is not None:
            new_widget.add_legacy_click_callback(self.legacy_click_callback)
        return new_widget

    def get_control_widget(self):
        """Return a widget that contains the camera controls but no image."""
        return CameraControlWidget(self)

    def get_parameters_widget(self):
        """Return a widget that controls the camera's settings."""
        return CameraParametersWidget(self)

    def get_qt_ui(self, control_only=False, parameters_only=False):
        """Create a QWidget that controls the camera.
        
        Specifying control_only=True returns just the controls for the camera.
        Otherwise, you get both the controls and a preview window.
        """
        if control_only:
            return self.get_control_widget()
        elif parameters_only:
            return self.get_parameters_widget()
        else:
            return CameraUI(self)
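Following the class docstring, the minimum useful subclass just overrides raw_snapshot. A sketch with a synthetic frame standing in for real hardware:

import numpy as np

class SyntheticCamera(Camera):
    def raw_snapshot(self):
        # return a (success, frame) tuple, as callers of raw_snapshot expect
        frame = np.random.randint(0, 256, size=(480, 640, 3), dtype=np.uint8)
        return True, frame

cam = SyntheticCamera()
ok, raw = cam.raw_snapshot()
rgb = cam.color_image()   # goes through raw_image(), honouring video_priority
cam.live_view = True      # the default implementation polls raw_snapshot in a thread
cam.live_view = False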
Example #14
class Experiment(Instrument):
    """A class representing an experimental protocol.
    
    This base class is a subclass of Instrument, so it provides all the GUI
    code and data management that instruments have.  It's also got an
    improved logging mechanism, designed for use as a status display, and some
    template methods for running a long experiment in the background.
    """

    latest_data = DumbNotifiedProperty(
        doc="The last dataset/group we acquired")
    log_messages = DumbNotifiedProperty(doc="Log messages from the latest run")
    log_to_console = False

    def __init__(self):
        """Create an instance of the Experiment class"""
        super(Experiment, self).__init__()
        self._stop_event = threading.Event()
        self.log_messages = ""

    def run(self, *args, **kwargs):
        """This method should be the meat of the experiment (needs overriden).
        
        This is where your experiment code goes.  Note that you should use
        `self.wait_or_stop()` to pause your experiment between readings, to
        allow the background thread to be stopped if necessary.
        
        If you set `self.latest_data`, this may be used to display your
        results in real time.  You can also use `self.log()` to output text
        describing the experiment's progress; this may be picked up and 
        displayed graphically or in the console.
        """
        raise NotImplementedError()

    def wait_or_stop(self, timeout, raise_exception=True):
        """Wait for the specified time in seconds.  Stop if requested.
        
        This waits for a given time, unless the experiment has been manually 
        stopped, in which case it will terminate the thread by raising an
        ExperimentStopped exception.  You should call this whenever your
        experiment is in a state that would be OK to stop, such as between
        readings.
        
        If raise_exception is False, it will simply return False when the
        experiment should stop.  This is appropriate if you want to use it in a
        while loop, e.g. ``while self.wait_or_stop(10,raise_exception=False):``
        
        You may want to explicitly handle the ExperimentStopped exception to
        close down cleanly.
        """
        if self._stop_event.wait(timeout):
            if raise_exception:
                raise ExperimentStopped()
            return False
        return True

    @background_action
    @locked_action
    def run_in_background(self, *args, **kwargs):
        """Run the experiment in a background thread.
        
        This is important in order to keep the GUI responsive.
        """
        self.log_messages = ""
        self._stop_event.clear()
        self.run(*args, **kwargs)

    def start(self):
        """Start the experiment running in a background thread.  See run_in_background."""
        assert not self.running, "Can't start the experiment when it is already running!"
        self.run_in_background()

    def stop(self):
        """Stop the experiment running, if supported.  May take a little while."""
        self._stop_event.set()

    @property
    def running(self):
        """Whether the experiment is currently running in the background."""
        return background_actions_running(self)

    def log(self, message):
        """Log a message to the current HDF5 file and to the experiment's history"""
        self.log_messages += message + "\n"
        if self.log_to_console:
            print(message)
        super(Experiment, self).log(message)
Example #15
class CameraWithLocation(Instrument):
    """
    A class wrapping a camera and a stage, allowing them to work together.

    This is designed to handle the low-level stuff like calibration, crosscorrelation, and closed-loop stage control.
    It also handles autofocus, and has logic for drift correction.  It could compensate for a non-horizontal sample to
    some extent by adding a tilt to the image plane - but this is as yet unimplemented.
    """
    pixel_to_sample_displacement = None  # A 3x3 matrix that relates displacements in pixels to distance units
    pixel_to_sample_displacement_shape = None  # The shape of the images taken to calibrate the stage
    drift_estimate = None  # Reserved for future use, to compensate for drift
    datum_pixel = None  # The position, in pixels in the image, of the "datum point" of the system.
    settling_time = 0.0  # How long to wait for the stage to stop vibrating.
    frames_to_discard = 1  # How many frames to discard from the camera after a move.
    # Whether to disable live view while calibrating/autofocusing/etc.
    disable_live_view = DumbNotifiedProperty(False)
    af_step_size = DumbNotifiedProperty(1)  # The size of steps to take when autofocusing
    af_steps = DumbNotifiedProperty(7)  # The number of steps to take during autofocus

    def __init__(self, camera=None, stage=None):
        # If no camera or stage is supplied, attempt to retrieve them - but crash with an exception if they don't exist.
        if camera is None:
            camera = Camera.get_instance(create=False)
        if stage is None:
            stage = Stage.get_instance(create=False)
        self.camera = camera
        self.stage = stage
        self.filter_images = False
        Instrument.__init__(self)

        shape = self.camera.color_image().shape
        # Default to using the centre of the image as the datum point
        self.datum_pixel = np.array(shape[:2]) / 2.0
        #     self.camera.set_legacy_click_callback(self.move_to_feature_pixel)
        self.camera.set_legacy_click_callback(self.move_to_pixel)

    @property
    def pixel_to_sample_matrix(self):
        here = self.datum_location
        assert self.pixel_to_sample_displacement is not None, "The CameraWithLocation must be calibrated before use!"
        datum_displacement = np.dot(ensure_3d(self.datum_pixel),
                                    self.pixel_to_sample_displacement)
        # NB M is never a matrix; that would create issues, as then all the vectors must be matrices
        M = np.zeros((4, 4))
        # We calibrate the conversion of displacements and store it
        M[0:3, 0:3] = self.pixel_to_sample_displacement
        # Ensure that the datum pixel transforms to here.
        M[3, 0:3] = here - datum_displacement
        return M

    def _add_position_metadata(self, image):
        """Add position metadata to an image, assuming it has just been acquired"""
        iwl = ImageWithLocation(image)
        iwl.attrs['datum_pixel'] = self.datum_pixel
        iwl.attrs['stage_position'] = self.stage.position
        if self.pixel_to_sample_displacement is not None:
            # (A previous version asserted iwl.shape[:2] == pixel_to_sample_displacement.shape[:2],
            # but that check doesn't make much sense: iwl has the size of the image while the matrix is always 3x3.)
            iwl.attrs['pixel_to_sample_matrix'] = self.pixel_to_sample_matrix
        else:
            iwl.attrs['pixel_to_sample_matrix'] = np.identity(4)
            print('Stage is not yet calibrated')
        return iwl

    ####### Wrapping functions for the camera #######
    def raw_image(self, *args, **kwargs):
        """Return a raw image from the camera, including position metadata"""
        return self._add_position_metadata(
            self.camera.raw_image(*args, **kwargs))

    def gray_image(self, *args, **kwargs):
        """Return a grayscale image from the camera, including position metadata"""
        return self._add_position_metadata(
            self.camera.gray_image(*args, **kwargs))

    def color_image(self, ignore_filter=False, *args, **kwargs):
        """Return a colour image from the camera, including position metadata"""
        image = self.camera.color_image(*args, **kwargs)
        if (not ignore_filter and self.filter_images
                and self.camera.filter_function is not None):
            image = self.camera.filter_function(image)
        return self._add_position_metadata(image)

    def thumb_image(self, size=(100, 100)):
        """Return a cropped "thumb" of the given size from the centre of the image."""
        image = self.color_image()
        cx, cy = old_div(image.shape[0], 2), old_div(image.shape[1], 2)
        hx, hy = old_div(size[0], 2), old_div(size[1], 2)
        thumb = image[cx - hx:cx + hx, cy - hy:cy + hy]
        return thumb

    ###### Wrapping functions for the stage ######
    def move(self, *args, **kwargs):  # TODO: take account of drift
        """Move the stage to a given position"""
        self.stage.move(*args, **kwargs)

    def move_rel(self, *args, **kwargs):
        """Move the stage by a given amount"""
        self.stage.move_rel(*args, **kwargs)

    def move_to_pixel(self, x, y):
        """Move the stage so the sample location under pixel (x, y) ends up at the datum point."""
        iwl = ImageWithLocation(self.camera.latest_raw_frame)
        iwl.attrs['datum_pixel'] = self.datum_pixel
        iwl.attrs['pixel_to_sample_matrix'] = self.pixel_to_sample_matrix
        if (iwl.pixel_to_sample_matrix != np.identity(4)).any():
            # only move if the image has been calibrated
            self.move(iwl.pixel_to_location([x, y]))

    @property
    def datum_location(self):
        """The location in the sample of the datum point (i.e. the current stage position, corrected for drift)"""
        if self.drift_estimate is None:
            return self.stage.position
        else:
            return self.stage.position - self.drift_estimate

    ####### Useful functions for closed-loop stage control #######
    def settle(self, flush_camera=True, *args, **kwargs):
        """Wait for the stage to stop moving/vibrating, and (unless specified) discard frame(s) from the camera.

        After moving the stage, to get a fresh image from the camera we usually need to both wait for the stage to stop
        vibrating, and discard one or more frames from the camera, so we have a fresh one.  This function does both of
        those things (except if flush_camera is False).
        """
        time.sleep(self.settling_time)
        for i in range(self.frames_to_discard):
            self.camera.raw_image(*args, **kwargs)

    def move_to_feature(self,
                        feature,
                        ignore_position=False,
                        ignore_z_pos=False,
                        margin=50,
                        tolerance=0.5,
                        max_iterations=10):
        """Bring the feature in the supplied image to the centre of the camera

        Strictly, what this aims to do is move the sample such that the datum pixel of the "feature" image is on the
        datum pixel of the camera.  It does this by first (unless instructed not to) moving to the datum point as
        defined by the image.  It then compares the image from the camera with the feature, and adjusts the position.

        feature : ImageWithLocation or numpy.ndarray
            The feature that we want to move to.
        ignore_position : bool (optional, default False)
            Set this to true to skip the initial move using the image's metadata.
        margin : int (optional)
            The maximum error, in pixels, that we can cope with (this sets the size of the search area we use to look
            for the feature in the camera image, it is (2*range + 1) in both X and Y.  Set to 0 to use the maximum
            possible search area (given by the difference in size between the feature image and the camera image)
        tolerance : float (optional)
            Once the error between our current position and the feature's position is below this threshold, we stop.
        max_iterations : int (optional)
            The maximum number of moves we make to fine-tune the position.
        """
        if (feature.datum_pixel[0] < 0
                or feature.datum_pixel[0] > np.shape(feature)[0]
                or feature.datum_pixel[1] < 0
                or feature.datum_pixel[1] > np.shape(feature)[1]):
            self.log(
                'The datum picture of the feature is outside of the image!',
                level='WARN')

        if not ignore_position:
            try:
                if ignore_z_pos:
                    # ignore the original z value
                    self.move(feature.datum_location[:2])
                else:
                    # initial move to where we recorded the feature was
                    self.move(feature.datum_location)
            except:
                print(
                    "Warning: no position data in feature image, skipping initial move."
                )
        image = self.color_image()
        assert isinstance(
            image, ImageWithLocation
        ), "CameraWithLocation should return an ImageWithLocation...?"

        last_move = np.inf  # np.infty is a deprecated alias in newer NumPy
        for i in range(max_iterations):
            try:
                self.settle()
                image = self.color_image()
                pixel_position = locate_feature_in_image(image,
                                                         feature,
                                                         margin=margin,
                                                         restrict=margin > 0)
                #   pixel_position = locate_feature_in_image(image, feature,margin=margin)
                new_position = image.pixel_to_location(pixel_position)
                self.move(new_position)
                last_move = np.sqrt(
                    np.sum((new_position - image.datum_location
                            )**2))  # calculate the distance moved
                self.log(
                    "Centering on feature, iteration {}, moved by {}".format(
                        i, last_move))
                if last_move < tolerance:
                    break
            except Exception as e:
                self.log(
                    "Error centering on feature, iteration {} raised an exception:\n{}\n"
                    .format(i, e) +
                    "The feature size was {}\n".format(feature.shape) +
                    "The image size was {}\n".format(image.shape))
        if last_move > tolerance:
            self.log("Error centering on feature, final move was too large.")
        return last_move < tolerance

    def move_to_feature_pixel(self, x, y, image=None):
        if self.pixel_to_sample_displacement is not None:  # i.e. the stage has been calibrated
            if image is None:
                image = self.color_image()
            feature = image.feature_at((x, y))
            self.last_feature = feature
            self.move_to_feature(feature)
        else:
            print('CameraWithLocation is not yet calibrated!!')

    def autofocus(self,
                  dz=None,
                  merit_function=af_merit_squared_laplacian,
                  method="centre_of_mass",
                  noise_floor=0.3,
                  exposure_factor=1.0,
                  use_thumbnail=False,
                  update_progress=lambda p: p):
        """Move to a range of Z positions and measure focus, then move to the best one.

        Arguments:
        dz : np.array (optional, defaults to values specified in af_step_size and af_steps)
            Z positions, relative to the current position, to move to and measure focus.
        merit_function : function, optional
            A function that takes an image and returns a focus score, which we maximise.
        update_progress : function, optional
            This will be called each time we take an image - for use with run_function_modally.
        """
        self.camera.exposure = old_div(self.camera.exposure, exposure_factor)
        if dz is None:
            # Default: af_steps points centred on the current position, spaced by af_step_size
            dz = (np.arange(self.af_steps) - old_div((self.af_steps - 1), 2)) * self.af_step_size
        here = self.stage.position
        positions = []  # positions keeps track of where we sample
        powers = []  # powers holds the value of the merit fn at each point
        camera_live_view = self.camera.live_view
        if self.disable_live_view:
            self.camera.live_view = False

        for step_num, z in enumerate(dz):
            self.stage.move(np.array([0, 0, z]) + here)
            self.settle()
            positions.append(self.stage.position)
            if use_thumbnail is True:
                image = self.thumb_image()
            else:
                image = self.color_image()
            powers.append(merit_function(image))
            update_progress(step_num)
        powers = np.array(powers)
        positions = np.array(positions)
        z = positions[:, 2]
        if method == "centre_of_mass":
            threshold = powers.min() + (powers.max() -
                                        powers.min()) * noise_floor
            weights = powers - threshold
            weights[weights < 0] = 0.  # zero out any negative values
            indices_of_maxima = argrelextrema(
                np.pad(weights, (1, 1), 'minimum'), np.greater)[0] - 1
            number_of_maxima = indices_of_maxima.size
            if (np.sum(weights) == 0):
                print(
                    "Warning, something went wrong and all the autofocus scores were identical! Returning to initial position."
                )
                new_position = here  # Return to initial position if something fails
            elif (number_of_maxima
                  == 1) and not (indices_of_maxima[0] == 0 or
                                 indices_of_maxima[-1] == (weights.size - 1)):
                new_position = old_div(np.dot(weights, positions),
                                       np.sum(weights))
            else:
                print(
                    "Warning, a maximum autofocus score could not be found. Returning to initial position."
                )
                new_position = here
        elif method == "parabola":
            coefficients = np.polyfit(z, powers, deg=2)  # fit a parabola
            # p = c[0]*z**2 + c[1]*z + c[2], which has its max (or min) where 2*c[0]*z + c[1] = 0, i.e. z = -c[1]/(2*c[0])
            root = old_div(-coefficients[1], (2 * coefficients[0]))
            if z.min() < root and root < z.max():
                new_position = [here[0], here[1], root]
            else:
                # The new position would have been outside the scan range - clip it to the outer points.
                new_position = positions[powers.argmax(), :]
        else:
            new_position = positions[powers.argmax(), :]
        self.stage.move(new_position)
        self.camera.live_view = camera_live_view
        update_progress(self.af_steps + 1)
        self.camera.exposure = self.camera.exposure * exposure_factor
        return new_position - here, positions, powers

    def quick_autofocus(self,
                        dz=0.5,
                        full_dz=None,
                        trigger_full_af=True,
                        update_progress=lambda p: p,
                        **kwargs):
        """Do a quick 3-step autofocus, performing a full autofocus if needed

        dz is a single number - we move this far above and below the current position."""
        shift, pos, powers = self.autofocus(np.array([-dz, 0, dz]),
                                            method="parabola",
                                            update_progress=update_progress)
        if np.linalg.norm(shift) >= dz and trigger_full_af:
            return self.autofocus(full_dz,
                                  update_progress=update_progress,
                                  **kwargs)
        else:
            return shift, pos, powers

    def autofocus_gui(self):
        """Run an autofocus using default parameters, with a GUI progress bar."""
        run_function_modally(self.autofocus,
                             progress_maximum=self.af_steps + 1)

    def quick_autofocus_gui(self):
        """Run an autofocus using default parameters, with a GUI progress bar."""
        run_function_modally(self.quick_autofocus,
                             progress_maximum=self.af_steps + 1)

    def calibrate_xy(self,
                     update_progress=lambda p: p,
                     step=None,
                     min_step=1e-5,
                     max_step=1000):
        """Make a series of moves in X and Y to determine the XY components of the pixel-to-sample matrix.

        Arguments:
        step : float, optional (default None)
            The amount to move the stage by.  This should move the sample by approximately 1/10th of the field of view.
            If it is left as None, we will attempt to auto-determine the step size (see below).
        min_step : float, optional
            If we auto-determine the step size, start with this step size.  It's deliberately tiny.
        max_step : float, optional
            If we're auto-determining the step size, fail if it looks like it's more than this.

        This starts by gingerly moving the stage a tiny amount.  That is repeated, increasing the distance exponentially
        until we see a reasonable movement.  This means we shouldn't need to worry too much about setting the distance
        we use for calibration.

        NB this currently assumes the stage deals with backlash correction for us.
        """
        # First, acquire a template image:
        self.settle()
        starting_image = self.color_image()
        starting_location = self.datum_location
        w, h = starting_image.shape[:2]
        template = starting_image[int(w / 4):int(3 * w / 4),
                                  int(h / 4):int(3 * h / 4),
                                  ...]  # Use the central 50%x50% as template
        threshold_shift = w * 0.02  # Require a shift of at least 2% of the image's width
        target_shift = w * 0.1  # Aim for a shift of about 10%
        assert np.sum((locate_feature_in_image(starting_image, template) -
                       self.datum_pixel)**2) < 1, "Template's not centred!"
        update_progress(1)
        if step is None:
            # Next, move a small distance until we see a shift, to auto-determine the calibration distance.
            step = min_step
            shift = 0
            while np.linalg.norm(shift) < threshold_shift:
                assert step < max_step, "Error, we hit the maximum step before we saw the sample move."
                self.move(starting_location + np.array([step, 0, 0]))
                image = self.color_image()
                shift = locate_feature_in_image(image,
                                                template) - image.datum_pixel
                if np.sqrt(np.sum(shift**2)) > threshold_shift:
                    break
                else:
                    step *= 10**(0.5)
            step *= old_div(
                target_shift, np.linalg.norm(shift)
            )  # Scale the stage step so the image shifts by roughly 10% of its width.
        update_progress(2)
        # Move the stage in a square, recording the displacement from both the stage and the camera
        pixel_shifts = []
        images = []
        for i, p in enumerate([[-step, -step, 0], [-step, step, 0],
                               [step, step, 0], [step, -step, 0]]):
            self.move(starting_location + np.array(p))
            self.settle()
            image = self.color_image()
            pixel_shifts.append(-locate_feature_in_image(image, template) +
                                image.datum_pixel)
            images.append(image)
            # NB the minus sign here: we want the position of the image we just took relative to the datum point of
            # the template, not the other way around.
            update_progress(3 + i)
        # We then use least-squares to fit the XY part of the matrix relating pixels to distance
        # NB we use each image's 'stage_position' attribute here, rather than its datum_location,
        # to avoid relying on a pixel-to-sample calibration that may not exist yet.
        location_shifts = np.array([
            ensure_2d(im.attrs['stage_position'] - starting_location)
            for im in images
        ])
        pixel_shifts = np.array(pixel_shifts)
        print(np.shape(pixel_shifts), np.shape(location_shifts))
        A, res, rank, s = np.linalg.lstsq(
            pixel_shifts,
            location_shifts)  # we solve pixel_shifts*A = location_shifts

        self.pixel_to_sample_displacement = np.zeros((3, 3))
        self.pixel_to_sample_displacement[
            2, 2] = 1  # just pass Z through unaltered
        self.pixel_to_sample_displacement[:2, :2] = A  # A deals with xy only
        fractional_error = np.sqrt(
            np.sum(res) / np.prod(pixel_shifts.shape)) / np.std(pixel_shifts)
        print(fractional_error)
        print(np.sum(res), np.prod(pixel_shifts.shape), np.std(pixel_shifts))
        if fractional_error > 0.02:  # Check it was a reasonably good fit
            print(
                "Warning: the error fitting measured displacements was %.1f%%"
                % (fractional_error * 100))
        self.log(
            "Calibrated the pixel-location matrix.\nResiduals were {}% of the shift.\nStage positions:\n{}\n"
            "Pixel shifts:\n{}\nResulting matrix:\n{}".format(
                fractional_error * 100, location_shifts, pixel_shifts,
                self.pixel_to_sample_displacement))
        update_progress(7)
        self.update_config('pixel_to_sample_displacement',
                           self.pixel_to_sample_displacement)
        return self.pixel_to_sample_displacement, location_shifts, pixel_shifts, fractional_error
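
    # Usage sketch (hypothetical instance name `cwl`): since lstsq solves
    # pixel_shifts * A = location_shifts, the stored matrix maps pixel displacements to
    # sample displacements (dx_pixels/dy_pixels below are purely illustrative values):
    #
    #     matrix, location_shifts, pixel_shifts, error = cwl.calibrate_xy()
    #     sample_shift = np.dot([dx_pixels, dy_pixels, 0], cwl.pixel_to_sample_displacement)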

    def load_calibration(self):
        """Acquire a new spectrum and use it as a reference."""
        self.pixel_to_sample_displacement = self.config_file[
            'pixel_to_sample_displacement'][:]

    def get_qt_ui(self):
        """Create a QWidget that controls the camera.

        Specifying control_only=True returns just the controls for the camera.
        Otherwise, you get both the controls and a preview window.
        """
        return CameraWithLocationUI(self)

    def get_control_widget(self):
        """Create a QWidget to control the CameraWithLocation"""
        return CameraWithLocationControlUI(self)
Пример #16
0
class Spectrometer(Instrument):

    metadata_property_names = ('model_name', 'serial_number',
                               'integration_time', 'reference', 'background',
                               'wavelengths', 'background_int',
                               'reference_int', 'variable_int_enabled',
                               'background_gradient', 'background_constant',
                               'averaging_enabled', 'absorption_enabled')

    variable_int_enabled = DumbNotifiedProperty(False)
    filename = DumbNotifiedProperty("spectrum")

    def __init__(self):
        super(Spectrometer, self).__init__()
        self._model_name = None
        self._serial_number = None
        self._wavelengths = None
        self.reference = None
        self.background = None
        self.background_constant = None
        self.background_gradient = None
        self.background_int = None
        self.reference_int = None
        self.latest_raw_spectrum = None
        self.latest_spectrum = None
        self.averaging_enabled = False
        self.spectra_deque = deque(maxlen=1)
        self.absorption_enabled = False
        self._config_file = None

        self.stored_references = {}
        self.stored_backgrounds = {}
        self.reference_ID = 0
        self.spectra_buffer = np.zeros(0)
        self.data_file = df.current()
        self.curr_scan = None
        self.num_spectra = 1
        self.delay = 0
        self.time_series_name = 'time_series_%d'

    def __del__(self):
        try:
            self._config_file.close()
        except AttributeError:
            pass  #if it's not present, we get an exception - which doesn't matter.

    def open_config_file(self):
        """Open the config file for the current spectrometer and return it, creating if it's not there"""
        if self._config_file is None:
            f = inspect.getfile(self.__class__)
            d = os.path.dirname(f)
            self._config_file = DataFile(
                h5py.File(os.path.join(d, 'config.h5')))
            self._config_file.attrs['date'] = datetime.datetime.now().strftime(
                "%H:%M %d/%m/%y")
        return self._config_file

    config_file = property(open_config_file)

    def update_config(self, name, data, attrs=None):
        """Update the configuration file for this spectrometer.
        
        A file is created in the nplab directory that holds configuration
        data for the spectrometer, including reference/background.  This
        function allows values to be stored in that file."""
        f = self.config_file
        if name not in f:
            f.create_dataset(name, data=data, attrs=attrs)
        else:
            dset = f[name]
            dset[...] = data
            f.flush()
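
    # Usage sketch: store the current background so it can be reloaded after a restart
    # (the instance name and array here are purely illustrative):
    #
    #     spectrometer.update_config('background', np.zeros(1024))
    #     restored = spectrometer.config_file['background'][:]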

    def get_model_name(self):
        """The model name of the spectrometer."""
        if self._model_name is None:
            self._model_name = 'model_name'
        return self._model_name

    model_name = property(get_model_name)

    def get_serial_number(self):
        """The spectrometer's serial number (as a string)."""
        warnings.warn(
            "Using the default implementation for get_serial_number: this should be overridden!",
            DeprecationWarning)
        if self._serial_number is None:
            self._serial_number = 'serial_number'
        return self._serial_number

    serial_number = property(get_serial_number)

    def get_integration_time(self):
        """The integration time of the spectrometer (this function is a stub)!"""
        warnings.warn(
            "Using the default implementation for integration time: this should be overridden!",
            DeprecationWarning)
        return 0

    def set_integration_time(self, value):
        """Set the integration time of the spectrometer (this is a stub)!"""
        warnings.warn(
            "Using the default implementation for integration time: this should be overridden!",
            DeprecationWarning)
        print('setting 0')

    integration_time = property(get_integration_time, set_integration_time)

    def get_wavelengths(self):
        """An array of wavelengths corresponding to the spectrometer's pixels."""
        warnings.warn(
            "Using the default implementation for wavelengths: this should be overridden!",
            DeprecationWarning)

        if self._wavelengths is None:
            self._wavelengths = np.arange(400, 1200, 1)
        return self._wavelengths

    wavelengths = property(get_wavelengths)

    def read_spectrum(self, bundle_metadata=False):
        """Take a reading on the spectrometer and return it"""
        warnings.warn(
            "Using the default implementation for read_spectrum: this should be overridden!",
            DeprecationWarning)
        self.latest_raw_spectrum = np.zeros(0)
        return self.bundle_metadata(self.latest_raw_spectrum,
                                    enable=bundle_metadata)

    def read_background(self):
        """Acquire a new spectrum and use it as a background measurement.
        This background should be less than 50% of the spectrometer saturation"""
        if self.averaging_enabled:
            background_1 = np.average(self.read_averaged_spectrum(True, True),
                                      axis=0)
        else:
            background_1 = self.read_spectrum()
        self.integration_time = 2.0 * self.integration_time
        if self.averaging_enabled:
            background_2 = np.average(self.read_averaged_spectrum(True, True),
                                      axis=0)
        else:
            background_2 = self.read_spectrum()
        self.integration_time = self.integration_time / 2.0
        self.background_gradient = old_div((background_2 - background_1),
                                           self.integration_time)
        self.background_constant = background_1 - (self.integration_time *
                                                   self.background_gradient)
        self.background = background_1
        self.background_int = self.integration_time
        self.stored_backgrounds[self.reference_ID] = {
            'background_gradient': self.background_gradient,
            'background_constant': self.background_constant,
            'background': self.background,
            'background_int': self.background_int
        }
        self.update_config('background_gradient', self.background_gradient)
        self.update_config('background_constant', self.background_constant)
        self.update_config('background', self.background)
        self.update_config('background_int', self.background_int)
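
    # The two acquisitions above (at integration times t and 2t) fit a simple linear model
    # for the background signal as a function of integration time t:
    #
    #     background(t) ~ background_constant + background_gradient * t
    #
    # process_spectrum uses this model when variable_int_enabled is set, so the background
    # can be evaluated at whatever integration time a spectrum was taken with.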

    def clear_background(self):
        """Clear the current background reading."""
        self.background = None
        self.background_gradient = None
        self.background_constant = None
        self.background_int = None

    def read_reference(self):
        """Acquire a new spectrum and use it as a reference."""
        if self.averaging_enabled:
            self.reference = np.average(self.read_averaged_spectrum(
                True, True),
                                        axis=0)
        else:
            self.reference = self.read_spectrum()
        self.reference_int = self.integration_time
        self.update_config('reference', self.reference)
        self.update_config('reference_int', self.reference_int)
        self.stored_references[self.reference_ID] = {
            'reference': self.reference,
            'reference_int': self.reference_int
        }

    def load_reference(self, ID):
        for attr in self.stored_backgrounds[ID]:
            setattr(self, attr, self.stored_backgrounds[ID][attr])
        for attr in self.stored_references[ID]:
            setattr(self, attr, self.stored_references[ID][attr])

    def clear_reference(self):
        """Clear the current reference spectrum"""
        self.reference = None
        self.reference_int = None

    def is_background_compensated(self):
        """Return whether there's currently a valid background spectrum"""
        return self.background is not None and \
            self.latest_raw_spectrum is not None and \
            len(self.background) == len(self.latest_raw_spectrum) and \
            sum(self.background) > 0

    def is_referenced(self):
        """Check whether there's currently a valid background and reference spectrum"""
        try:
            return self.is_background_compensated() and \
                len(self.reference)==len(self.latest_raw_spectrum) and \
                sum(self.reference)>0
        except TypeError:
            return False

    def process_spectrum(self, spectrum):
        """Subtract the background and divide by the reference, if possible"""
        if self.background is not None:
            if self.reference is not None:
                old_error_settings = np.seterr(all='ignore')
                if self.variable_int_enabled:
                    new_spectrum = (old_div(
                        (spectrum -
                         (self.background_constant +
                          self.background_gradient * self.integration_time)),
                        (old_div(
                            (self.reference -
                             (self.background_constant +
                              self.background_gradient * self.reference_int)) *
                            self.integration_time, self.reference_int))))
                else:
                    new_spectrum = old_div((spectrum - self.background),
                                           (self.reference - self.background))
                np.seterr(**old_error_settings)
                new_spectrum[np.isinf(
                    new_spectrum
                )] = np.NaN  # if the reference is nearly 0, we get infinities - just make them all NaNs.
            else:
                if self.variable_int_enabled:
                    new_spectrum = spectrum - (
                        self.background_constant +
                        self.background_gradient * self.integration_time)
                else:
                    new_spectrum = spectrum - self.background
        else:
            new_spectrum = spectrum
        if self.absorption_enabled:
            return np.log10(old_div(1, new_spectrum))
        return new_spectrum
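
    # With a fixed integration time, the processing above is the usual referencing formula
    # (S, B, R standing for the raw spectrum, background and reference - hypothetical arrays):
    #
    #     processed = (S - B) / (R - B)
    #
    # With variable_int_enabled, B is replaced by the linear model
    # background_constant + background_gradient * t, evaluated at the spectrum's and the
    # reference's own integration times, and the background-subtracted reference is rescaled
    # to the current integration time before dividing.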

    def read_processed_spectrum(self):
        """Acquire a new spectrum and return a processed (referenced/background-subtracted) spectrum.
        
        NB if saving data to file, it's best to save raw spectra along with metadata - this is a
        convenience method for display purposes."""
        if self.averaging_enabled:
            spectrum = np.average(self.read_averaged_spectrum(fresh=True),
                                  axis=0)
        else:
            spectrum = self.read_spectrum()
        self.latest_spectrum = self.process_spectrum(spectrum)
        return self.latest_spectrum

    def read(self):
        """Acquire a new spectrum and return a tuple of wavelengths, spectrum"""
        return self.wavelengths, self.read_processed_spectrum()

    def mask_spectrum(self, spectrum, threshold):
        """Return a masked array of the spectrum, showing only points where the reference
        is bright enough to be useful."""
        if self.reference is not None and self.background is not None:
            reference = self.reference - self.background
            mask = reference < reference.max() * threshold
            if len(spectrum.shape) > 1:
                mask = np.tile(mask, spectrum.shape[:-1] + (1, ))
            return ma.array(spectrum, mask=mask)
        else:
            return spectrum
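
    # Usage sketch: hide points where the reference is too dim to give a meaningful ratio.
    # The threshold is a fraction of the (background-subtracted) reference's peak; the value
    # 0.1 below is illustrative only:
    #
    #     masked = spectrometer.mask_spectrum(spectrometer.read_processed_spectrum(), 0.1)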

    _preview_widgets = WeakSet()

    def get_qt_ui(self, control_only=False, display_only=False):
        """Create a Qt interface for the spectrometer"""
        if control_only:

            newwidget = SpectrometerControlUI(self)
            self._preview_widgets.add(newwidget)
            return newwidget
        elif display_only:
            return SpectrometerDisplayUI(self)
        else:
            return SpectrometerUI(self)

    def get_control_widget(self):
        """Convenience function """
        return self.get_qt_ui(control_only=True)

    def get_preview_widget(self):
        """Convenience function """
        return self.get_qt_ui(display_only=True)

    def save_spectrum(self, spectrum=None, attrs=None, new_deque=False):
        """Save a spectrum to the current datafile, creating if necessary.

        If no spectrum is passed in, a new spectrum is taken.  The convention
        is to save raw spectra only, along with reference/background to allow
        later processing.

        The attrs dictionary allows extra metadata to be saved in the HDF5 file."""
        if spectrum is None:
            if self.averaging_enabled:
                spectrum = self.read_averaged_spectrum(new_deque=new_deque)
            else:
                spectrum = self.read_spectrum()
        metadata = self.metadata
        if attrs is not None:
            metadata.update(attrs)  # allow extra metadata to be passed in
        self.create_dataset(self.filename, data=spectrum, attrs=metadata)
        # save data in the default place (see nplab.instrument.Instrument)
    def read_averaged_spectrum(self, new_deque=False, fresh=False):
        """Fill the deque of recent spectra up to its maxlen and return it.

        new_deque clears any stored spectra first; fresh forces at least one new acquisition."""
        if new_deque:
            self.spectra_deque.clear()
        if fresh:
            self.spectra_deque.append(self.read_spectrum())
        while len(self.spectra_deque) < self.spectra_deque.maxlen:
            self.spectra_deque.append(self.read_spectrum())
        return self.spectra_deque
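
    # Averaging sketch: the deque's maxlen sets how many spectra are kept, and callers such
    # as read_processed_spectrum average over it. For example (illustrative only, assuming
    # a concrete spectrometer instance named `spectrometer`):
    #
    #     spectrometer.spectra_deque = deque(maxlen=10)
    #     averaged = np.average(spectrometer.read_averaged_spectrum(new_deque=True), axis=0)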

    def save_reference_to_file(self):
        pass

    def load_reference_from_file(self):
        pass

    def time_series(self,
                    num_spectra=None,
                    delay=None,
                    update_progress=lambda p: p):
        """Acquire a series of raw spectra and save them to the current datafile.

        `delay` is the end-to-start delay between successive spectra, in milliseconds."""
        if num_spectra is None:
            num_spectra = self.num_spectra
        if delay is None:
            delay = self.delay
        delay /= 1000  # convert ms to seconds
        update_progress(0)
        metadata = self.metadata
        extra_metadata = {
            'number of spectra': num_spectra,
            'spectrum end-to-start delay': delay
        }
        metadata.update(extra_metadata)
        to_save = []
        times = []
        start = time.time()
        for spectrum_number in range(num_spectra):
            times.append(time.time() - start)
            to_save.append(self.read_spectrum())  # should be a numpy array
            time.sleep(delay)
            update_progress(spectrum_number)
        metadata.update({'start times': times})
        self.create_dataset(self.time_series_name,
                            data=to_save,
                            attrs=metadata)
        to_return = ArrayWithAttrs(to_save, attrs=metadata)
        return to_return
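
    # Usage sketch for the time series (the base class read_spectrum is a stub, so in
    # practice a concrete subclass would be instantiated; names below are illustrative only):
    #
    #     spectra = spectrometer.time_series(num_spectra=10, delay=100)  # 10 spectra, 100 ms apart
    #     spectra.attrs['start times']  # acquisition start times, in seconds from the first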