Пример #1
0
class ScintillaDocument(Atom):
    """ An opaque class which represents a Scintilla text document.

    An instance of this class can be shared with multiple Scintilla
    widgets to enable multiple editor views on the same buffer, or
    to use multiple buffers with the same view.

    """
    #: A uuid which can be used as a handle by the toolkit backend.
    #: The factory generates a fresh hex uuid per instance; Constant makes
    #: the handle immutable once created.
    uuid = Constant(factory=lambda: uuid.uuid4().hex)
Пример #2
0
class ContentsConstrainableMixin(ConstrainableMixin):
    """ An atom mixin class which defines contents constraint members.

    This class implements the ContentsConstrainable interface.

    """
    #: The symbolic left contents boundary of the constrainable.
    contents_left = ConstraintMember()

    #: The symbolic right contents boundary of the constrainable.
    contents_right = ConstraintMember()

    #: The symbolic top contents boundary of the constrainable.
    contents_top = ConstraintMember()

    #: The symbolic bottom contents boundary of the constrainable.
    contents_bottom = ConstraintMember()

    #: A symbolic expression representing the content width.
    contents_width = Constant()

    #: A symbolic expression representing the content height.
    contents_height = Constant()

    #: A symbolic expression representing the content horizontal center.
    contents_h_center = Constant()

    #: A symbolic expression representing the content vertical center.
    contents_v_center = Constant()

    def _default_contents_width(self):
        """Build the width expression from the side boundaries."""
        left, right = self.contents_left, self.contents_right
        return right - left

    def _default_contents_height(self):
        """Build the height expression from the top/bottom boundaries."""
        top, bottom = self.contents_top, self.contents_bottom
        return bottom - top

    def _default_contents_h_center(self):
        """Build the horizontal center expression: left + width / 2."""
        half_width = 0.5 * self.contents_width
        return self.contents_left + half_width

    def _default_contents_v_center(self):
        """Build the vertical center expression: top + height / 2."""
        half_height = 0.5 * self.contents_height
        return self.contents_top + half_height
Пример #3
0
class Tek5014(AWG, AWGDriver):
    """Driver description for the Tektronix 5014 AWG (4 analog channels)."""

    numChannels = Int(default=4)
    seqFileExt = Constant('.awg')

    # One empty entry per analog-pair and marker channel.
    empty_channel_set = {
        'ch12': {},
        'ch34': {},
        'ch1m1': {},
        'ch1m2': {},
        'ch2m1': {},
        'ch2m2': {},
        'ch3m1': {},
        'ch3m2': {},
        'ch4m1': {},
        'ch4m2': {}
    }
    naming_convention = ['12', '34', '1m1', '1m2', '2m1', '2m2',
                         '3m1', '3m2', '4m1', '4m2']

    def read_sequence_file(self, filename):
        """Load a sequence from a Tek-format file."""
        return read_Tek_file(filename)

    def write_sequence_file(self, data, filename):
        """Write *data* to *filename* in Tek format."""
        write_Tek_file(data, filename, 1)
Пример #4
0
class Natural_Constants(Check):
    """Collection of fundamental physical constants exposed as members.

    Each constant carries a human-readable description, its unit, a
    reference value used for sanity checks and its LaTeX symbol.

    """
    # NOTE: all latex tags use raw strings. The previous plain strings
    # ('\hbar', '\epsilon_0', '\pi') relied on unrecognized escape
    # sequences, which emit DeprecationWarning/SyntaxWarning on modern
    # Python; the raw strings below are byte-identical values.
    h = Constant(h).tag(desc="Planck's constant",
                        unit="",
                        check_value=6.62606957e-34,
                        latex='h')

    hbar = Constant(hbar).tag(desc="Reduced Planck's constant",
                              check_value=1.054571726e-34,
                              unit='',
                              latex=r'\hbar')

    kB = Constant(kB).tag(desc="Boltzmann's constant",
                          check_value=1.3806488e-23,
                          unit='',
                          latex='k_B')

    e = Constant(e).tag(desc="charge of an electron",
                        check_value=1.60217657e-19,
                        unit="C",
                        latex='e')

    eps0 = Constant(eps0).tag(desc="permittivity of free space",
                              check_value=8.85418782e-12,
                              unit="F/m",
                              latex=r'\epsilon_0')

    pi = Constant(pi).tag(desc="ratio of circle's circumference to diameter",
                          check_value=3.141592654,
                          unit="",
                          latex=r'\pi')
Пример #5
0
class _Aux(HasPrefAtom):
    # Helper class exercising preference handling for a variety of member
    # types; members tagged pref=True take part in (de)serialization.

    string = Unicode().tag(pref=True)
    # pref=False: explicitly excluded from preference handling.
    float_n = Float().tag(pref=False)
    enum = Enum('a', 'b').tag(pref=True)
    enum_float = Enum(1.0, 2.0).tag(pref=True)
    list_ = List(Float()).tag(pref=True)
    dict_ = Dict(Unicode(), Float()).tag(pref=True)
    value = Value().tag(pref=True)
    const = Constant('r').tag(pref=True)

    # Nested object whose own preferences are handled recursively.
    atom = Typed(_Aaux, ()).tag(pref=True)

    # Untagged member: must be ignored by the preference machinery.
    no_tag = Int()
Пример #6
0
class _Aux(HasPrefAtom):
    # Helper class exercising preference handling for a variety of member
    # types; members tagged pref=True take part in (de)serialization.

    string = Str().tag(pref=True)
    float_n = Float().tag(pref=True)
    enum = Enum('a', 'b').tag(pref=True)
    enum_float = Enum(1.0, 2.0).tag(pref=True)
    list_ = List(Float()).tag(pref=True)
    # Custom serializer/deserializer pair supplied through the tag value.
    odict_ = Typed(OrderedDict,
                   ()).tag(pref=[ordered_dict_to_pref, ordered_dict_from_pref])
    value = Value().tag(pref=True)
    const = Constant('r').tag(pref=True)

    # Nested object whose own preferences are handled recursively.
    atom = Typed(_Aaux, ()).tag(pref=True)

    # Untagged member: must be ignored by the preference machinery.
    no_tag = Int()
Пример #7
0
class Tek7000(AWG):
    """Driver description for the Tektronix 7000 AWG (2 analog channels)."""

    numChannels = Int(default=2)
    seqFileExt = Constant('.awg')

    # One empty entry per analog-pair and marker channel.
    empty_channel_set = {name: {} for name in
                         ('ch12', 'ch1m1', 'ch1m2', 'ch2m1', 'ch2m2')}

    def read_sequence_file(self, filename):
        """Load a sequence from a Tek-format file."""
        return read_Tek_file(filename)

    def write_sequence_file(self, data, filename):
        """Write *data* to *filename* in Tek format."""
        write_Tek_file(data, filename, 1)
Пример #8
0
class APS2(AWG):
    """Driver description for the BBN APS2 AWG."""

    numChannels = Int(default=2)
    seqFileExt = Constant('.h5')

    # One empty entry per analog-pair and marker channel.
    empty_channel_set = {name: {} for name in
                         ('ch12', 'ch12m1', 'ch12m2', 'ch12m3', 'ch12m4')}
    naming_convention = ['12', '12m1', '12m2', '12m3', '12m4']

    def read_sequence_file(self, filename):
        """Load a sequence from an APS2 HDF5 file."""
        return read_APS2_file(filename)

    def write_sequence_file(self, data, filename):
        """Write *data* to *filename* in APS2 HDF5 format."""
        write_APS2_file(data, filename)
Пример #9
0
class APS(AWG):
    """Driver description for the BBN APS AWG (4 analog channels)."""

    numChannels = Int(default=4)
    miniLLRepeat = Int(0).tag(desc='How many times to repeat each miniLL')
    seqFileExt = Constant('.h5')

    # One empty entry per analog-pair and marker channel.
    empty_channel_set = {name: {} for name in
                         ('ch12', 'ch34', 'ch1m1', 'ch2m1', 'ch3m1', 'ch4m1')}
    naming_convention = ['12', '34', '1m1', '2m1', '3m1', '4m1']

    def read_sequence_file(self, filename):
        """Load a sequence from an APS HDF5 file."""
        return read_APS_file(filename)

    def write_sequence_file(self, data, filename):
        """Write *data* to *filename* in APS HDF5 format."""
        write_APS_file(data, filename)
Пример #10
0
        m = member

    assert A.m.delattr_mode[0] == DelAttr.NoOp
    a = A()
    a.m = 1
    del a.m
    assert a.m == 1
    assert A.m.do_delattr(a) is None
    assert a.m == 1


@pytest.mark.parametrize("member, mode",
                         [(Event(), DelAttr.Event), (Signal(), DelAttr.Signal),
                          (ReadOnly(), DelAttr.ReadOnly),
                          (Constant(1), DelAttr.Constant)
                          ])
def test_undeletable(member, mode):
    """Test that unsettable members do raise the proper error.

    """
    class Holder(Atom):

        m = member

    # The member should report the expected delete-attribute mode.
    assert Holder.m.delattr_mode[0] == mode
    obj = Holder()
    # Both deletion entry points must refuse with TypeError.
    with pytest.raises(TypeError):
        delattr(obj, 'm')
    with pytest.raises(TypeError):
        Holder.m.do_delattr(obj)
Пример #11
0
    delegate_handler: not tested here (see test_delegate.py)
    property_handler: not tested here (see test_property.py)
    call_object_object_value_handler: use a custom function
    call_object_object_name_value_handler: use a custom function
    object_method_value_handler: use an object method
    object_method_name_value_handler: use an object method
    member_method_object_value_handler: method defined on a Member subclass

"""
import pytest

from atom.api import Atom, Constant, Int, ReadOnly, SetAttr, Signal


@pytest.mark.parametrize("member, mode", [(Signal(), "Signal"),
                                          (Constant(1), "Constant")])
def test_unsettable(member, mode):
    """Test that unsettable members do raise the proper error."""
    class Frozen(Atom):

        m = member

    obj = Frozen()
    # The member should report the expected set-attribute mode.
    expected = getattr(SetAttr, mode)
    assert obj.get_member("m").setattr_mode[0] == expected
    # Assignment must fail with a TypeError mentioning the mode.
    with pytest.raises(TypeError) as excinfo:
        setattr(obj, "m", 1)
    assert mode.lower() in excinfo.exconly()


@pytest.mark.parametrize("member, mode", [(Int(), "Slot"),
                                          (ReadOnly(), "ReadOnly")])
Пример #12
0
class BaseInterface(HasPrefAtom):
    """Base class to use for interfaces.

    The interface should not re-use member names used by the task to avoid
    issues when walking.

    This class should not be used directly, use one of its subclasses.

    """
    #: Identifier for the build dependency collector
    dep_type = Constant(DEP_TYPE).tag(pref=True)

    #: Id of the interface preceded by the ids of all its anchors separated by
    #: ':'. Used for persistence purposes.
    interface_id = Str().tag(pref=True)

    #: Dict of database entries added by the interface.
    database_entries = Dict()

    def check(self, *args, **kwargs):
        """Check that everything is alright before starting a measurement.

        By default tries to format all members tagged with 'fmt' and tries to
        eval all members tagged with 'feval'. If the tag value is 'Warn', the
        test will be considered passed but a traceback entry will be filled.

        """
        res = True
        traceback = {}
        task = self.task
        # Errors are reported under the path/name of the task owning this
        # interface.
        err_path = task.path + '/' + task.name
        for n, m in tagged_members(self, 'fmt').items():
            try:
                val = task.format_string(getattr(self, n))
                if n in self.database_entries:
                    task.write_in_database(n, val)
            except Exception:
                # A 'Warn' tag downgrades a formatting failure to a warning.
                if m.metadata['fmt'] != 'Warn':
                    res = False
                msg = 'Failed to format %s : %s' % (n, format_exc())
                traceback[err_path + '-' + n] = msg

        for n, m in tagged_members(self, 'feval').items():
            val = m.metadata['feval']
            if not isinstance(val, validators.Feval):
                res = False
                # msg is non-empty here, so 'value' is never read below in
                # this branch.
                msg = 'Feval validator is not a subclass validators.Feval'
            else:
                value, f_res, msg = val.check(self, n)
                res &= f_res

            if msg:
                traceback[err_path + '-' + n] = msg
            elif value is not None and n in self.database_entries:
                task.write_in_database(n, value)

        return res, traceback

    def prepare(self):
        """Prepare the interface to be performed.

        This method is called once by the parent task before starting the
        execution.

        """
        pass

    def perform(self, *args, **kwargs):
        """Method called by the parent perform method.

        Subclasses must override this.

        """
        raise NotImplementedError()

    def traverse(self, depth=-1):
        """Method used to retrieve information about a task.

        Parameters
        ----------
        depth : int
            How deep should we stop traversing.

        """
        yield self

    @classmethod
    def build_from_config(cls, config, dependencies):
        """ Create an interface using the provided dict.

        """
        interface = cls()
        interface.update_members_from_preferences(config)
        return interface
Пример #13
0
    class A(Atom):
        # Constant whose value is provided lazily by the default method the
        # first time the member is accessed.
        c = Constant(kind=int)

        def _default_c(self):
            # id() of the instance: an int unique for the object's lifetime.
            return id(self)
Пример #14
0
    event_handler: not tested here (see test_observe.py)
    signal_handler
    delegate_handler: not tested here (see test_delegate.py)
    property_handler: not tested here (see test_property.py)
    call_object_object_value_handler: not used as far as I can tell
    call_object_object_name_value_handler: not used as far as I can tell
    object_method_value_handler: not used as far as I can tell
    object_method_name_value_handler: not used as far as I can tell
    member_method_object_value_handler: method defined on a Member subclass

"""
import pytest
from atom.api import (Atom, Int, Constant, Signal, ReadOnly, SetAttr)


@pytest.mark.parametrize("member", [(Signal(), ), (Constant(1), )])
def test_unsettable(member):
    """Test that unsettable members do raise the proper error.

    Assigning to a Signal or Constant member raises TypeError (not
    AttributeError), matching atom's setattr handlers for those modes.

    """
    class Unsettable(Atom):

        m = member

    u = Unsettable()
    with pytest.raises(TypeError):
        u.m = None


def test_read_only_behavior():
    """Test the behavior of read only member.
Пример #15
0
    signal_handler: signals are not settable
    delegate_handler: not tested here (see test_delegate.py)
    property_handler: not tested here (see test_property.py)
    call_object_object_value_handler: use a custom function
    call_object_object_name_value_handler: use a custom function
    object_method_value_handler: use an object method
    object_method_name_value_handler: use an object method
    member_method_object_value_handler: method defined on a Member subclass

"""
import pytest
from atom.api import (Atom, Int, Constant, Signal, ReadOnly, SetAttr)


@pytest.mark.parametrize("member, mode", [(Signal(), 'Signal'),
                                          (Constant(1), 'Constant')])
def test_unsettable(member, mode):
    """Unsettable members must raise TypeError mentioning their mode.

    """
    class Locked(Atom):

        m = member

    obj = Locked()
    # The member should report the expected set-attribute mode.
    assert obj.get_member('m').setattr_mode[0] == getattr(SetAttr, mode)
    # Assignment must fail with a TypeError mentioning the mode.
    with pytest.raises(TypeError) as excinfo:
        setattr(obj, 'm', 1)
    assert mode.lower() in excinfo.exconly()

Пример #16
0
class Item(HasEvaluableFields):
    """ Base component of a pulse Sequence

    """
    #: Identifier for the build dependency collector
    dep_type = Constant(DEP_TYPE).tag(pref=True)

    #: Index identifying the item inside the sequence.
    index = Int()

    #: Flag to disable a particular item.
    enabled = Bool(True).tag(pref=True)

    #: Class of the item to use when rebuilding a sequence.
    item_id = Unicode().tag(pref=True)

    #: Name of the variables which can be referenced in other items.
    #: Those should not contain the index of the item.
    linkable_vars = List()

    #: Reference to the parent sequence.
    parent = ForwardTyped(sequence)

    #: Reference to the root sequence.
    #: No type checking is performed to allow templates to replace the real
    #: root.
    # TODO use an ABC for sequences (will need a name : AbstractRootSequence)
    root = Value()

    #: Boolean representing whether this item has a root sequence or not
    has_root = Bool(False)

    #: Mode defining how the def_1 and def_2 attrs should be interpreted.
    def_mode = Enum('Start/Stop', 'Start/Duration',
                    'Duration/Stop').tag(pref=True)

    #: String representing the item first element of definition : according
    #: to the selected mode its evaluated value will either be used for the
    #: start instant, or duration of the item.
    def_1 = Unicode().tag(pref=True, feval=Feval(types=Real))

    #: String representing the item second element of definition : according
    #: to the selected mode its evaluated value will either be used for the
    #: duration, or stop instant of the item.
    def_2 = Unicode().tag(pref=True, feval=Feval(types=Real))

    #: Actual start instant of the item with respect to the beginning of the
    #: root sequence. The unit of this time depends on the setting of the
    #: context.
    start = Float()

    #: Actual duration of the item. The unit of this time depends on the
    #: setting of the context.
    duration = Float()

    #: Actual stop instant of the item with respect to the beginning of the
    #: root sequence. The unit of this time depends on the setting of the
    #: context.
    stop = Float()

    def eval_entries(self, root_vars, sequence_locals, missings, errors):
        """ Attempt to eval the def_1 and def_2 parameters of the item.

        Parameters
        ----------
        root_vars : dict
            Dictionary of global variables for all the items. This will
            typically contain the i_start/stop/duration and the root vars.

        sequence_locals : dict
            Dictionary of variables whose scope is limited to this item's
            parent.

        missings : set
            Set of unfound local variables.

        errors : dict
            Dict of the errors which happened when performing the evaluation.

        Returns
        -------
        flag : bool
            Boolean indicating whether or not the evaluation succeeded.

        """
        res = super(Item, self).eval_entries(root_vars, sequence_locals,
                                             missings, errors)

        # Reference to the sequence context.
        context = self.root.context

        # Names of the parameters which will be evaluated, e.g. 'start' and
        # 'stop' for the 'Start/Stop' mode.
        par1 = self.def_mode.split('/')[0].lower()
        par2 = self.def_mode.split('/')[1].lower()
        prefix = '{}_'.format(self.index)

        # Validation of the first parameter.
        d1 = self._cache.get('def_1')
        if 'def_1' in self._cache:
            try:
                d1 = context.check_time(d1)
            except Exception as e:
                res = False
                errors[prefix + par1] = repr(e)
            else:
                # Check the value makes sense as a start time or duration.
                if d1 >= 0:
                    setattr(self, par1, d1)
                    root_vars[prefix + par1] = d1
                    sequence_locals[prefix + par1] = d1
                else:
                    res = False
                    if par1 == 'start':
                        m = 'Got a strictly negative value for start: {}'
                    else:
                        m = 'Got a strictly negative value for duration: {}'

                    errors[prefix + par1] = m.format(d1)

        # Validation of the second parameter.
        if 'def_2' in self._cache:
            d2 = self._cache['def_2']
            try:
                d2 = context.check_time(d2)
            except Exception as e:
                res = False
                errors[prefix + par2] = repr(e)
            else:
                # Check the value makes sense as a duration or stop time.
                if d2 >= 0 and (par2 == 'duration' or d1 is None or d2 >= d1):
                    setattr(self, par2, d2)
                    root_vars[prefix + par2] = d2
                    sequence_locals[prefix + par2] = d2
                else:
                    # Every failure case below assigns msg/args before they
                    # are used: stop <= 0, stop < start, duration < 0.
                    res = False
                    if par2 == 'stop' and d2 <= 0.0:
                        msg = 'Got a negative or null value for stop: {}'
                        args = (d2, )
                    elif par2 == 'stop':
                        msg = 'Got a stop smaller than start: {} < {}'
                        args = (d1, d2)
                    elif d2 < 0.0:
                        msg = 'Got a negative value for duration: {}'
                        args = (d2, )

                    errors[prefix + par2] = msg.format(*args)

        # Computation of the third parameter from the other two.
        if 'def_1' in self._cache and 'def_2' in self._cache and res:
            if self.def_mode == 'Start/Duration':
                self.stop = d1 + d2
                root_vars[prefix + 'stop'] = self.stop
                sequence_locals[prefix + 'stop'] = self.stop
            elif self.def_mode == 'Start/Stop':
                self.duration = d2 - d1
                root_vars[prefix + 'duration'] = self.duration
                sequence_locals[prefix + 'duration'] = self.duration
            else:
                # 'Duration/Stop' mode: start = stop - duration.
                self.start = d2 - d1
                root_vars[prefix + 'start'] = self.start
                sequence_locals[prefix + 'start'] = self.start

        return res

    def traverse(self, depth=-1):
        """Yield an item and all of its components.

        The base implementation simply yields the item itself.

        Parameters
        ----------
        depth : int
            How deep should we explore the tree of items. When this number
            reaches zero deeper children should not be explored but simply
            yielded. Negative values should be considered valid.

        """
        yield self

    def format_global_vars_id(self, member):
        """Format the id under which a value is added in the global vars.

        def_1 and def_2 are not added so no special case here.

        """
        return '{}_{}'.format(self.index, member)

    def format_error_id(self, member):
        """Create the error id for a member.

        Special case def_1 and def_2 to provide a clearer error id.

        """
        if member in ('def_1', 'def_2'):
            ind = 0 if member == 'def_1' else 1
            member = self.def_mode.split('/')[ind].lower()

        return '{}_{}'.format(self.index, member)

    # --- Private API ---------------------------------------------------------

    def _default_item_id(self):
        """ Default value for the item_id member.

        Built from the top-level package name and the class name.

        """
        pack, _ = self.__module__.split('.', 1)
        return pack + '.' + type(self).__name__

    def _post_setattr_root(self, old, new):
        """Make sure that all children get all the info they need to behave
        correctly when the item gets its root parent.

        """
        if new is None:
            self.has_root = False
            return

        self.has_root = True
Пример #17
0
class BaseTask(Atom):
    """Base class defining common members of all Tasks.

    This class basically defines the minimal skeleton of a Task in term of
    members and methods.

    """
    #: Identifier for the build dependency collector
    dep_type = Constant(DEP_TYPE).tag(pref=True)

    #: Name of the class, used for persistence.
    task_id = Unicode().tag(pref=True)

    #: Name of the task this should be unique in hierarchy.
    name = Unicode().tag(pref=True)

    #: Depth of the task in the hierarchy. This should not be manipulated
    #: directly by user code.
    depth = Int()

    #: Reference to the Section in which the task stores its preferences.
    preferences = Typed(Section)

    #: Reference to the database used by the task to exchange information.
    database = Typed(TaskDatabase)

    #: Entries the task declares in the database and the associated default
    #: values. This should be copied and re-assigned when modified, not
    #: modified in place.
    database_entries = Dict(Unicode(), Value())

    #: Path of the task in the hierarchy. This refers to the parent task and
    #: is used when writing in the database.
    path = Unicode()

    #: Reference to the root task in the hierarchy.
    root = ForwardTyped(lambda: RootTask)

    #: Reference to the parent task.
    parent = ForwardTyped(lambda: BaseTask)

    #: Unbound method called when the task is asked to do its job. This is
    #: basically the perform method but wrapped with useful stuff such as
    #: interruption check or parallel, wait features.
    perform_ = Callable()

    #: Flag indicating if this task can be stopped.
    stoppable = Bool(True).tag(pref=True)

    #: Dictionary indicating whether the task is executed in parallel
    #: ('activated' key) and which pool it belongs to ('pool' key).
    parallel = Dict(Unicode()).tag(pref=True)

    #: Dictionary indicating whether the task should wait on any pool before
    #: performing its job. Three valid keys can be used :
    #: - 'activated' : a bool indicating whether or not to wait.
    #: - 'wait' : the list should then specify which pool should be waited.
    #: - 'no_wait' : the list should specify which pool not to wait on.
    wait = Dict(Unicode()).tag(pref=True)

    #: Dict of access exception in the database. This should not be manipulated
    #: by user code.
    access_exs = Dict().tag(pref=True)

    def perform(self):
        """ Main method of the task called when the measurement is performed.

        Subclasses must override this method.

        """
        # Build the message first, then raise: behavior is identical to
        # raising with the nested call expression.
        details = cleandoc('''This method should be implemented by subclasses of
            BaseTask. This method is called when the program requires the task
            to perform its job.''')
        raise NotImplementedError(fill(details))

    def check(self, *args, **kwargs):
        """Check that everything is alright before starting a measurement.

        By default tries to format all members tagged with 'fmt' and tries to
        eval all members tagged with 'feval'.

        """
        res = True
        traceback = {}
        err_path = self.get_error_path()
        for n, m in tagged_members(self, 'fmt').items():
            try:
                val = self.format_string(getattr(self, n))
                if n in self.database_entries:
                    self.write_in_database(n, val)
            except Exception:
                # A 'Warn' tag downgrades a formatting failure to a warning.
                if m.metadata['fmt'] != 'Warn':
                    res = False
                msg = 'Failed to format %s : %s' % (n, format_exc())
                traceback[err_path + '-' + n] = msg

        for n, m in tagged_members(self, 'feval').items():
            val = m.metadata['feval']
            if not isinstance(val, validators.Feval):
                res = False
                # msg is non-empty here, so 'value' is never read below in
                # this branch.
                msg = 'Feval validator is not a subclass validators.Feval'
            else:
                value, f_res, msg = val.check(self, n)
                res &= f_res

            if msg:
                traceback[err_path + '-' + n] = msg
            elif value is not None and n in self.database_entries:
                self.write_in_database(n, value)

        return res, traceback

    def prepare(self):
        """Prepare the task to be performed.

        This method is called once by the root task before starting the
        execution of its children tasks. By default it simply builds the
        perform_ method by wrapping perform with the appropriate decorators.
        This method can be overridden to execute other actions, however keep
        in mind that those actions must not depend on the state of the system
        (no link to database).

        """
        # Wrapping order matters: parallel first, then wait, then stoppable
        # outermost so the stop check happens before anything else runs.
        perform_func = self.perform.__func__
        parallel = self.parallel
        if parallel.get('activated') and parallel.get('pool'):
            perform_func = make_parallel(perform_func, parallel['pool'])

        wait = self.wait
        if wait.get('activated'):
            perform_func = make_wait(perform_func, wait.get('wait'),
                                     wait.get('no_wait'))

        if self.stoppable:
            perform_func = make_stoppable(perform_func)

        # Bind the wrapped function to this instance.
        self.perform_ = MethodType(perform_func, self)

    def register_preferences(self):
        """Create the task entries in the preferences object.

        Subclasses must override this method.

        """
        raise NotImplementedError()

    def update_preferences_from_members(self):
        """Update the entries in the preference object.

        Subclasses must override this method.

        """
        raise NotImplementedError()

    @classmethod
    def build_from_config(cls, config, dependencies):
        """Create a new instance using the provided infos for initialisation.

        Subclasses must override this method.

        Parameters
        ----------
        config : dict(str)
            Dictionary holding the new values to give to the members in string
            format, or dictionary-like for instance with prefs.

        dependencies : dict
            Dictionary holding the necessary classes needed when rebuilding.

        """
        raise NotImplementedError()

    def traverse(self, depth=-1):
        """Yield a task and all of its components.

        The base implementation simply yields the task itself.

        Parameters
        ----------
        depth : int
            How deep should we explore the tree of tasks. When this number
            reaches zero deeper children should not be explored but simply
            yielded.

        """
        yield self

    def register_in_database(self):
        """ Register the task entries into the database.

        """
        if not self.database_entries:
            return

        for entry in self.database_entries:
            # Deepcopy the default value so later mutations through the
            # database (lists, dicts) cannot corrupt the stored default.
            self.write_in_database(entry,
                                   deepcopy(self.database_entries[entry]))

        for access_ex, level in self.access_exs.items():
            self._add_access_exception(access_ex, level)

    def unregister_from_database(self):
        """ Remove the task entries from the database.

        """
        if not self.database_entries:
            return

        database = self.database
        for entry in self.database_entries:
            database.delete_value(self.path, self._task_entry(entry))

        for access_ex, level in self.access_exs.items():
            self._remove_access_exception(access_ex, level)

    def add_access_exception(self, entry, level):
        """Add an access exception for an entry.

        Parameters
        ----------
        entry : unicode
            Name of the task database entry for which to add an exception.

        level : int
            Number of hierarchical levels to go up when adding the exception.

        """
        self._add_access_exception(entry, level)
        # Re-assign a copy rather than mutating in place so member change
        # notifications fire.
        updated = self.access_exs.copy()
        updated[entry] = level
        self.access_exs = updated

    def modify_access_exception(self, entry, new):
        """Modify the level of an existing access exception.

        Parameters
        ----------
        entry : unicode
            Name of the task database entry for which to modify an exception.

        new : int
            New level for the access exception. If this is not strictly
            positive the access exception is simply removed.

        """
        access_exs = self.access_exs.copy()
        old = access_exs[entry]
        if new > 0:
            access_exs[entry] = new
        else:
            del access_exs[entry]
        full_name = self._task_entry(entry)

        # Walk 'old' levels up the hierarchy to find the node holding the
        # current exception and remove it.
        parent = self
        while old:
            parent = parent.parent
            old -= 1
        self.database.remove_access_exception(parent.path, full_name)

        # Re-install the exception at the new level when it is kept.
        if new > 0:
            parent = self
            while new:
                parent = parent.parent
                new -= 1
            self.database.add_access_exception(parent.path, self.path,
                                               full_name)

        # Re-assign a copy so member change notifications fire.
        self.access_exs = access_exs

    def remove_access_exception(self, entry):
        """Remove an access exception.

        Parameters
        ----------
        entry : unicode
            Name of the task database entry for which to remove an exception.

        """
        # Re-assign a copy so member change notifications fire.
        updated = self.access_exs.copy()
        level = updated.pop(entry)
        self.access_exs = updated
        self._remove_access_exception(entry, level)

    def write_in_database(self, name, value):
        """Write a value to the right database entry.

        This method builds a task specific database entry from the name
        argument and sets the database entry to the specified value.

        Parameters
        ----------
        name : str
            Simple name of the entry whose value should be set, ie no task
            name required.

        value:
            Value to give to the entry.

        """
        full_name = self._task_entry(name)
        return self.database.set_value(self.path, full_name, value)

    def get_from_database(self, full_name):
        """Access to a database value using full name.

        Parameters
        ----------
        full_name : str
            Full name of the database entry, ie name + '_' + entry,
            where name is the name of the task that wrote the value in
            the database.

        """
        database = self.database
        return database.get_value(self.path, full_name)

    def remove_from_database(self, full_name):
        """Delete a database entry using its full name.

        Parameters
        ----------
        full_name : str
            Full name of the database entry, ie name + '_' + entry,
            where name is the name of the task that wrote the value in
            the database.

        """
        database = self.database
        return database.delete_value(self.path, full_name)

    def list_accessible_database_entries(self):
        """List the database entries accessible from this task.

        """
        database = self.database
        return database.list_accessible_entries(self.path)

    def format_string(self, string):
        """Replace values between {} by their corresponding database value.

        Parameters
        ----------
        string : str
            The string to format using the current values of the database.

        Returns
        -------
        formatted : str
            Formatted version of the input.

        Notes
        -----
        In running mode the formatting is cached: the string is rewritten
        so that its replacement fields refer to database indexes, which
        makes every subsequent formatting of the same string much faster.

        """
        # If a cache evaluation of the string already exists use it.
        if string in self._format_cache:
            preformatted, ids = self._format_cache[string]
            vals = self.database.get_values_by_index(ids, PREFIX)
            return preformatted.format(**vals)

        # Otherwise if we are in running mode build a cache formatting.
        elif self.database.running:
            database = self.database
            aux_strings = string.split('{')
            if len(aux_strings) > 1:
                # After the double split, elements alternates between
                # literal text (even indexes) and database entry names
                # (odd indexes).
                elements = [el for aux in aux_strings for el in aux.split('}')]
                database_indexes = database.get_entries_indexes(
                    self.path, elements[1::2])
                str_to_format = ''
                length = len(elements)
                for i in range(0, length, 2):
                    if i + 1 < length:
                        # Replace the entry name by an index-based token
                        # so later lookups bypass name resolution.
                        repl = PREFIX + str(database_indexes[elements[i + 1]])
                        str_to_format += elements[i] + '{' + repl + '}'
                    else:
                        str_to_format += elements[i]

                indexes = database_indexes.values()
                self._format_cache[string] = (str_to_format, indexes)
                vals = self.database.get_values_by_index(indexes, PREFIX)
                return str_to_format.format(**vals)
            else:
                # No replacement field: cache and return the string as-is.
                self._format_cache[string] = (string, [])
                return string

        # In edition mode simply perform the formatting as execution time
        # is not critical.
        else:
            database = self.database
            aux_strings = string.split('{')
            if len(aux_strings) > 1:
                elements = [el for aux in aux_strings for el in aux.split('}')]
                # Odd indexes hold the entry names to look up directly.
                replacement_values = [
                    database.get_value(self.path, key)
                    for key in elements[1::2]
                ]
                str_to_format = ''
                for key in elements[::2]:
                    str_to_format += key + '{}'

                # Drop the trailing '{}' added after the last literal chunk.
                str_to_format = str_to_format[:-2]

                return str_to_format.format(*replacement_values)
            else:
                return string

    def format_and_eval_string(self, string):
        """ Replace values in {} by their corresponding database value and eval

        Parameters
        ----------
        string : str
            The string to eval using the current values of the database.

        Returns
        -------
        value : object
            Evaluated version of the input.

        Notes
        -----
        In running mode the evaluation is cached: entry names between {}
        are rewritten into index-based identifiers that can be looked up
        directly in the database on subsequent calls.

        """
        # If a cache evaluation of the string already exists use it.
        if string in self._eval_cache:
            preformatted, ids = self._eval_cache[string]
            vals = self.database.get_values_by_index(ids, PREFIX)
            # NOTE(review): safe_eval is assumed to restrict the evaluation
            # namespace to the provided values -- confirm its implementation.
            return safe_eval(preformatted, vals)

        # Otherwise if we are in running mode build a cache evaluation.
        elif self.database.running:
            database = self.database
            aux_strings = string.split('{')
            if len(aux_strings) > 1:
                # elements alternates between literal code (even indexes)
                # and database entry names (odd indexes).
                elements = [el for aux in aux_strings for el in aux.split('}')]
                database_indexes = database.get_entries_indexes(
                    self.path, elements[1::2])
                str_to_eval = ''
                length = len(elements)
                for i in range(0, length, 2):
                    if i + 1 < length:
                        # Entry names become bare index-based identifiers;
                        # no braces since the string is evaluated, not
                        # formatted.
                        repl = PREFIX + str(database_indexes[elements[i + 1]])
                        str_to_eval += elements[i] + repl
                    else:
                        str_to_eval += elements[i]

                indexes = database_indexes.values()
                self._eval_cache[string] = (str_to_eval, indexes)
                vals = self.database.get_values_by_index(indexes, PREFIX)
                return safe_eval(str_to_eval, vals)
            else:
                self._eval_cache[string] = (string, [])
                return safe_eval(string, {})

        # In edition mode simply perform the evaluation as execution time
        # is not critical and as the database has not been collapsed to an
        # indexed representation.
        else:
            database = self.database
            aux_strings = string.split('{')
            if len(aux_strings) > 1:
                elements = [el for aux in aux_strings for el in aux.split('}')]
                # Build neutral tokens to substitute for the entry names so
                # the expression can be evaluated against a value mapping.
                replacement_token = [
                    PREFIX + str(i) for i in range(len(elements[1::2]))
                ]
                repl = {
                    PREFIX + str(i): database.get_value(self.path, key)
                    for i, key in enumerate(elements[1::2])
                }
                str_to_format = ''
                for key in elements[::2]:
                    str_to_format += key + '{}'

                # Drop the trailing '{}' added after the last literal chunk.
                str_to_format = str_to_format[:-2]

                expr = str_to_format.format(*replacement_token)
                return safe_eval(expr, repl)
            else:
                return safe_eval(string, {})

    def get_error_path(self):
        """Build the path used when reporting errors during checks.

        Returns
        -------
        str
            Database path of the task followed by its name.

        """
        return '/'.join((self.path, self.name))

    # =========================================================================
    # --- Private API ---------------------------------------------------------
    # =========================================================================

    #: Mapping from already processed strings to their pre-formatted
    #: template and the database indexes it refers to. Speeds up
    #: format_string; only populated in running mode.
    _format_cache = Dict()

    #: Mapping from already processed strings to their pre-built expression
    #: and the database indexes it refers to. Speeds up
    #: format_and_eval_string; only populated in running mode.
    _eval_cache = Dict()

    def _default_task_id(self):
        """Default value for the task_id member.

        """
        pack, _ = self.__module__.split('.', 1)
        return pack + '.' + type(self).__name__

    def _post_setattr_database_entries(self, old, new):
        """Keep the database in sync when the declared entries change.

        Entries that disappeared are removed from the database, newly
        declared ones are written with a deep copy of their default value,
        and access exceptions pointing to removed entries are dropped.

        """
        if not self.database:
            return

        new_entries = set(new)
        old_entries = set(old) if old else set()
        removed = old_entries - new_entries
        added = new_entries - old_entries

        for entry in removed:
            self.remove_from_database(self._task_entry(entry))

        for entry in added:
            self.write_in_database(entry,
                                   deepcopy(self.database_entries[entry]))

        for entry in removed:
            if entry in self.access_exs:
                self.remove_access_exception(entry)

    def _post_setattr_name(self, old, new):
        """Update the database entries as they use the task name.

        All entries written by the task are prefixed with its name, so a
        rename requires renaming every entry (and transferring the
        associated access exceptions) inside the database.

        """
        # Nothing to do before the task has a name or a database.
        if not old or not self.database:
            return

        olds = [old + '_' + e for e in self.database_entries]
        news = [new + '_' + e for e in self.database_entries]
        # The filter keeps only exceptions whose entry is still declared in
        # database_entries (the condition is equivalent to
        # 'k in self.database_entries').
        old_access = {
            old + '_' + k: v
            for k, v in self.access_exs.items() if old + '_' + k in olds
        }
        self.database.rename_values(self.path, olds, news, old_access)

    def _add_access_exception(self, entry, level):
        """Add an access exception without modifying the access_exs member.

        """
        parent = self
        while level:
            parent = parent.parent
            level -= 1
        self.database.add_access_exception(parent.path, self.path,
                                           self._task_entry(entry))

    def _remove_access_exception(self, entry, level):
        """Remove the access without modifying the access_exs member.

        """
        parent = self
        while level:
            parent = parent.parent
            level -= 1
        full_name = self._task_entry(entry)
        self.database.remove_access_exception(parent.path, full_name)

    def _task_entry(self, entry):
        """Build the full name of an entry for a task.

        """
        return self.name + '_' + entry
Пример #18
0
class AbstractShape(HasEvaluableFields):
    """Base class for all pulse shapes.

    Notes
    -----
    The entries of the shape are evaluated only if the pulse it is
    attached to succeeded in evaluating its own entries. As a consequence
    the pulse start, stop and duration are always present in the local
    variables and accessible under '{self.index}_start/stop/duration'.

    When compute is called, all the evaluated shape parameters are
    available under their name in the _cache attribute.

    """
    #: Identifier for the build dependency collector
    dep_type = Constant(DEP_TYPE).tag(pref=True)

    #: Id of the shape used to query it from the plugin.
    shape_id = Unicode().tag(pref=True)

    #: Index of the parent pulse. This is set when evaluating the entries.
    index = Int()

    def compute(self, time, unit):
        """Compute the amplitude of the pulse at the given times.

        Parameters
        ----------
        time : ndarray
            Times at which to compute the modulation.

        unit : str
            Unit in which the time is expressed.

        Returns
        -------
        shape : ndarray
            Amplitudes of the pulse at the given times.

        """
        raise NotImplementedError('')

    def format_error_id(self, member):
        """Assemble the id used to report an evaluation error.

        """
        return '%s_shape_%s' % (self.index, member)

    def format_global_vars_id(self, member):
        """Shapes are not allowed to store in the global namespace so raise.

        """
        raise RuntimeError(
            'Shapes cannot store values as global (from pulse {})'
            .format(self.index))

    # =========================================================================
    # --- Private API ---------------------------------------------------------
    # =========================================================================

    def _default_shape_id(self):
        """Build the default shape id: top-level package name + class name.

        """
        root, _remainder = self.__module__.split('.', 1)
        return '{}.{}'.format(root, type(self).__name__)
Пример #19
0
    class Unsettable(Atom):

        # Constant member whose setattr mode is switched to NoOp:
        # assignments to 'm' are silently ignored instead of raising.
        m = Constant("1")
        m.set_setattr_mode(SetAttr.NoOp, None)
Пример #20
0
class QtStatusItem(QtToolkitObject, ProxyStatusItem):
    """ A Qt implementation of an Enaml ProxyStatusItem.

    """
    #: The status item has no widget representation. All child widgets
    #: will be reparented by the status bar during the layout pass.
    widget = Constant(None)

    def create_widget(self):
        """ A reimplemented parent class method.

        This item creates no widget of its own (see 'widget' above).

        """
        pass

    def destroy(self):
        """ A reimplemented parent class destructor.

        """
        del self.declaration

    #--------------------------------------------------------------------------
    # Utility Methods
    #--------------------------------------------------------------------------
    def status_widget(self):
        """ Get the status widget defined for this item.

        Returns None when the declaration provides no widget.

        """
        d = self.declaration.status_widget()
        if d is not None:
            return d.proxy.widget

    def is_permanent(self):
        """ Get whether this status item should be permanent.

        """
        return self.declaration.mode == 'permanent'

    def stretch(self):
        """ Get the stretch factor to apply to the item.

        """
        return self.declaration.stretch

    #--------------------------------------------------------------------------
    # Private API
    #--------------------------------------------------------------------------
    def _refresh(self):
        """ Ask the parent status bar, if any, to refresh this item.

        Factored out of the child event and ProxyStatusItem handlers
        which all shared this exact body.

        """
        parent = self.parent()
        if parent is not None:
            parent.refresh_item(self)

    #--------------------------------------------------------------------------
    # Child Events
    #--------------------------------------------------------------------------
    def child_added(self, child):
        """ Handle the child added event for the status item.

        """
        self._refresh()

    def child_removed(self, child):
        """ Handle the child removed event for the status item.

        """
        self._refresh()

    #--------------------------------------------------------------------------
    # ProxyStatusItem API
    #--------------------------------------------------------------------------
    def set_mode(self, mode):
        """ Set the mode of the status item.

        """
        self._refresh()

    def set_stretch(self, stretch):
        """ Set the stretch factor of the status item.

        """
        self._refresh()
Пример #21
0
class Container(Frame):
    """ A Frame subclass which provides child layout functionality.

    The Container is the canonical component used to arrange child
    widgets using constraints-based layout. The developer can supply
    a list of constraints on the container which specify how to layout
    its child widgets.

    There are widgets whose boundaries constraints may not cross. Some
    examples of these would be a ScrollArea or a Notebook. See the
    documentation of a given widget as to whether or not constraints
    may cross its boundaries.

    """
    #: A boolean which indicates whether or not to allow the layout
    #: ownership of this container to be transferred to an ancestor.
    #: This is False by default, which means that every container
    #: gets its own layout solver. This improves speed and reduces
    #: memory use (by keeping a solver's internal tableaux small)
    #: but at the cost of not being able to share constraints
    #: across Container boundaries. This flag must be explicitly
    #: marked as True to enable sharing.
    share_layout = d_(Bool(False))

    #: A constant symbolic object that represents the internal left
    #: boundary of the content area of the container.
    contents_left = ConstraintMember()

    #: A constant symbolic object that represents the internal right
    #: boundary of the content area of the container.
    contents_right = ConstraintMember()

    #: A constant symbolic object that represents the internal top
    #: boundary of the content area of the container.
    contents_top = ConstraintMember()

    #: A constant symbolic object that represents the internal bottom
    #: boundary of the content area of the container.
    contents_bottom = ConstraintMember()

    #: A constant symbolic object that represents the internal width of
    #: the content area of the container. Derived from the left and
    #: right boundaries.
    contents_width = Constant()

    def _default_contents_width(self):
        return self.contents_right - self.contents_left

    #: A constant symbolic object that represents the internal height of
    #: the content area of the container. Derived from the top and
    #: bottom boundaries.
    contents_height = Constant()

    def _default_contents_height(self):
        return self.contents_bottom - self.contents_top

    #: A constant symbolic object that represents the internal center
    #: along the vertical direction of the content area of the container.
    contents_v_center = Constant()

    def _default_contents_v_center(self):
        return self.contents_top + self.contents_height / 2.0

    #: A constant symbolic object that represents the internal center
    #: along the horizontal direction of the content area of the container.
    contents_h_center = Constant()

    def _default_contents_h_center(self):
        return self.contents_left + self.contents_width / 2.0

    #: A box object which holds the padding for this component. The
    #: padding is the amount of space between the outer boundary box
    #: and the content box. The default padding is (10, 10, 10, 10).
    #: Certain subclasses, such as GroupBox, may provide additional
    #: margin than what is specified by the padding.
    padding = d_(Coerced(Box, (10, 10, 10, 10)))

    #: Containers freely expand in width and height. The size hint
    #: constraints for a Container are used when the container is
    #: not sharing its layout. In these cases, expansion of the
    #: container is typically desired.
    hug_width = set_default('ignore')
    hug_height = set_default('ignore')

    #: A reference to the ProxyContainer object.
    proxy = Typed(ProxyContainer)

    #--------------------------------------------------------------------------
    # Public API
    #--------------------------------------------------------------------------
    def widgets(self):
        """ Get the child ConstraintsWidgets defined on the container.

        Non-ConstraintsWidget children are ignored.

        """
        return [c for c in self.children if isinstance(c, ConstraintsWidget)]

    #--------------------------------------------------------------------------
    # Child Events
    #--------------------------------------------------------------------------
    def child_added(self, child):
        """ Handle the child added event on the container.

        This event handler will request a relayout if the added child
        is an instance of 'ConstraintsWidget'.

        """
        super(Container, self).child_added(child)
        if isinstance(child, ConstraintsWidget):
            self.request_relayout()

    def child_removed(self, child):
        """ Handle the child removed event on the container.

        This event handler will request a relayout if the removed child
        is an instance of 'ConstraintsWidget'.

        """
        super(Container, self).child_removed(child)
        if isinstance(child, ConstraintsWidget):
            self.request_relayout()

    #--------------------------------------------------------------------------
    # Observers
    #--------------------------------------------------------------------------
    @observe(('share_layout', 'padding'))
    def _layout_invalidated(self, change):
        """ A private observer which invalidates the layout.

        """
        # The superclass handler is sufficient.
        super(Container, self)._layout_invalidated(change)

    #--------------------------------------------------------------------------
    # Layout Constraints
    #--------------------------------------------------------------------------
    def layout_constraints(self):
        """ The constraints generation for a Container.

        This method supplies default vbox constraints to the children of
        the container unless the user has given explicit 'constraints'.

        """
        cns = self.constraints[:]
        if not cns:
            cns.append(vbox(*self.widgets()))
        return cns
Пример #22
0
class BaseInterface(HasPrefAtom):
    """Base class to use for interfaces.

    The interface should not re-use member names used by the task to
    avoid issues when walking.

    This class should not be used directly, use one of its subclasses.

    """
    #: Class attribute indicating whether this interface has views or not.
    has_view = False

    #: Identifier for the build dependency collector
    dep_type = Constant(DEP_TYPE).tag(pref=True)

    #: Name of the class of the interface and anchor (ie task or interface
    #: with which this interface is used). Used for persistence purposes.
    interface_id = Tuple().tag(pref=True)

    #: Dict of database entries added by the interface.
    database_entries = Dict()

    def check(self, *args, **kwargs):
        """Check that everything is alright before starting a measurement.

        By default this tries to format all members tagged with 'fmt' and
        to eval all members tagged with 'feval'. If the tag value is
        'Warn', the test is considered passed but a traceback entry is
        still filled.

        Returns
        -------
        result : bool
            Whether the checks passed.

        traceback : dict
            Mapping from error path to message for each failure.

        """
        result = True
        traceback = {}
        task = self.task
        err_path = '/'.join((task.path, task.name))

        # Both tagged-member checks follow the same pattern; only the tag,
        # the processing function and the error template differ.
        checks = (('fmt', task.format_string, 'Failed to format %s : %s'),
                  ('feval', task.format_and_eval_string,
                   'Failed to eval %s : %s'))
        for tag, handler, err_fmt in checks:
            for name, member in tagged_members(self, tag).items():
                try:
                    value = handler(getattr(self, name))
                    if name in self.database_entries:
                        task.write_in_database(name, value)
                except Exception:
                    if member.metadata[tag] != 'Warn':
                        result = False
                    traceback[err_path + '-' + name] = \
                        err_fmt % (name, format_exc())

        return result, traceback

    def perform(self, *args, **kwargs):
        """Method called by the parent task perform method.

        """
        raise NotImplementedError()

    def traverse(self, depth=-1):
        """Method used to retrieve information about the interface.

        Parameters
        ----------
        depth : int
            How deep should we stop traversing.

        """
        yield self

    @classmethod
    def build_from_config(cls, config, dependencies):
        """ Create an interface using the provided dict.

        """
        interface = cls()
        interface.update_members_from_preferences(config)
        return interface
Пример #23
0
class BaseInstruction(HasPrefsAtom):
    """Base class storing an operation to perform on a driver.

    """
    #: Identifier for the build dependency collector
    dep_type = Constant(DEP_TYPE).tag(pref=True)

    #: Id of the class, used for persistence.
    instruction_id = Str().tag(pref=True)

    #: Id of the instruction. Will be used to store the result of the
    #: instruction in the database if meaningful.
    id = Str().tag(pref=True)

    #: Path pointing to the attribute that should be manipulated (get/set/call)
    #: Should start by "driver."
    path = Str().tag(pref=True)

    #: Channel ids that should be inserted in the path to access the proper
    #: attribute.
    ch_ids = Typed(OrderedDict,
                   ()).tag(pref=(ordered_dict_to_pref, ordered_dict_from_pref))

    #: Hinter responsible for providing a reasonable value for the database
    #: during the checks.
    hinter = Typed(BaseInstructionReturnHinter)

    #: Names under which the instruction output should be stored in the
    #: database.
    database_entries = Dict()

    def check(self, task, driver_cls):
        """Ensure that the path is meaningful and check the hinter.

        Parameters
        ----------
        task : exopy_i3py.tasks.tasks.generic_instr_task.GenericI3pyTask
            Task to which this instruction is attached.

        driver_cls : type
            Driver class with which the instruction will have to work.

        Returns
        -------
        test : bool
            Whether or not the checks are considered successful.

        value_or_errors : dict or str
            Dictionary of values to store in the database if the checks
            succeeded or an error message if something went wrong.

        """
        dr = driver_cls
        valid_path = driver_cls.__name__
        for i, part in enumerate(self.path.split('.')):
            if i == 0:
                if part != 'driver':
                    return (False,
                            'The path of the instruction should start by '
                            '"driver"')
                # BUG FIX: "driver" is only a path marker, not an attribute
                # of the driver class, so it must not go through the
                # hasattr/getattr walk below.
                continue
            if '[' in part:
                if ']' not in part:
                    return False, 'Malformed channel access: %s' % part
                ch_id = part.split('[')[1].split(']')[0]
                if ch_id not in self.ch_ids:
                    # BUG FIX: typo in the error message ("know" -> "known").
                    return (False, 'Unknown channel id %s, known ids are %s' %
                            (ch_id, self.ch_ids))
                part = part.split('[')[0]

            if not hasattr(dr, part):
                return False, '%s has no attribute %s' % (valid_path, part)

            dr = getattr(dr, part)

        try:
            value = self.hinter.provide_hint(self, driver_cls, task)
        except Exception:
            return (False, 'Failed to generate database value hint:\n%s' %
                    format_exc())

        return True, value

    def prepare(self):
        """Prepare the instruction for execution.

        Called once in the lifetime, before execute.

        """
        raise NotImplementedError

    def execute(self, task, driver):
        """Execute the instruction on the provided driver.

        """
        raise NotImplementedError

    @classmethod
    def build_from_config(cls, config, dependencies):
        """Build an instruction from a config.

        BUG FIX: this alternate constructor was missing the @classmethod
        decorator (compare with BaseInterface.build_from_config), so the
        first positional argument would have been misbound when called on
        the class.

        """
        inst = cls()
        inst.update_members_from_preferences(config)
        hinter_id = config['hinter']['hinter_id']
        hinter_cls = dependencies[HINTER_DEP_TYPE][hinter_id]
        inst.hinter = hinter_cls.build_from_config(config['hinter'],
                                                   dependencies)
        return inst

    # --- Private API ---------------------------------------------------------

    def _default_instruction_id(self):
        """Default value for the instruction_id member.

        """
        pack, _ = self.__module__.split('.', 1)
        return pack + '.' + type(self).__name__
Пример #24
0
class IOLoop(Atom):
    """A level-triggered I/O loop.

    We use ``epoll`` (Linux) or ``kqueue`` (BSD and Mac OS X) if they
    are available, or else we fall back on select(). If you are
    implementing a system that needs to handle thousands of
    simultaneous connections, you should use a system that supports
    either ``epoll`` or ``kqueue``.

    Example usage for a simple TCP server:

    .. testcode::

        import errno
        import functools
        import tornado.ioloop
        import socket

        def connection_ready(sock, fd, events):
            while True:
                try:
                    connection, address = sock.accept()
                except socket.error as e:
                    if e.args[0] not in (errno.EWOULDBLOCK, errno.EAGAIN):
                        raise
                    return
                connection.setblocking(0)
                handle_connection(connection, address)

        if __name__ == '__main__':
            sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM, 0)
            sock.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
            sock.setblocking(0)
            sock.bind(("", port))
            sock.listen(128)

            io_loop = tornado.ioloop.IOLoop.current()
            callback = functools.partial(connection_ready, sock)
            io_loop.add_handler(sock.fileno(), callback, io_loop.READ)
            io_loop.start()

    .. testoutput::
       :hide:

    By default, a newly-constructed `IOLoop` becomes the thread's current
    `IOLoop`, unless there already is a current `IOLoop`. This behavior
    can be controlled with the ``make_current`` argument to the `IOLoop`
    constructor: if ``make_current=True``, the new `IOLoop` will always
    try to become current and it raises an error if there is already a
    current instance. If ``make_current=False``, the new `IOLoop` will
    not try to become current.

    In general, an `IOLoop` cannot survive a fork or be shared across
    processes in any way. When multiple processes are being used, each
    process should create its own `IOLoop`, which also implies that
    any objects which depend on the `IOLoop` (such as
    `.AsyncHTTPClient`) must also be created in the child processes.
    As a guideline, anything that starts processes (including the
    `tornado.process` and `multiprocessing` modules) should do so as
    early as possible, ideally the first thing the application does
    after loading its configuration in ``main()``.

    .. versionchanged:: 4.2
       Added the ``make_current`` keyword argument to the `IOLoop`
       constructor.
    """
    # Constants from the epoll module
    _EPOLLIN = Constant(0x001)
    _EPOLLPRI = Constant(0x002)
    _EPOLLOUT = Constant(0x004)
    _EPOLLERR = Constant(0x008)
    _EPOLLHUP = Constant(0x010)
    _EPOLLRDHUP = Constant(0x2000)
    _EPOLLONESHOT = Constant(1 << 30)
    _EPOLLET = Constant(1 << 31)

    # Our events map exactly to the epoll events
    NONE = Constant(0)
    READ = Constant(0x001)
    WRITE = Constant(0x004)
    ERROR = Constant(0x008 | 0x010)

    # Global lock for creating global IOLoop instance
    _instance_lock = threading.Lock()

    # Thread-local storage for the per-thread current IOLoop instance.
    _current = threading.local()

    # Optional handler invoked by handle_callback_exception in place of
    # the default error logging (see set_callback_exception_handler).
    _error_handler = Callable()

    @staticmethod
    def instance():
        """Deprecated alias for `IOLoop.current()`.

        .. versionchanged:: 5.0

           Previously, this method returned a global singleton
           `IOLoop`, in contrast with the per-thread `IOLoop` returned
           by `current()`. In nearly all cases the two were the same
           (when they differed, it was generally used from non-Tornado
           threads to communicate back to the main thread's `IOLoop`).
           This distinction is not present in `asyncio`, so in order
           to facilitate integration with that package `instance()`
           was changed to be an alias to `current()`. Applications
           using the cross-thread communications aspect of
           `instance()` should instead set their own global variable
           to point to the `IOLoop` they want to use.

        .. deprecated:: 5.0
        """
        return IOLoop.current()

    @staticmethod
    def initialized():
        """Returns true if there is a current IOLoop.

        .. versionchanged:: 5.0

           Redefined in terms of `current()` instead of `instance()`.

        .. deprecated:: 5.0

           This method only knows about `IOLoop` objects (and not, for
           example, `asyncio` event loops), so it is of limited use.
        """
        return IOLoop.current(instance=False) is not None

    def install(self):
        """Deprecated alias for `make_current()`.

        .. versionchanged:: 5.0

           Previously, this method would set this `IOLoop` as the
           global singleton used by `IOLoop.instance()`. Now that
           `instance()` is an alias for `current()`, `install()`
           is an alias for `make_current()`.

        .. deprecated:: 5.0
        """
        self.make_current()

    @staticmethod
    def clear_instance():
        """Deprecated alias for `clear_current()`.

        .. versionchanged:: 5.0

           Previously, this method would clear the `IOLoop` used as
           the global singleton by `IOLoop.instance()`. Now that
           `instance()` is an alias for `current()`,
           `clear_instance()` is an alias for `clear_current()`.

        .. deprecated:: 5.0

        """
        IOLoop.clear_current()

    @staticmethod
    def current(instance=True):
        """Returns the current thread's `IOLoop`.

        If an `IOLoop` is currently running or has been marked as
        current by `make_current`, returns that instance.  If there is
        no current `IOLoop` and ``instance`` is true, creates one.

        .. versionchanged:: 4.1
           Added ``instance`` argument to control the fallback to
           `IOLoop.instance()`.
        .. versionchanged:: 5.0
           The ``instance`` argument now controls whether an `IOLoop`
           is created automatically when there is none, instead of
           whether we fall back to `IOLoop.instance()` (which is now
           an alias for this method)
        """
        current = getattr(IOLoop._current, "instance", None)
        if current is None and instance:
            current = None
            #if asyncio is not None:
            #    from tornado.platform.asyncio import AsyncIOLoop, AsyncIOMainLoop
            #    if IOLoop.configured_class() is AsyncIOLoop:
            #        current = AsyncIOMainLoop()
            if current is None:
                from .platforms import EPollIOLoop
                current = EPollIOLoop()
                # initialize() makes the fresh loop current when no
                # current loop exists; the check below verifies it took.
                current.initialize()
                #current = IOLoop()
            if IOLoop._current.instance is not current:
                raise RuntimeError("new IOLoop did not become current")
        return current

    def make_current(self):
        """Makes this the `IOLoop` for the current thread.

        An `IOLoop` automatically becomes current for its thread
        when it is started, but it is sometimes useful to call
        `make_current` explicitly before starting the `IOLoop`,
        so that code run at startup time can find the right
        instance.

        .. versionchanged:: 4.1
           An `IOLoop` created while there is no current `IOLoop`
           will automatically become current.
        """
        IOLoop._current.instance = self

    @staticmethod
    def clear_current():
        """Clears the `IOLoop` for the current thread.

        Intended primarily for use by test frameworks in between tests.
        """
        IOLoop._current.instance = None

    @classmethod
    def configurable_base(cls):
        """Return the base class used for configurable lookup."""
        return IOLoop

    @classmethod
    def configurable_default(cls):
        """Return the default implementation class for this configurable."""
        #if asyncio is not None:
        #    from tornado.platform.asyncio import AsyncIOLoop
        #    return AsyncIOLoop
        return PollIOLoop

    def initialize(self, make_current=None):
        """Set up the loop, optionally making it the thread's current one.

        With ``make_current=None`` (the default) the loop becomes current
        only if no current loop exists; ``True`` forces it (raising if one
        already exists); ``False`` leaves the current loop untouched.
        """
        if make_current is None:
            if IOLoop.current(instance=False) is None:
                self.make_current()
        elif make_current:
            if IOLoop.current(instance=False) is not None:
                raise RuntimeError("current IOLoop already exists")
            self.make_current()

    def close(self, all_fds=False):
        """Closes the `IOLoop`, freeing any resources used.

        If ``all_fds`` is true, all file descriptors registered on the
        IOLoop will be closed (not just the ones created by the
        `IOLoop` itself).

        Many applications will only use a single `IOLoop` that runs for the
        entire lifetime of the process.  In that case closing the `IOLoop`
        is not necessary since everything will be cleaned up when the
        process exits.  `IOLoop.close` is provided mainly for scenarios
        such as unit tests, which create and destroy a large number of
        ``IOLoops``.

        An `IOLoop` must be completely stopped before it can be closed.  This
        means that `IOLoop.stop()` must be called *and* `IOLoop.start()` must
        be allowed to return before attempting to call `IOLoop.close()`.
        Therefore the call to `close` will usually appear just after
        the call to `start` rather than near the call to `stop`.

        .. versionchanged:: 3.1
           If the `IOLoop` implementation supports non-integer objects
           for "file descriptors", those objects will have their
           ``close`` method called when ``all_fds`` is true.
        """
        raise NotImplementedError()

    def add_handler(self, fd, handler, events):
        """Registers the given handler to receive the given events for ``fd``.

        The ``fd`` argument may either be an integer file descriptor or
        a file-like object with a ``fileno()`` method (and optionally a
        ``close()`` method, which may be called when the `IOLoop` is shut
        down).

        The ``events`` argument is a bitwise or of the constants
        ``IOLoop.READ``, ``IOLoop.WRITE``, and ``IOLoop.ERROR``.

        When an event occurs, ``handler(fd, events)`` will be run.

        .. versionchanged:: 4.0
           Added the ability to pass file-like objects in addition to
           raw file descriptors.
        """
        raise NotImplementedError()

    def update_handler(self, fd, events):
        """Changes the events we listen for on ``fd``.

        .. versionchanged:: 4.0
           Added the ability to pass file-like objects in addition to
           raw file descriptors.
        """
        raise NotImplementedError()

    def remove_handler(self, fd):
        """Stop listening for events on ``fd``.

        .. versionchanged:: 4.0
           Added the ability to pass file-like objects in addition to
           raw file descriptors.
        """
        raise NotImplementedError()

    def set_blocking_signal_threshold(self, seconds, action):
        """Sends a signal if the `IOLoop` is blocked for more than
        ``seconds`` seconds.

        Pass ``seconds=None`` to disable.  Requires Python 2.6 on a unixy
        platform.

        The action parameter is a Python signal handler.  Read the
        documentation for the `signal` module for more information.
        If ``action`` is None, the process will be killed if it is
        blocked for too long.
        """
        raise NotImplementedError()

    def set_blocking_log_threshold(self, seconds):
        """Logs a stack trace if the `IOLoop` is blocked for more than
        ``seconds`` seconds.

        Equivalent to ``set_blocking_signal_threshold(seconds,
        self.log_stack)``
        """
        self.set_blocking_signal_threshold(seconds, self.log_stack)

    def log_stack(self, signal, frame):
        """Signal handler to log the stack trace of the current thread.

        For use with `set_blocking_signal_threshold`.
        """
        # NOTE(review): ``gen_log`` and ``self._blocking_signal_threshold``
        # are not defined in this class body -- presumably supplied by the
        # enclosing module and a subclass respectively; confirm before use.
        gen_log.warning('IOLoop blocked for %f seconds in\n%s',
                        self._blocking_signal_threshold,
                        ''.join(traceback.format_stack(frame)))

    def start(self):
        """Starts the I/O loop.

        The loop will run until one of the callbacks calls `stop()`, which
        will make the loop stop after the current event iteration completes.
        """
        raise NotImplementedError()

    def _setup_logging(self):
        """The IOLoop catches and logs exceptions, so it's
        important that log output be visible.  However, python's
        default behavior for non-root loggers (prior to python
        3.2) is to print an unhelpful "no handlers could be
        found" message rather than the actual log entry, so we
        must explicitly configure logging if we've made it this
        far without anything.

        This method should be called from start() in subclasses.
        """
        pass
        # if not any([logging.getLogger().handlers,
        #             logging.getLogger('tornado').handlers,
        #             logging.getLogger('tornado.application').handlers]):
        #     logging.basicConfig()

    def stop(self):
        """Stop the I/O loop.

        If the event loop is not currently running, the next call to `start()`
        will return immediately.

        To use asynchronous methods from otherwise-synchronous code (such as
        unit tests), you can start and stop the event loop like this::

          ioloop = IOLoop()
          async_method(ioloop=ioloop, callback=ioloop.stop)
          ioloop.start()

        ``ioloop.start()`` will return after ``async_method`` has run
        its callback, whether that callback was invoked before or
        after ``ioloop.start``.

        Note that even after `stop` has been called, the `IOLoop` is not
        completely stopped until `IOLoop.start` has also returned.
        Some work that was scheduled before the call to `stop` may still
        be run before the `IOLoop` shuts down.
        """
        raise NotImplementedError()

    def run_sync(self, func, timeout=None):
        """Starts the `IOLoop`, runs the given function, and stops the loop.

        The function must return either a yieldable object or
        ``None``. If the function returns a yieldable object, the
        `IOLoop` will run until the yieldable is resolved (and
        `run_sync()` will return the yieldable's result). If it raises
        an exception, the `IOLoop` will stop and the exception will be
        re-raised to the caller.

        The keyword-only argument ``timeout`` may be used to set
        a maximum duration for the function.  If the timeout expires,
        a `tornado.util.TimeoutError` is raised.

        This method is useful in conjunction with `tornado.gen.coroutine`
        to allow asynchronous calls in a ``main()`` function::

            @gen.coroutine
            def main():
                # do stuff...

            if __name__ == '__main__':
                IOLoop.current().run_sync(main)

        .. versionchanged:: 4.3
           Returning a non-``None``, non-yieldable value is now an error.
        """
        # Single-element list used as a mutable cell so the nested
        # closure can store the resulting future.
        future_cell = [None]

        def run():
            try:
                result = func()
                if result is not None:
                    from .gen import convert_yielded
                    result = convert_yielded(result)
            except Exception:
                future_cell[0] = TracebackFuture()
                future_cell[0].set_exc_info(sys.exc_info())
            else:
                if is_future(result):
                    future_cell[0] = result
                else:
                    future_cell[0] = TracebackFuture()
                    future_cell[0].set_result(result)
            self.add_future(future_cell[0], lambda future: self.stop())

        self.add_callback(run)
        if timeout is not None:
            timeout_handle = self.add_timeout(self.time() + timeout, self.stop)
        self.start()
        if timeout is not None:
            self.remove_timeout(timeout_handle)
        if not future_cell[0].done():
            raise TimeoutError('Operation timed out after %s seconds' %
                               timeout)
        return future_cell[0].result()

    def time(self):
        """Returns the current time according to the `IOLoop`'s clock.

        The return value is a floating-point number relative to an
        unspecified time in the past.

        By default, the `IOLoop`'s time function is `time.time`.  However,
        it may be configured to use e.g. `time.monotonic` instead.
        Calls to `add_timeout` that pass a number instead of a
        `datetime.timedelta` should use this function to compute the
        appropriate time, so they can work no matter what time function
        is chosen.
        """
        return time.time()

    def add_timeout(self, deadline, callback, *args, **kwargs):
        """Runs the ``callback`` at the time ``deadline`` from the I/O loop.

        Returns an opaque handle that may be passed to
        `remove_timeout` to cancel.

        ``deadline`` may be a number denoting a time (on the same
        scale as `IOLoop.time`, normally `time.time`), or a
        `datetime.timedelta` object for a deadline relative to the
        current time.  Since Tornado 4.0, `call_later` is a more
        convenient alternative for the relative case since it does not
        require a timedelta object.

        Note that it is not safe to call `add_timeout` from other threads.
        Instead, you must use `add_callback` to transfer control to the
        `IOLoop`'s thread, and then call `add_timeout` from there.

        Subclasses of IOLoop must implement either `add_timeout` or
        `call_at`; the default implementations of each will call
        the other.  `call_at` is usually easier to implement, but
        subclasses that wish to maintain compatibility with Tornado
        versions prior to 4.0 must use `add_timeout` instead.

        .. versionchanged:: 4.0
           Now passes through ``*args`` and ``**kwargs`` to the callback.
        """
        if isinstance(deadline, numbers.Real):
            return self.call_at(deadline, callback, *args, **kwargs)
        elif isinstance(deadline, datetime.timedelta):
            return self.call_at(self.time() + timedelta_to_seconds(deadline),
                                callback, *args, **kwargs)
        else:
            raise TypeError("Unsupported deadline %r" % deadline)

    def call_later(self, delay, callback, *args, **kwargs):
        """Runs the ``callback`` after ``delay`` seconds have passed.

        Returns an opaque handle that may be passed to `remove_timeout`
        to cancel.  Note that unlike the `asyncio` method of the same
        name, the returned object does not have a ``cancel()`` method.

        See `add_timeout` for comments on thread-safety and subclassing.

        .. versionadded:: 4.0
        """
        return self.call_at(self.time() + delay, callback, *args, **kwargs)

    def call_at(self, when, callback, *args, **kwargs):
        """Runs the ``callback`` at the absolute time designated by ``when``.

        ``when`` must be a number using the same reference point as
        `IOLoop.time`.

        Returns an opaque handle that may be passed to `remove_timeout`
        to cancel.  Note that unlike the `asyncio` method of the same
        name, the returned object does not have a ``cancel()`` method.

        See `add_timeout` for comments on thread-safety and subclassing.

        .. versionadded:: 4.0
        """
        return self.add_timeout(when, callback, *args, **kwargs)

    def remove_timeout(self, timeout):
        """Cancels a pending timeout.

        The argument is a handle as returned by `add_timeout`.  It is
        safe to call `remove_timeout` even if the callback has already
        been run.
        """
        raise NotImplementedError()

    def add_callback(self, callback, *args, **kwargs):
        """Calls the given callback on the next I/O loop iteration.

        It is safe to call this method from any thread at any time,
        except from a signal handler.  Note that this is the **only**
        method in `IOLoop` that makes this thread-safety guarantee; all
        other interaction with the `IOLoop` must be done from that
        `IOLoop`'s thread.  `add_callback()` may be used to transfer
        control from other threads to the `IOLoop`'s thread.

        To add a callback from a signal handler, see
        `add_callback_from_signal`.
        """
        raise NotImplementedError()

    def add_callback_from_signal(self, callback, *args, **kwargs):
        """Calls the given callback on the next I/O loop iteration.

        Safe for use from a Python signal handler; should not be used
        otherwise.

        Callbacks added with this method will be run without any
        `.stack_context`, to avoid picking up the context of the function
        that was interrupted by the signal.
        """
        raise NotImplementedError()

    def spawn_callback(self, callback, *args, **kwargs):
        """Calls the given callback on the next IOLoop iteration.

        Unlike all other callback-related methods on IOLoop,
        ``spawn_callback`` does not associate the callback with its caller's
        ``stack_context``, so it is suitable for fire-and-forget callbacks
        that should not interfere with the caller.

        .. versionadded:: 4.0
        """
        with stack_context.NullContext():
            self.add_callback(callback, *args, **kwargs)

    def add_future(self, future, callback):
        """Schedules a callback on the ``IOLoop`` when the given
        `.Future` is finished.

        The callback is invoked with one argument, the
        `.Future`.
        """
        assert is_future(future)
        callback = stack_context.wrap(callback)
        future.add_done_callback(
            lambda future: self.add_callback(callback, future))

    def _run_callback(self, callback):
        """Runs a callback with error handling.

        For use in subclasses.
        """
        try:
            ret = callback()
            if ret is not None:
                from . import gen
                # Functions that return Futures typically swallow all
                # exceptions and store them in the Future.  If a Future
                # makes it out to the IOLoop, ensure its exception (if any)
                # gets logged too.
                try:
                    ret = gen.convert_yielded(ret)
                except gen.BadYieldError:
                    # It's not unusual for add_callback to be used with
                    # methods returning a non-None and non-yieldable
                    # result, which should just be ignored.
                    pass
                else:
                    self.add_future(ret, self._discard_future_result)
        except Exception:
            self.handle_callback_exception(callback)

    def _discard_future_result(self, future):
        """Avoid unhandled-exception warnings from spawned coroutines."""
        future.result()

    def set_callback_exception_handler(self, handler):
        """ Change the exception handler """
        self._error_handler = handler

    def handle_callback_exception(self, callback):
        """This method is called whenever a callback run by the `IOLoop`
        throws an exception.

        By default simply logs the exception as an error.  Subclasses
        may override this method to customize reporting of exceptions.

        The exception itself is not passed explicitly, but is available
        in `sys.exc_info`.
        """
        if self._error_handler:
            self._error_handler(callback)
        else:
            app_log.error("Exception in callback %r", callback, exc_info=True)

    def split_fd(self, fd):
        """Returns an (fd, obj) pair from an ``fd`` parameter.

        We accept both raw file descriptors and file-like objects as
        input to `add_handler` and related methods.  When a file-like
        object is passed, we must retain the object itself so we can
        close it correctly when the `IOLoop` shuts down, but the
        poller interfaces favor file descriptors (they will accept
        file-like objects and call ``fileno()`` for you, but they
        always return the descriptor itself).

        This method is provided for use by `IOLoop` subclasses and should
        not generally be used by application code.

        .. versionadded:: 4.0
        """
        try:
            return fd.fileno(), fd
        except AttributeError:
            return fd, fd

    def close_fd(self, fd):
        """Utility method to close an ``fd``.

        If ``fd`` is a file-like object, we close it directly; otherwise
        we use `os.close`.

        This method is provided for use by `IOLoop` subclasses (in
        implementations of ``IOLoop.close(all_fds=True)`` and should
        not generally be used by application code.

        .. versionadded:: 4.0
        """
        try:
            try:
                fd.close()
            except AttributeError:
                # Not a file-like object; treat it as a raw descriptor.
                os.close(fd)
        except OSError:
            pass
Пример #25
0
class ConstraintsWidget(Widget):
    """ A Widget subclass which adds constraint information.

    A ConstraintsWidget is augmented with symbolic constraint variables
    which define a box model on the widget. This box model is used to
    declare constraints between this widget and other components which
    participate in constraints-based layout.

    Constraints are added to a widget by assigning a list to the
    'constraints' attribute. This list may contain raw LinearConstraint
    objects (which are created by manipulating the symbolic constraint
    variables) or DeferredConstraints objects which generated these
    LinearConstraint objects on-the-fly.

    """
    #: The list of user-specified constraints or constraint-generating
    #: objects for this component.
    constraints = d_(List())

    #: A constant symbolic object that represents the left boundary of
    #: the widget.
    left = ConstraintMember()

    #: A constant symbolic object that represents the top boundary of
    #: the widget.
    top = ConstraintMember()

    #: A constant symbolic object that represents the width of the
    #: widget.
    width = ConstraintMember()

    #: A constant symbolic object that represents the height of the
    #: widget.
    height = ConstraintMember()

    #: A constant symbolic object that represents the right boundary
    #: of the component. This is computed as left + width.
    right = Constant()

    def _default_right(self):
        return self.left + self.width

    #: A constant symbolic object that represents the bottom boundary
    #: of the component. This is computed as top + height.
    bottom = Constant()

    def _default_bottom(self):
        return self.top + self.height

    #: A constant symbolic object that represents the vertical center
    #: of the widget. This is computed as top + 0.5 * height.
    v_center = Constant()

    def _default_v_center(self):
        return self.top + self.height / 2.0

    #: A constant symbolic object that represents the horizontal center
    #: of the widget. This is computed as left + 0.5 * width.
    h_center = Constant()

    def _default_h_center(self):
        return self.left + self.width / 2.0

    #: How strongly a component hugs it's width hint. Valid strengths
    #: are 'weak', 'medium', 'strong', 'required' and 'ignore'. Default
    #: is 'strong'. This can be overridden on a per-control basis to
    #: specify a logical default for the given control.
    hug_width = d_(PolicyEnum('strong'))

    #: How strongly a component hugs it's height hint. Valid strengths
    #: are 'weak', 'medium', 'strong', 'required' and 'ignore'. Default
    #: is 'strong'. This can be overridden on a per-control basis to
    #: specify a logical default for the given control.
    hug_height = d_(PolicyEnum('strong'))

    #: How strongly a component resists clipping its contents. Valid
    #: strengths are 'weak', 'medium', 'strong', 'required' and 'ignore'.
    #: The default is 'strong' for width.
    resist_width = d_(PolicyEnum('strong'))

    #: How strongly a component resists clipping its contents. Valid
    #: strengths are 'weak', 'medium', 'strong', 'required' and 'ignore'.
    #: The default is 'strong' for height.
    resist_height = d_(PolicyEnum('strong'))

    #: A reference to the ProxyConstraintsWidget object.
    proxy = Typed(ProxyConstraintsWidget)

    #--------------------------------------------------------------------------
    # Observers
    #--------------------------------------------------------------------------
    @observe(('constraints', 'hug_width', 'hug_height', 'resist_width',
        'resist_height'))
    def _layout_invalidated(self, change):
        """ An observer which will relayout the proxy widget.

        """
        self.request_relayout()

    #--------------------------------------------------------------------------
    # Public API
    #--------------------------------------------------------------------------
    def request_relayout(self):
        """ Request a relayout from the proxy widget.

        This will invoke the 'request_relayout' method on an active
        proxy. The proxy should collapse the requests as necessary.

        """
        if self.proxy_is_active:
            self.proxy.request_relayout()

    def when(self, switch):
        """ A method which returns `self` or None based on the truthness
        of the argument.

        This can be useful to easily turn off the effects of an object
        in constraints-based layout.

        Parameters
        ----------
        switch : bool
            A boolean which indicates whether this instance or None
            should be returned.

        Returns
        -------
        result : self or None
            If 'switch' is boolean True, self is returned. Otherwise,
            None is returned.

        """
        if switch:
            return self

    #--------------------------------------------------------------------------
    # Private API
    #--------------------------------------------------------------------------
    def _collect_constraints(self):
        """ The constraints to use for the component.

        This will return the expanded list of constraints to use for
        the component. It will not include the hard constraints.

        """
        cns = self.constraints
        if not cns:
            cns = self._get_default_constraints()
        # Build a new list instead of extending in place: the original
        # ``cns += ...`` mutated the user-supplied 'constraints' list
        # member when it was non-empty, accumulating duplicate component
        # constraints on every relayout.
        cns = list(cns) + self._component_constraints()
        return list(expand_constraints(self, cns))

    def _hard_constraints(self):
        """ The constraints required for the component.

        These are constraints that must apply to the internal layout
        computations of a component as well as that of containers which
        may parent this component. By default, all components will have
        their 'left', 'right', 'width', and 'height' symbols constrained
        to >= 0. Subclasses which need to add more constraints should
        reimplement this method.

        """
        cns = [
            self.left >= 0, self.top >= 0,
            self.width >= 0, self.height >= 0,
        ]
        return cns

    def _component_constraints(self):
        """ The required constraints for a particular component.

        These are constraints which should be applied on top of any user
        constraints and hard constraints. The default implementation
        returns an empty list.

        """
        return []

    def _get_default_constraints(self):
        """ The constraints to include if the user has none defined.

        These are constraints to include if the user has not specified
        their own in the 'constraints' list. The default implementation
        returns an empty list.

        """
        return []
Пример #26
0
class State(Model):
    """ An Atom object that automatically saves and restores its state
    when a member changes. Saves are queued and flushed once to reduce
    the number of disk writes when many members change at once.

    """
    #: Singleton instance (see `instance`).
    _instance = None

    #: File where state is saved within the assets folder
    #: relative to assets/python. By default this is outside the python
    #: folder so it is not overwritten when a new version of the app
    #: is installed (ex. otherwise when you push an update to the Play store
    #: it will overwrite the users saved state!)
    _state_file = Constant(os.path.join(sys.path[0], '../state.db'))

    #: Number of queued, not-yet-flushed save requests.
    _state_save_pending = Int().tag(persist=False)

    #: Members whose values are persisted to disk (everything not tagged
    #: with persist=False).
    _state_members = List(Member).tag(persist=False)

    @classmethod
    def instance(cls):
        """ Get an instance of this object """
        if State._instance is None:
            cls()
        return State._instance

    def __init__(self, *args, **kwargs):
        """ Create an instance of the state. This should only be called
        once or a RuntimeError will be raised.

        """
        if State._instance is not None:
            raise RuntimeError("Only one instance of AppState can exist!")
        super(State, self).__init__(*args, **kwargs)
        State._instance = self
        self._bind_observers()

    # -------------------------------------------------------------------------
    # State API
    # -------------------------------------------------------------------------
    def save(self):
        """ Manually trigger a save """
        self._queue_save_state({'type': 'manual'})

    def _bind_observers(self):
        """ Restore any previously saved state from disk and start
        observing the persistent members so future changes are saved
        automatically.

        """
        #: Build state members list
        for name, member in self.members().items():
            if not member.metadata or member.metadata.get('persist', True):
                self._state_members.append(member)

        #: Get the valid state keys
        persistent_members = [m.name for m in self._state_members]
        print("State members: {}".format(persistent_members))

        #: Load the state from disk
        try:
            with open(self._state_file, 'rb') as f:
                state = pickle.load(f)

            #: Delete anything that may have changed. Iterate over a
            #: snapshot of the keys: deleting entries while iterating
            #: the dict itself raises a RuntimeError on Python 3.
            for k in list(state):
                if k not in persistent_members:
                    del state[k]
            print("Restoring state: {}".format(state))
            self.__setstate__(state)
        except Exception as e:
            print("Failed to load state: {}".format(e))

        #: Hook up observers to automatically save when a change occurs
        for m in self._state_members:
            self.observe(m.name, self._queue_save_state)

    def _queue_save_state(self, change):
        """ Queue a save state. This calls _save_state after a given duration
        so that multiple state changes get batched and saved all at once
        instead of saving multiple times (which can become slow).

        """
        if change['type'] in ['update', 'manual', 'container']:
            self._state_save_pending += 1
            timed_call(350, self._save_state, change)

    def _save_state(self, change):
        """ Actually save the state once all the pending state changes
        settle out (when _state_save_pending==0).

        """
        #: Wait until the last change occurs
        self._state_save_pending -= 1
        if self._state_save_pending != 0:
            return

        #: Actually save
        try:
            print("Saving state due to change: {}".format(change))

            #: Dump first so any failure to encode doesn't wipe out the
            #: previous state. Keep only members tagged as persistent:
            #: the previous code compared string keys against Member
            #: objects (which never matched, so nothing was filtered)
            #: and mutated the dict while iterating it.
            persistent_members = {m.name for m in self._state_members}
            state = self.__getstate__()
            state = {k: v for k, v in state.items()
                     if k in persistent_members}
            state = pickle.dumps(state)

            with open(self._state_file, 'wb') as f:
                f.write(state)
        except Exception as e:
            print("Failed to save state: {}".format(e))

    def _unbind_observers(self):
        """ Stop observing the state. """
        for member in self._state_members:
            self.unobserve(member.name, self._queue_save_state)
Пример #27
0
class RootTask(ComplexTask):
    """Special task which is always the root of a measurement.

    On this class and this class only perform can and should be called
    directly.

    """
    #: Path to which log infos, preferences, etc should be written by default.
    default_path = Unicode('').tag(pref=True)

    #: Should the execution be profiled (cProfile dump written next to the
    #: measurement data, see perform).
    should_profile = Bool().tag(pref=True)

    #: Dict storing data needed at execution time (ex: drivers classes)
    run_time = Dict()

    #: Inter-process event signaling the task it should stop execution.
    should_stop = Typed(Event)

    #: Inter-process event signaling the task it should pause execution.
    should_pause = Typed(Event)

    #: Inter-process event signaling the task is paused.
    paused = Typed(Event)

    #: Inter-process event signaling the main thread is done, handling the
    #: measure resuming, and hence notifying the task execution has resumed.
    resumed = Typed(Event)

    #: Dictionary used to store errors occurring during performing.
    errors = Dict()

    #: Dictionary used to store references to resources that may need to be
    #: shared between task and which must be released when all tasks have been
    #: performed.
    #: Each key is associated to a different kind of resource. Resources must
    #: be stored in SharedDict subclass.
    #: By default three kind of resources exists:
    #:
    #: - threads : used threads grouped by pool.
    #: - active_threads : currently active threads.
    #: - instrs : used instruments referenced by profiles.
    #: - files : currently opened files by path.
    #:
    resources = Dict()

    #: Counter keeping track of the active threads.
    active_threads_counter = Typed(SharedCounter, kwargs={'count': 1})

    #: Counter keeping track of the paused threads.
    paused_threads_counter = Typed(SharedCounter, ())

    #: Thread from which the perform method has been called.
    thread_id = Int()

    # Setting default values for the root task.
    has_root = set_default(True)

    # These must not be modified so freeze them as constants.
    name = Constant('Root')
    depth = Constant(0)
    path = Constant('root')
    database_entries = set_default({'default_path': ''})

    def __init__(self, *args, **kwargs):
        # Preferences and database are created before calling the parent
        # __init__ as it may rely on them being present.
        self.preferences = ConfigObj(indent_type='    ', encoding='utf-8')
        self.database = TaskDatabase()
        super(RootTask, self).__init__(*args, **kwargs)
        self.register_in_database()
        # The root task is its own root and parent.
        self.root = self
        self.parent = self
        # Track thread counts to detect the fully-paused condition
        # (see _state).
        self.active_threads_counter.observe('count', self._state)
        self.paused_threads_counter.observe('count', self._state)

    def check(self, *args, **kwargs):
        """Check that the default path is a valid directory.

        Returns
        -------
        result : tuple(bool, dict)
            Success flag and a traceback dict mapping task paths to error
            messages, merged with the checks of all child tasks.

        """
        traceback = {}
        test = True
        if not os.path.isdir(self.default_path):
            test = False
            traceback[self.path + '/' + self.name] =\
                'The provided default path is not a valid directory'
        self.write_in_database('default_path', self.default_path)
        check = super(RootTask, self).check(*args, **kwargs)
        test = test and check[0]
        traceback.update(check[1])
        return test, traceback

    @smooth_crash
    def perform(self):
        """Run sequentially all child tasks, and close resources.

        Returns
        -------
        result : bool
            False if an unhandled exception occurred or should_stop was
            set, True otherwise.

        """
        result = True
        self.thread_id = threading.current_thread().ident

        self.prepare()

        # Only create a profiler when profiling was requested.
        pr = Profile() if self.should_profile else None

        try:
            if pr:
                pr.enable()
            for child in self.children:
                child.perform_()
        except Exception:
            log = logging.getLogger(__name__)
            msg = 'The following unhandled exception occured :\n'
            log.exception(msg)
            # Signal all threads to stop and record the error.
            self.should_stop.set()
            result = False
            self.errors['unhandled'] = msg + format_exc()
        finally:
            if pr:
                pr.disable()
                meas_name = self.get_from_database('meas_name')
                meas_id = self.get_from_database('meas_id')
                path = os.path.join(self.default_path,
                                    meas_name + '_' + meas_id + '.prof')
                pr.dump_stats(path)
            # Resources are always released, even on failure.
            self.release_resources()

        if self.should_stop.is_set():
            result = False

        return result

    def prepare(self):
        """Optimise the database for running state and prepare children.

        """
        self.database.prepare_to_run()
        super(RootTask, self).prepare()

    def release_resources(self):
        """Release all the resources used by tasks.

        """
        # Release by priority to be sure that there is no conflict
        # (Threads vs instruments for example)
        for resource in sorted(self.resources.values(),
                               key=attrgetter('priority')):
            resource.release()

    def register_in_database(self):
        """Don't create a node for the root task.

        Calls BaseTask's implementation directly to skip the node
        creation performed by ComplexTask.

        """
        BaseTask.register_in_database(self)

        # ComplexTask defines children so we always get something
        for child in self.gather_children():
            child.register_in_database()

    @classmethod
    def build_from_config(cls, config, dependencies):
        """Create a new instance using the provided infos for initialisation.

        Parameters
        ----------
        config : dict(str)
            Dictionary holding the new values to give to the members in string
            format, or dictionary like for instance with prefs.

        dependencies : dict
            Dictionary holding the necessary classes needed when rebuilding.
            This is assembled by the TaskManager.

        Returns
        -------
        task : RootTask
            Newly created and initialized task.

        Notes
        -----
        This method is fairly powerful and can handle a lot of cases so
        don't override it without checking that it works.

        """
        task = super(RootTask, cls).build_from_config(config, dependencies)
        task._post_setattr_root(None, task)
        task.register_in_database()
        task.register_preferences()
        return task

    # =========================================================================
    # --- Private API ---------------------------------------------------------
    # =========================================================================

    def _default_task_id(self):
        # NOTE(review): uses ComplexTask.__name__ (not type(self).__name__)
        # so the root task shares ComplexTask's id — presumably intentional
        # for dependency collection; confirm before changing.
        pack, _ = self.__module__.split('.', 1)
        return pack + '.' + ComplexTask.__name__

    def _child_path(self):
        """Overridden here to not add the task name.

        """
        return self.path

    def _task_entry(self, entry):
        """Do not prepend the name of the root task.

        """
        return entry

    def _state(self, change):
        """Determine whether the task is paused or not.

        This is done by checking the number of active and paused thread and
        setting accordingly the paused event.

        """
        p_count = self.paused_threads_counter.count
        a_count = self.active_threads_counter.count
        # All active threads are paused -> the whole task is paused.
        if a_count == p_count:
            self.paused.set()

        if p_count == 0:
            self.paused.clear()

    def _default_resources(self):
        """Default resources.

        """
        return {
            'threads': ThreadPoolResource(),
            # Reduce priority to stop through the thread resource.
            # This is far less likely to cause a deadlock.
            'active_threads': ThreadPoolResource(priority=0),
            'instrs': InstrsResource(),
            'files': FilesResource()
        }
Пример #28
0
class ConstrainableMixin(Atom):
    """ An atom mixin class which defines constraint members.

    This class implements the Constrainable interface.

    """
    #: The symbolic left boundary of the constrainable.
    left = ConstraintMember()

    #: The symbolic top boundary of the constrainable.
    top = ConstraintMember()

    #: The symbolic width of the constrainable.
    width = ConstraintMember()

    #: The symbolic height of the constrainable.
    height = ConstraintMember()

    #: A symbolic expression representing the right boundary.
    right = Constant()

    #: A symbolic expression representing the bottom boundary.
    bottom = Constant()

    #: A symbolic expression representing the horizontal center.
    h_center = Constant()

    #: A symbolic expression representing the vertical center.
    v_center = Constant()

    #: How strongly a widget hugs its width hint. Equivalent to the
    #: constraint:
    #:      (width == hint) | hug_width
    hug_width = PolicyEnum('strong')

    #: How strongly a widget hugs its height hint. Equivalent to the
    #: constraint:
    #:      (height == hint) | hug_height
    hug_height = PolicyEnum('strong')

    #: How strongly a widget resists clipping its width hint. Equivalent
    #: to the constraint:
    #:      (width >= hint) | resist_width
    resist_width = PolicyEnum('strong')

    #: How strongly a widget resists clipping its height hint. Equivalent
    #: to the constraint:
    #:      (height >= hint) | resist_height
    resist_height = PolicyEnum('strong')

    #: How strongly a widget resists expanding its width hint. Equivalent
    #: to the constraint:
    #:      (width <= hint) | limit_width
    limit_width = PolicyEnum('ignore')

    #: How strongly a widget resists expanding its height hint. Equivalent
    #: to the constraint:
    #:      (height <= hint) | limit_height
    limit_height = PolicyEnum('ignore')

    def _default_right(self):
        # right edge = left edge plus width
        left_edge = self.left
        return left_edge + self.width

    def _default_bottom(self):
        # bottom edge = top edge plus height
        top_edge = self.top
        return top_edge + self.height

    def _default_h_center(self):
        # midpoint between the left and right edges
        half_width = 0.5 * self.width
        return self.left + half_width

    def _default_v_center(self):
        # midpoint between the top and bottom edges
        half_height = 0.5 * self.height
        return self.top + half_height
Пример #29
0
class Tek7000(AWG):
    """ Driver settings for a Tektronix 7000-series AWG. """
    # Number of analog output channels on this instrument model.
    numChannels = Int(default=2)
    # File extension used for the instrument's sequence files.
    seqFileExt = Constant('.awg')
Пример #30
0
class Modulation(HasEvaluableFields):
    """ Modulation applied on top of a pulse shape.

    Only sinusoidal and cosinusoidal modulations are supported. As the
    modulation is applied on top of the shape, more elaborate modulations
    can be implemented in custom shapes when needed.

    """
    #: Identifier for the build dependency collector
    dep_type = Constant(DEP_TYPE).tag(pref=True)

    #: Id of the modulation used to query it from the plugin.
    modulation_id = Unicode().tag(pref=True)

    #: Flag indicating whether or not the modulation is activated.
    activated = Bool().tag(pref=True)

    #: Kind of modulation to use : cos or sin
    kind = Enum('sin', 'cos').tag(pref=True)

    #: Frequency of modulation to use.
    frequency = Unicode().tag(pref=True, feval=Feval(types=Real))

    #: Unit of the frequency use for the modulation.
    frequency_unit = Enum('MHz', 'GHz', 'kHz', 'Hz').tag(pref=True)

    #: Phase to use in the modulation.
    phase = Unicode('0.0').tag(pref=True, feval=Feval(types=Real))

    #: Unit of the phase used in the modulation.
    phase_unit = Enum('rad', 'deg').tag(pref=True)

    #: Index of the parent pulse. This is set when evaluating the entries.
    index = Int()

    def eval_entries(self, root_vars, sequence_locals, missing, errors):
        """ Evaluate amplitude, frequency, and phase.

        Parameters
        ----------
        root_vars : dict
            Global variables. As shapes and modulation cannot update them an
            empty dict is passed.

        sequence_locals : dict
            Known locals variables for the pulse sequence.

        missing : set
            Set of variables missing to evaluate some entries in the sequence.

        errors : dict
            Errors which occurred when trying to compile the pulse sequence.

        Returns
        -------
        result : bool
            Flag indicating whether or not the evaluation succeeded.

        """
        # A deactivated modulation has nothing to evaluate and always
        # reports success.
        if self.activated:
            return super(Modulation, self).eval_entries(
                root_vars, sequence_locals, missing, errors)
        return True

    def compute(self, time, unit):
        """ Computes the modulation impact at a given time.

        Parameters
        ----------
        time : ndarray
            Times at which to compute the modulation.

        unit : str
            Unit in which the time is expressed.

        Returns
        -------
        modulation : ndarray
            Values by which to multiply the shape to get the pulse value at
            time t.

        """
        # A deactivated modulation multiplies the shape by one.
        if not self.activated:
            return 1

        cache = self._cache
        # Angular frequency, corrected for the time/frequency unit pair.
        angular = 2 * Pi * FREQ_TIME_UNIT_MAP[unit][self.frequency_unit]
        angular = angular * cache['frequency']
        phase = cache['phase']
        if self.phase_unit == 'deg':
            phase *= Pi / 180

        trig = np.sin if self.kind == 'sin' else np.cos
        return trig(angular * time + phase)

    def format_error_id(self, member):
        """Assemble the id used to report an evaluation error.

        """
        error_id = '{}_modulation_{}'.format(self.index, member)
        return error_id

    def format_global_vars_id(self, member):
        """Modulation is not allowed to store in the global namespace so raise.

        """
        msg = 'Modulation cannot store values as global (from pulse {})'
        raise RuntimeError(msg.format(self.index))

    # --- Private API ---------------------------------------------------------

    def _default_modulation_id(self):
        """Compute the class id from the root package and class name.

        """
        package, _ = self.__module__.split('.', 1)
        return package + '.' + type(self).__name__