Exemplo n.º 1
0
    def test_success(self):
        """
        When every wrapped validator accepts the value, the chain succeeds.
        """
        validator = and_(instance_of(int), always_pass)
        validator(None, simple_attr("test"), 42)
Exemplo n.º 2
0
 def test_subclass(self):
     """
     Instances of subclasses pass the check as well.
     """
     validator = instance_of(int)
     # bool is a subclass of int, so True satisfies an int check :(
     validator(None, simple_attr("test"), True)
Exemplo n.º 3
0
    def test_fail(self):
        """
        A single failing wrapped validator fails the whole chain.
        """
        validator = and_(instance_of(int), always_fail)
        with pytest.raises(ZeroDivisionError):
            validator(None, simple_attr("test"), 42)
Exemplo n.º 4
0
 def __call__(self, inst, a, value):
     """
     Validate that ``value`` is an instance of the configured container type
     and that every element (taken in sorted order) satisfies the inner
     validator.
     """
     validators.instance_of(self.container_type)(inst, a, value)
     for index, item in enumerate(sorted(value)):
         # Build a synthetic Attribute whose name encodes the validator in
         # use and the index being checked; otherwise a validation failure
         # is pretty confusing to diagnose.
         label = u"sorted({})[{}]".format(a.name, index)
         synthetic_attr = attr.Attribute(
             name=label,
             default=None,
             validator=self.validator,
             repr=False,
             cmp=False,
             hash=False,
             init=False,
         )
         self.validator(inst, synthetic_attr, item)
Exemplo n.º 5
0
 def test_repr(self):
     """
     The returned validator exposes an informative `__repr__`.
     """
     validator = instance_of(int)
     expected = "<instance_of validator for type <{type} 'int'>>".format(
         type=TYPE)
     assert expected == repr(validator)
Exemplo n.º 6
0
 def test_validator_others(self):
     """
     Attributes outside of attrs' control can still be set freely.
     """
     cls = make_class("C", {"a": attr("a", validator=instance_of(int))})
     instance = cls(1)
     instance.b = "foo"
     assert 1 == instance.a
     assert "foo" == instance.b
Exemplo n.º 7
0
 def test_repr(self):
     """
     The optional-wrapped validator exposes an informative `__repr__`.
     """
     validator = optional(instance_of(int))
     expected = (
         "<optional validator for <instance_of validator for type "
         "<{type} 'int'>> or None>"
     ).format(type=TYPE)
     assert expected == repr(validator)
Exemplo n.º 8
0
def test_lazy_model_init(model_type):
    """A freshly built LazyLoadableModel has no loaded model yet but keeps
    the type, context, and validator it was constructed with."""
    from attr import validators
    from coalaip.models import LazyLoadableModel

    ld_context = 'ld_context'
    validator = validators.instance_of(dict)
    model = LazyLoadableModel(ld_type=model_type, ld_context=ld_context,
                              validator=validator)

    assert model.loaded_model is None
    assert model.ld_type == model_type
    assert model.ld_context == ld_context
    assert model.validator == validator
Exemplo n.º 9
0
def test_model_immutable(model_data, model_type):
    """Every attribute of a Model rejects assignment after construction."""
    from attr import validators
    from attr.exceptions import FrozenInstanceError
    from coalaip.models import Model

    model = Model(data=model_data, ld_type=model_type)
    attempts = (
        ('data', {'other': 'other'}),
        ('ld_type', 'other_type'),
        ('ld_context', 'other_context'),
        ('validator', validators.instance_of(str)),
    )
    for attribute, new_value in attempts:
        with raises(FrozenInstanceError):
            setattr(model, attribute, new_value)
Exemplo n.º 10
0
    def test_fail(self):
        """
        A wrongly-typed value raises a `TypeError` carrying the message,
        the attribute, the expected type, and the offending value.
        """
        validator = instance_of(int)
        attribute = simple_attr("test")
        with pytest.raises(TypeError) as excinfo:
            validator(None, attribute, "42")
        expected_message = (
            "'test' must be <{type} 'int'> (got '42' that is a <{type} "
            "'str'>).".format(type=TYPE)
        )
        assert (expected_message, attribute, int, "42") == excinfo.value.args
Exemplo n.º 11
0
 def test_validator_others(self, slots):
     """
     Non-attrs attributes can be set on dict classes; slotted classes
     reject them with `AttributeError`.  The validator does not interfere.
     """
     C = make_class("C", {"a": attr("a", validator=instance_of(int))},
                    slots=slots)
     i = C(1)
     assert 1 == i.a
     if slots:
         with pytest.raises(AttributeError):
             i.b = "foo"
     else:
         i.b = "foo"
         assert "foo" == i.b
Exemplo n.º 12
0
class DisplayInterpreterSystem(UpdateSystem):
    """
    Parse the default output stream and write the result to the display buffer.
    """
    # Tab stops every _tab_width columns (used by the 0x09 handling below).
    _tab_width = attr.ib(validator=instance_of(int))
    # Codec used to decode individual bytes for the printability check.
    _encoding = attr.ib(validator=instance_of(str))

    @classmethod
    def create(cls, encoding="utf-8"):
        # Factory with a fixed tab width of 4; applies to entities carrying
        # a DisplayBuffer, an InputOutputStream, and a MachineState.
        return cls(
            component_types=(DisplayBuffer, InputOutputStream, MachineState),
            is_applicator=True,
            tab_width=4,
            encoding=encoding,
            log=cls.get_logger()
        )

    def update(self, time, delta_time, world, components):
        """
        For each entity with a DisplayBuffer, interpret the default output stream
        registered with the entity and output the result to the DisplayBuffer.

        :param time:
        :param delta_time:
        :param world:
        :param components:
        :return:
        """
        for buffer, stream, machine in components:
            if any((machine.power_up, machine.ready, machine.power_down)) and len(stream.output) > 0:
                try:
                    self._interpret(buffer, stream.output)
                finally:
                    # The stream is consumed even if interpretation raised.
                    stream.output.clear()

    def _interpret(self, buffer, byte_stream):
        # Write each byte of byte_stream into buffer, advancing the cursor
        # and honouring a small set of control characters. Unimplemented
        # controls emit FixmeWarnings; unknown bytes are only logged.
        for b in byte_stream:
            row, column = buffer.cursor

            # Parse the individual characters
            if b.to_bytes(1, sys.byteorder).decode(self._encoding).isprintable():
                buffer.buffer[row, column] = b.to_bytes(1, sys.byteorder)
                column += 1
            elif b == 0x00:
                warnings.warn("Null character not implemented.", FixmeWarning)
            elif b == 0x07:
                warnings.warn("Bell not implemented.", FixmeWarning)
            elif b == 0x08:
                warnings.warn("Backspace not implemented.", FixmeWarning)
            elif b == 0x09:
                # Horizontal tab: advance to the next multiple of _tab_width.
                column += self._tab_width - (column % self._tab_width)
            elif b == 0x0a:
                # Line feed: start of the next row.
                column = 0
                row += 1
            elif b == 0x0b:
                warnings.warn("Vertical tab not implemented.", FixmeWarning)
            elif b == 0x0c:
                warnings.warn("Form feed not implemented.", FixmeWarning)
            elif b == 0x0d:
                warnings.warn("Carriage return not implemented.", FixmeWarning)
            elif b == 0x1a:
                warnings.warn("End of file not implemented.", FixmeWarning)
            elif b == 0x1b:
                warnings.warn("Escape character not implemented.", FixmeWarning)
            elif b == 0x7f:
                warnings.warn("Delete character not implemented.", FixmeWarning)
            else:
                self._log.debug("Got an unhandled character: {!r}".format(b))

            # Wrap around the beginning and end of a row.
            if column >= buffer.shape[1]:
                column = 0
                row += 1
            elif column < 0:
                column = buffer.shape[1] - 1
                row -= 1

            buffer.cursor = (row, column)
Exemplo n.º 13
0
class PerceptionGraphWithReplacedObjectResult:
    # The perception graph produced by the replacement operation.
    perception_graph_after_replacement: PerceptionGraph = attrib(
        validator=instance_of(PerceptionGraph)
    )
    # The nodes that were removed in the process.
    removed_nodes: ImmutableSet[PerceptionGraphNode] = attrib(
        validator=instance_of(ImmutableSet)
    )
Exemplo n.º 14
0
class _SubchannelAddress(object):
    # Subchannel id; any integer type accepted on this Python version.
    _scid = attrib(validator=instance_of(six.integer_types))
Exemplo n.º 15
0
 def test_success_with_none(self):
     """
     Nothing happens when the value is None.
     """
     validator = optional(instance_of(int))
     validator(None, simple_attr("test"), None)
Exemplo n.º 16
0
class DependencyTree:
    r"""
    A syntactic dependency tree.

    This consists of `DependencyTreeToken`\ s
    connected by edges labelled with `DependencyRole`\ s.
    Edges run from modifiers to heads.

    Note a `DependencyTree` is not a `LinguisticDescription`
    because it does not provide a surface token string,
    since the dependencies are unordered.

    You can pair a `DependencyTree` with a surface order
    to create an `LinearizedDependencyTree`.
    """

    _graph: DiGraph = attrib(validator=instance_of(DiGraph))
    root: "DependencyTreeToken" = attrib(init=False)
    """
    The unique root `DependencyTreeToken` of the tree.

    This is the single token which does not modify any other token.
    """
    tokens: ImmutableSet["DependencyTreeToken"] = attrib(init=False)
    r"""
    The set of all `DependencyTreeToken`\ s appearing in this tree.
    """
    def modifiers(
        self, head: "DependencyTreeToken"
    ) -> ImmutableSet[Tuple["DependencyTreeToken", "DependencyRole"]]:
        r"""
        All `DependencyTreeToken`\ s modifying *head* and their `DependencyRole`\ s.

        Returns:
            A set of (`DependencyTreeToken`, `DependencyRole`) tuples
            corresponding to all modifications of *head*.
        """
        # Modifiers point *at* their head, so look at incoming edges.
        return immutableset(
            ((source, role)
             for (source, target,
                  role) in self._graph.in_edges(head, data="role")),
            disable_order_check=True,
        )

    @root.default
    def _init_root(self) -> "DependencyTreeToken":
        """
        Locate the unique node with no outgoing edges.

        Raises:
            RuntimeError: if the graph is empty, has no root, or has
                more than one root.
        """
        roots = [
            node for node in self._graph.nodes()
            if self._graph.out_degree(node) == 0
        ]
        if len(roots) == 1:
            return roots[0]
        elif roots:
            raise RuntimeError(f"Dependency tree has multiple roots: {roots}")
        else:
            if self._graph:
                raise RuntimeError("Dependency tree has no roots")
            else:
                raise RuntimeError(
                    "Cannot initialize a dependency tree from an empty graph")

    @tokens.default
    def _init_tokens(self) -> ImmutableSet["DependencyTreeToken"]:
        """Collect all graph nodes as the tree's token set."""
        return immutableset(self._graph.nodes, disable_order_check=True)

    def __attrs_post_init__(self) -> None:
        """Reject construction if any edge lacks a ``role`` annotation."""
        bad_edges = [(source, target)
                     for (source, target,
                          role) in self._graph.edges(data="role")
                     if role is None]
        if bad_edges:
            raise RuntimeError(
                "Cannot construct a dependency tree with edges which lack roles: "
                # Fixed: the formatted pair previously lacked its closing
                # parenthesis, producing "(a, b" in the error message.
                + ", ".join(f"({source}, {target})"
                            for (source, target) in bad_edges))
Exemplo n.º 17
0
class Delete(Request):
    # Identifier of the physical resource targeted by this request.
    physical_resource_id = attr.ib(validator=instance_of(str))
Exemplo n.º 18
0
class TestDeepIterable:
    """
    Tests for `deep_iterable`.
    """
    def test_in_all(self):
        """
        Verify that this validator is in ``__all__``.
        """
        assert deep_iterable.__name__ in validator_module.__all__

    def test_success_member_only(self, member_validator):
        """
        If the member validator succeeds and the iterable validator is not set,
        nothing happens.
        """
        v = deep_iterable(member_validator)
        a = simple_attr("test")
        v(None, a, [42])

    def test_success_member_and_iterable(self, member_validator):
        """
        If both the member and iterable validators succeed, nothing happens.
        """
        iterable_validator = instance_of(list)
        v = deep_iterable(member_validator, iterable_validator)
        a = simple_attr("test")
        v(None, a, [42])

    # Every combination here contains at least one non-callable (42),
    # including lists of member validators with a non-callable entry.
    @pytest.mark.parametrize(
        "member_validator, iterable_validator",
        (
            (instance_of(int), 42),
            (42, instance_of(list)),
            (42, 42),
            (42, None),
            ([instance_of(int), 42], 42),
            ([42, instance_of(int)], 42),
        ),
    )
    def test_noncallable_validators(self, member_validator,
                                    iterable_validator):
        """
        Raise `TypeError` if any validators are not callable.
        """
        with pytest.raises(TypeError) as e:
            deep_iterable(member_validator, iterable_validator)
        value = 42
        message = "must be callable (got {value} that is a {type_}).".format(
            value=value, type_=value.__class__)

        # The error exposes the message/value both via args and via the
        # .msg/.value attributes.
        assert message in e.value.args[0]
        assert value == e.value.args[1]
        assert message in e.value.msg
        assert value == e.value.value

    def test_fail_invalid_member(self, member_validator):
        """
        Raise member validator error if an invalid member is found.
        """
        v = deep_iterable(member_validator)
        a = simple_attr("test")
        with pytest.raises(TypeError):
            v(None, a, [42, "42"])

    def test_fail_invalid_iterable(self, member_validator):
        """
        Raise iterable validator error if an invalid iterable is found.
        """
        member_validator = instance_of(int)
        iterable_validator = instance_of(tuple)
        v = deep_iterable(member_validator, iterable_validator)
        a = simple_attr("test")
        with pytest.raises(TypeError):
            v(None, a, [42])

    def test_fail_invalid_member_and_iterable(self, member_validator):
        """
        Raise iterable validator error if both the iterable
        and a member are invalid.
        """
        iterable_validator = instance_of(tuple)
        v = deep_iterable(member_validator, iterable_validator)
        a = simple_attr("test")
        with pytest.raises(TypeError):
            v(None, a, [42, "42"])

    def test_repr_member_only(self):
        """
        Returned validator has a useful `__repr__`
        when only member validator is set.
        """
        member_validator = instance_of(int)
        member_repr = "<instance_of validator for type <class 'int'>>"
        v = deep_iterable(member_validator)
        expected_repr = (
            "<deep_iterable validator for iterables of {member_repr}>").format(
                member_repr=member_repr)
        assert expected_repr == repr(v)

    def test_repr_member_only_sequence(self):
        """
        Returned validator has a useful `__repr__`
        when only member validator is set and the member validator is a list of
        validators
        """
        member_validator = [always_pass, instance_of(int)]
        # A list of member validators is combined into an _AndValidator,
        # which is what the repr reflects.
        member_repr = (
            "_AndValidator(_validators=({func}, "
            "<instance_of validator for type <class 'int'>>))").format(
                func=repr(always_pass))
        v = deep_iterable(member_validator)
        expected_repr = (
            "<deep_iterable validator for iterables of {member_repr}>").format(
                member_repr=member_repr)
        assert expected_repr == repr(v)

    def test_repr_member_and_iterable(self):
        """
        Returned validator has a useful `__repr__` when both member
        and iterable validators are set.
        """
        member_validator = instance_of(int)
        member_repr = "<instance_of validator for type <class 'int'>>"
        iterable_validator = instance_of(list)
        iterable_repr = "<instance_of validator for type <class 'list'>>"
        v = deep_iterable(member_validator, iterable_validator)
        expected_repr = (
            "<deep_iterable validator for"
            " {iterable_repr} iterables of {member_repr}>").format(
                iterable_repr=iterable_repr, member_repr=member_repr)
        assert expected_repr == repr(v)

    def test_repr_sequence_member_and_iterable(self):
        """
        Returned validator has a useful `__repr__` when both member
        and iterable validators are set and the member validator is a list of
        validators
        """
        member_validator = [always_pass, instance_of(int)]
        member_repr = (
            "_AndValidator(_validators=({func}, "
            "<instance_of validator for type <class 'int'>>))").format(
                func=repr(always_pass))
        iterable_validator = instance_of(list)
        iterable_repr = "<instance_of validator for type <class 'list'>>"
        v = deep_iterable(member_validator, iterable_validator)
        expected_repr = (
            "<deep_iterable validator for"
            " {iterable_repr} iterables of {member_repr}>").format(
                iterable_repr=iterable_repr, member_repr=member_repr)

        assert expected_repr == repr(v)
Exemplo n.º 19
0
class TestDeepMapping:
    """
    Tests for `deep_mapping`.
    """
    def test_in_all(self):
        """
        Verify that this validator is in ``__all__``.
        """
        assert deep_mapping.__name__ in validator_module.__all__

    def test_success(self):
        """
        If both the key and value validators succeed, nothing happens.
        """
        key_validator = instance_of(str)
        value_validator = instance_of(int)
        v = deep_mapping(key_validator, value_validator)
        a = simple_attr("test")
        v(None, a, {"a": 6, "b": 7})

    # Every combination contains at least one non-callable (42) in the
    # key, value, or mapping validator position.
    @pytest.mark.parametrize(
        "key_validator, value_validator, mapping_validator",
        (
            (42, instance_of(int), None),
            (instance_of(str), 42, None),
            (instance_of(str), instance_of(int), 42),
            (42, 42, None),
            (42, 42, 42),
        ),
    )
    def test_noncallable_validators(self, key_validator, value_validator,
                                    mapping_validator):
        """
        Raise `TypeError` if any validators are not callable.
        """
        with pytest.raises(TypeError) as e:
            deep_mapping(key_validator, value_validator, mapping_validator)

        value = 42
        message = "must be callable (got {value} that is a {type_}).".format(
            value=value, type_=value.__class__)

        # The error exposes the message/value both via args and via the
        # .msg/.value attributes.
        assert message in e.value.args[0]
        assert value == e.value.args[1]
        assert message in e.value.msg
        assert value == e.value.value

    def test_fail_invalid_mapping(self):
        """
        Raise `TypeError` if mapping validator fails.
        """
        key_validator = instance_of(str)
        value_validator = instance_of(int)
        mapping_validator = instance_of(dict)
        v = deep_mapping(key_validator, value_validator, mapping_validator)
        a = simple_attr("test")
        with pytest.raises(TypeError):
            v(None, a, None)

    def test_fail_invalid_key(self):
        """
        Raise key validator error if an invalid key is found.
        """
        key_validator = instance_of(str)
        value_validator = instance_of(int)
        v = deep_mapping(key_validator, value_validator)
        a = simple_attr("test")
        with pytest.raises(TypeError):
            v(None, a, {"a": 6, 42: 7})

    def test_fail_invalid_member(self):
        """
        Raise key validator error if an invalid member value is found.
        """
        key_validator = instance_of(str)
        value_validator = instance_of(int)
        v = deep_mapping(key_validator, value_validator)
        a = simple_attr("test")
        with pytest.raises(TypeError):
            v(None, a, {"a": "6", "b": 7})

    def test_repr(self):
        """
        Returned validator has a useful `__repr__`.
        """
        key_validator = instance_of(str)
        key_repr = "<instance_of validator for type <class 'str'>>"
        value_validator = instance_of(int)
        value_repr = "<instance_of validator for type <class 'int'>>"
        v = deep_mapping(key_validator, value_validator)
        expected_repr = ("<deep_mapping validator for objects mapping "
                         "{key_repr} to {value_repr}>").format(
                             key_repr=key_repr, value_repr=value_repr)
        assert expected_repr == repr(v)
Exemplo n.º 20
0
 def test_repr(self):
     """
     The returned validator exposes an informative `__repr__`.
     """
     validator = instance_of(int)
     expected = "<instance_of validator for type <class 'int'>>"
     assert expected == repr(validator)
Exemplo n.º 21
0
 class C:
     # A validator may be wrapped in and_() explicitly ...
     a1 = attr.ib("a1", validator=and_(instance_of(int)))
     # ... or supplied as a list.
     a2 = attr.ib("a2", validator=[instance_of(int)])
Exemplo n.º 22
0
            ifoo,
            value,
        ) == e.value.args

    def test_repr(self, ifoo):
        """
        The returned validator exposes an informative `__repr__`.
        """
        validator = provides(ifoo)
        expected = "<provides validator for interface {interface!r}>".format(
            interface=ifoo)
        assert expected == repr(validator)


@pytest.mark.parametrize(
    "validator",
    [instance_of(int), [always_pass, instance_of(int)]])
class TestOptional:
    """
    Tests for `optional`.
    """
    def test_in_all(self, validator):
        """
        ``optional`` is exported through the validator module's ``__all__``.
        """
        assert optional.__name__ in validator_module.__all__

    def test_success(self, validator):
        """
        Nothing happens if validator succeeds.
        """
        v = optional(validator)
Exemplo n.º 23
0
class Manager(object):
    # Send-capable object used to deliver "dilate-N" phase messages.
    _S = attrib(validator=provides(ISend), repr=False)
    # Our side id (text); compared with the peer's in choose_role to pick
    # LEADER vs FOLLOWER.
    _my_side = attrib(validator=instance_of(type(u"")))
    # Optional relay location, handed to the Connector when connecting.
    _transit_relay_location = attrib(validator=optional(instance_of(str)))
    _reactor = attrib(repr=False)
    _eventual_queue = attrib(repr=False)
    _cooperator = attrib(repr=False)
    # TODO: can this validator work when the parameter is optional?
    _no_listen = attrib(validator=instance_of(bool), default=False)

    _dilation_key = None
    _tor = None  # TODO
    _timing = None  # TODO
    _next_subchannel_id = None  # initialized in choose_role

    # State machine declaration; states/inputs/outputs are defined below.
    m = MethodicalMachine()
    set_trace = getattr(m, "_setTrace",
                        lambda self, f: None)  # pragma: no cover

    def __attrs_post_init__(self):
        """Set up per-instance state, subchannel zero, and the endpoints."""
        self._got_versions_d = Deferred()

        self._my_role = None  # determined upon rx_PLEASE
        self._host_addr = _WormholeAddress()

        self._connection = None
        self._made_first_connection = False
        self._stopped = OneShotObserver(self._eventual_queue)
        self._debug_stall_connector = False

        self._next_dilation_phase = 0

        # I kept getting confused about which methods were for inbound data
        # (and thus flow-control methods go "out") and which were for
        # outbound data (with flow-control going "in"), so I split them up
        # into separate pieces.
        self._inbound = Inbound(self, self._host_addr)
        self._outbound = Outbound(self, self._cooperator)  # from us to peer

        # We must open subchannel0 early, since messages may arrive very
        # quickly once the connection is established. This subchannel may or
        # may not ever get revealed to the caller, since the peer might not
        # even be capable of dilation.
        scid0 = 0
        peer_addr0 = _SubchannelAddress(scid0)
        sc0 = SubChannel(scid0, self, self._host_addr, peer_addr0)
        self._inbound.set_subchannel_zero(scid0, sc0)

        # we can open non-zero subchannels as soon as we get our first
        # connection, and we can make the Endpoints even earlier
        control_ep = ControlEndpoint(peer_addr0, sc0, self._eventual_queue)
        connect_ep = SubchannelConnectorEndpoint(self, self._host_addr,
                                                 self._eventual_queue)
        listen_ep = SubchannelListenerEndpoint(self, self._host_addr,
                                               self._eventual_queue)
        # TODO: let inbound/outbound create the endpoints, then return them
        # to us
        self._inbound.set_listener_endpoint(listen_ep)

        self._endpoints = EndpointRecord(control_ep, connect_ep, listen_ep)

    def get_endpoints(self):
        """Return the (control, connect, listen) EndpointRecord built at init."""
        return self._endpoints

    def got_dilation_key(self, key):
        """Store the dilation key (must be bytes) for later connection setup."""
        assert isinstance(key, bytes)
        self._dilation_key = key

    def got_wormhole_versions(self, their_wormhole_versions):
        """Pick the best mutually-supported dilation version, then start."""
        # this always happens before received_dilation_message
        dilation_version = None
        their_dilation_versions = set(
            their_wormhole_versions.get("can-dilate", []))
        my_versions = set(DILATION_VERSIONS)
        shared_versions = my_versions.intersection(their_dilation_versions)
        if "1" in shared_versions:
            dilation_version = "1"

        # dilation_version is the best mutually-compatible version we have
        # with the peer, or None if we have nothing in common

        if not dilation_version:  # "1" or None
            # TODO: be more specific about the error. dilation_version==None
            # means we had no version in common with them, which could either
            # be because they're so old they don't dilate at all, or because
            # they're so new that they no longer accommodate our old version
            self.fail(failure.Failure(OldPeerCannotDilateError()))
            # NOTE(review): control falls through — start() below still runs
            # after fail(); confirm this is intentional.

        self.start()

    def fail(self, f):
        """Propagate failure *f* to all three endpoint records."""
        for endpoint in (self._endpoints.control,
                         self._endpoints.connect,
                         self._endpoints.listen):
            endpoint._main_channel_failed(f)

    def received_dilation_message(self, plaintext):
        """
        Dispatch one new in-order DILATE-n payload (decrypted but not yet
        de-JSONed) to the matching state-machine input.
        """
        message = bytes_to_dict(plaintext)
        message_type = message["type"]
        if message_type == "please":
            self.rx_PLEASE(message)
        elif message_type == "connection-hints":
            self.rx_HINTS(message)
        elif message_type == "reconnect":
            self.rx_RECONNECT()
        elif message_type == "reconnecting":
            self.rx_RECONNECTING()
        else:
            # Unknown message types are logged but otherwise ignored.
            log.err(UnknownDilationMessageType(message))

    def when_stopped(self):
        """Return a Deferred that fires once this Manager has stopped."""
        return self._stopped.when_fired()

    def send_dilation_phase(self, **fields):
        """Send *fields* to the peer as the next numbered dilate-N message."""
        dilation_phase = self._next_dilation_phase
        self._next_dilation_phase += 1
        self._S.send("dilate-%d" % dilation_phase, dict_to_bytes(fields))

    def send_hints(self, hints):  # from Connector
        """Forward connection *hints* to the peer as a dilation phase message."""
        self.send_dilation_phase(type="connection-hints", hints=hints)

    # forward inbound-ish things to _Inbound

    def subchannel_pauseProducing(self, sc):
        """Delegate a subchannel's pauseProducing to the Inbound manager."""
        self._inbound.subchannel_pauseProducing(sc)

    def subchannel_resumeProducing(self, sc):
        """Delegate a subchannel's resumeProducing to the Inbound manager."""
        self._inbound.subchannel_resumeProducing(sc)

    def subchannel_stopProducing(self, sc):
        """Delegate a subchannel's stopProducing to the Inbound manager."""
        self._inbound.subchannel_stopProducing(sc)

    def subchannel_local_open(self, scid, sc):
        """Register a locally-opened subchannel with the Inbound manager."""
        self._inbound.subchannel_local_open(scid, sc)

    # forward outbound-ish things to _Outbound
    def subchannel_registerProducer(self, sc, producer, streaming):
        """Delegate producer registration to the Outbound manager."""
        self._outbound.subchannel_registerProducer(sc, producer, streaming)

    def subchannel_unregisterProducer(self, sc):
        """Delegate producer unregistration to the Outbound manager."""
        self._outbound.subchannel_unregisterProducer(sc)

    def send_open(self, scid):
        """Queue and send an Open record for subchannel *scid*."""
        assert isinstance(scid, six.integer_types)
        self._queue_and_send(Open, scid)

    def send_data(self, scid, data):
        """Queue and send a Data record carrying *data* on subchannel *scid*."""
        assert isinstance(scid, six.integer_types)
        self._queue_and_send(Data, scid, data)

    def send_close(self, scid):
        """Queue and send a Close record for subchannel *scid*."""
        assert isinstance(scid, six.integer_types)
        self._queue_and_send(Close, scid)

    def _queue_and_send(self, record_type, *args):
        """Build a record of *record_type* and hand it to Outbound for delivery."""
        record = self._outbound.build_record(record_type, *args)
        # Outbound owns the send_record() pipe, so that it can stall new
        # writes after a new connection is made until after all queued
        # messages are written (to preserve ordering).
        self._outbound.queue_and_send_record(record)  # may trigger pauseProducing

    def subchannel_closed(self, scid, sc):
        """
        Let both traffic directions clean up after a subchannel closes.

        This happens just after we delivered connectionLost to the Protocol,
        except for the control channel, which might get connectionLost later
        after they use ep.connect.  TODO: is this inversion a problem?
        """
        self._inbound.subchannel_closed(scid, sc)
        self._outbound.subchannel_closed(scid, sc)

    # our Connector calls these

    def connector_connection_made(self, c):
        """
        Accept a newly established connection from our Connector.

        Updates the state machine, wires the connection into the inbound
        and outbound managers, and on the very first connection marks all
        three endpoints as ready.
        """
        self.connection_made()  # state machine update
        self._connection = c
        self._inbound.use_connection(c)
        self._outbound.use_connection(c)  # does c.registerProducer
        if not self._made_first_connection:
            self._made_first_connection = True
            self._endpoints.control._main_channel_ready()
            self._endpoints.connect._main_channel_ready()
            self._endpoints.listen._main_channel_ready()
        # (removed a dead trailing `pass` statement)

    def connector_connection_lost(self):
        """Tear down the lost connection and notify the state machine."""
        self._stop_using_connection()
        # The state-machine input depends on which role we negotiated.
        notify = (self.connection_lost_leader
                  if self._my_role is LEADER
                  else self.connection_lost_follower)
        notify()

    def _stop_using_connection(self):
        """Drop the (already lost) connection from both traffic directions."""
        self._connection = None
        self._inbound.stop_using_connection()
        self._outbound.stop_using_connection()  # does c.unregisterProducer

    # from our active Connection

    def got_record(self, r):
        """Route one record received from the active connection."""
        # records with sequence numbers: always ack, ignore old ones
        if isinstance(r, (Open, Data, Close)):
            self.send_ack(r.seqnum)  # always ack, even for old ones
            if self._inbound.is_record_old(r):
                return
            self._inbound.update_ack_watermark(r.seqnum)
            if isinstance(r, Open):
                self._inbound.handle_open(r.scid)
            elif isinstance(r, Data):
                self._inbound.handle_data(r.scid, r.data)
            else:  # isinstance(r, Close)
                self._inbound.handle_close(r.scid)
            return
        # unsequenced records: KCM is unexpected here, pings/pongs are
        # answered directly, acks retire queued outbound messages
        if isinstance(r, KCM):
            log.err(UnexpectedKCM())
        elif isinstance(r, Ping):
            self.handle_ping(r.ping_id)
        elif isinstance(r, Pong):
            self.handle_pong(r.ping_id)
        elif isinstance(r, Ack):
            self._outbound.handle_ack(r.resp_seqnum)  # retire queued messages
        else:
            log.err(UnknownMessageType("{}".format(r)))

    # pings, pongs, and acks are not queued
    def send_ping(self, ping_id):
        """Send a Ping immediately if a connection is up (never queued)."""
        self._outbound.send_if_connected(Ping(ping_id))

    def send_pong(self, ping_id):
        """Send a Pong immediately if a connection is up (never queued)."""
        self._outbound.send_if_connected(Pong(ping_id))

    def send_ack(self, resp_seqnum):
        """Ack *resp_seqnum* immediately if a connection is up (never queued)."""
        self._outbound.send_if_connected(Ack(resp_seqnum))

    def handle_ping(self, ping_id):
        """Answer an incoming Ping with a matching Pong."""
        self.send_pong(ping_id)

    def handle_pong(self, ping_id):
        # TODO: update is-alive timer
        pass

    # subchannel maintenance
    def allocate_subchannel_id(self):
        """Hand out the next subchannel id in our role's series (step of 2)."""
        allocated = self._next_subchannel_id
        self._next_subchannel_id = allocated + 2
        return allocated

    # state machine

    # States are declared on the MethodicalMachine `m`; the method bodies
    # are never executed (hence the no-cover pragmas).

    @m.state(initial=True)
    def WAITING(self):
        pass  # pragma: no cover

    @m.state()
    def WANTING(self):
        pass  # pragma: no cover

    @m.state()
    def CONNECTING(self):
        pass  # pragma: no cover

    @m.state()
    def CONNECTED(self):
        pass  # pragma: no cover

    @m.state()
    def FLUSHING(self):
        pass  # pragma: no cover

    @m.state()
    def ABANDONING(self):
        pass  # pragma: no cover

    @m.state()
    def LONELY(self):
        pass  # pragma: no cover

    @m.state()
    def STOPPING(self):
        pass  # pragma: no cover

    # terminal=True: no transitions leave STOPPED.
    @m.state(terminal=True)
    def STOPPED(self):
        pass  # pragma: no cover

    # Inputs are events fed into the state machine; the bodies below are
    # never executed.

    @m.input()
    def start(self):
        pass  # pragma: no cover

    @m.input()
    def rx_PLEASE(self, message):
        pass  # pragma: no cover

    @m.input()  # only sent by Follower
    def rx_HINTS(self, hint_message):
        pass  # pragma: no cover

    @m.input()  # only Leader sends RECONNECT, so only Follower receives it
    def rx_RECONNECT(self):
        pass  # pragma: no cover

    @m.input()  # only Follower sends RECONNECTING, so only Leader receives it
    def rx_RECONNECTING(self):
        pass  # pragma: no cover

    # Connector gives us connection_made()
    @m.input()
    def connection_made(self):
        pass  # pragma: no cover

    # our connection_lost() fires connection_lost_leader or
    # connection_lost_follower depending upon our role. If either side sees a
    # problem with the connection (timeouts, bad authentication) then they
    # just drop it and let connection_lost() handle the cleanup.
    @m.input()
    def connection_lost_leader(self):
        pass  # pragma: no cover

    @m.input()
    def connection_lost_follower(self):
        pass

    @m.input()
    def stop(self):
        pass  # pragma: no cover

    @m.output()
    def send_please(self):
        """Send the dilation PLEASE message announcing our side id."""
        fields = dict(type="please", side=self._my_side)
        self.send_dilation_phase(**fields)

    @m.output()
    def choose_role(self, message):
        """Decide Leader vs Follower by comparing side strings.

        The side with the larger string becomes LEADER. Also seeds the
        subchannel-id counter: scid 0 is reserved for the control channel,
        the Leader uses odd ids starting at 1, the Follower even ids
        starting at 2. Equal sides mean we are talking to our own
        reflection, which is an error.
        """
        their_side = message["side"]
        if self._my_side == their_side:
            raise ValueError("their side shouldn't be equal: reflection?")
        if self._my_side > their_side:
            self._my_role = LEADER
            self._next_subchannel_id = 1
        else:
            self._my_role = FOLLOWER
            self._next_subchannel_id = 2

    # these Outputs behave differently for the Leader vs the Follower

    @m.output()
    def start_connecting_ignore_message(self, message):
        """Start a Connector; the triggering message's content is irrelevant."""
        del message  # ignored
        return self._start_connecting()

    @m.output()
    def start_connecting(self):
        """Kick off a fresh connection attempt via a new Connector."""
        self._start_connecting()

    def _start_connecting(self):
        """Build a new Connector for this dilation key and start it.

        Requires that choose_role() already ran (role decided) and that
        the dilation key has been derived.
        """
        assert self._my_role is not None
        assert self._dilation_key is not None
        self._connector = Connector(
            self._dilation_key,
            self._transit_relay_location,
            self,
            self._reactor,
            self._eventual_queue,
            self._no_listen,
            self._tor,
            self._timing,
            self._my_side,  # needed for relay handshake
            self._my_role)
        if self._debug_stall_connector:
            # unit tests use this hook to send messages while we know we
            # don't have a connection
            self._eventual_queue.eventually(self._debug_stall_connector,
                                            self._connector)
            return
        self._connector.start()

    @m.output()
    def send_reconnect(self):
        """Ask the peer to reconnect (sent by the Leader side)."""
        fields = dict(type="reconnect")  # TODO: generation number?
        self.send_dilation_phase(**fields)

    @m.output()
    def send_reconnecting(self):
        """Announce that we are reconnecting (sent by the Follower side)."""
        fields = dict(type="reconnecting")  # TODO: generation?
        self.send_dilation_phase(**fields)

    @m.output()
    def use_hints(self, hint_message):
        """Parse freshly-received connection hints and give them to the
        Connector.

        Hints that parse_hint() cannot recognize come back as None and
        are dropped before delivery.
        """
        # comprehension replaces the old filter(lambda h: h, [...]) idiom;
        # both discard falsy (None/unrecognizable) entries
        hint_objs = [hint
                     for hint in (parse_hint(hs)
                                  for hs in hint_message["hints"])
                     if hint]
        self._connector.got_hints(hint_objs)

    @m.output()
    def stop_connecting(self):
        """Abort the in-progress Connector attempt."""
        self._connector.stop()

    @m.output()
    def abandon_connection(self):
        """Drop the live connection; connection_lost() does the cleanup."""
        # we think we're still connected, but the Leader disagrees. Or we've
        # been told to shut down.
        self._connection.disconnect()  # let connection_lost do cleanup

    @m.output()
    def notify_stopped(self):
        """Fire the _stopped hook so anyone waiting on shutdown proceeds."""
        self._stopped.fire(None)

    # Transition table: each X.upon(input, enter=Y, outputs=[...]) registers
    # one edge of the Automat machine. Declaration order matters only in
    # that states/inputs/outputs must already exist above.

    # We are born WAITING after the local app calls w.dilate(). We enter
    # WANTING (and send a PLEASE) when we learn of a mutually-compatible
    # dilation_version.
    WAITING.upon(start, enter=WANTING, outputs=[send_please])

    # we start CONNECTING when we get rx_PLEASE
    WANTING.upon(rx_PLEASE,
                 enter=CONNECTING,
                 outputs=[choose_role, start_connecting_ignore_message])

    CONNECTING.upon(connection_made, enter=CONNECTED, outputs=[])

    # Leader
    CONNECTED.upon(connection_lost_leader,
                   enter=FLUSHING,
                   outputs=[send_reconnect])
    FLUSHING.upon(rx_RECONNECTING,
                  enter=CONNECTING,
                  outputs=[start_connecting])

    # Follower
    # if we notice a lost connection, just wait for the Leader to notice too
    CONNECTED.upon(connection_lost_follower, enter=LONELY, outputs=[])
    LONELY.upon(rx_RECONNECT,
                enter=CONNECTING,
                outputs=[send_reconnecting, start_connecting])
    # but if they notice it first, abandon our (seemingly functional)
    # connection, then tell them that we're ready to try again
    CONNECTED.upon(rx_RECONNECT,
                   enter=ABANDONING,
                   outputs=[abandon_connection])
    ABANDONING.upon(connection_lost_follower,
                    enter=CONNECTING,
                    outputs=[send_reconnecting, start_connecting])
    # and if they notice a problem while we're still connecting, abandon our
    # incomplete attempt and try again. in this case we don't have to wait
    # for a connection to finish shutdown
    CONNECTING.upon(
        rx_RECONNECT,
        enter=CONNECTING,
        outputs=[stop_connecting, send_reconnecting, start_connecting])

    # rx_HINTS never changes state, they're just accepted or ignored
    WANTING.upon(rx_HINTS, enter=WANTING, outputs=[])  # too early
    CONNECTING.upon(rx_HINTS, enter=CONNECTING, outputs=[use_hints])
    CONNECTED.upon(rx_HINTS, enter=CONNECTED, outputs=[])  # too late, ignore
    FLUSHING.upon(rx_HINTS, enter=FLUSHING, outputs=[])  # stale, ignore
    LONELY.upon(rx_HINTS, enter=LONELY, outputs=[])  # stale, ignore
    ABANDONING.upon(rx_HINTS, enter=ABANDONING, outputs=[])  # shouldn't happen
    STOPPING.upon(rx_HINTS, enter=STOPPING, outputs=[])

    # stop() handling depends on how far we got
    WAITING.upon(stop, enter=STOPPED, outputs=[notify_stopped])
    WANTING.upon(stop, enter=STOPPED, outputs=[notify_stopped])
    CONNECTING.upon(stop,
                    enter=STOPPED,
                    outputs=[stop_connecting, notify_stopped])
    CONNECTED.upon(stop, enter=STOPPING, outputs=[abandon_connection])
    ABANDONING.upon(stop, enter=STOPPING, outputs=[])
    FLUSHING.upon(stop, enter=STOPPED, outputs=[notify_stopped])
    LONELY.upon(stop, enter=STOPPED, outputs=[notify_stopped])
    STOPPING.upon(connection_lost_leader,
                  enter=STOPPED,
                  outputs=[notify_stopped])
    STOPPING.upon(connection_lost_follower,
                  enter=STOPPED,
                  outputs=[notify_stopped])
Exemplo n.º 24
0
class DisplaySystem(UpdateSystem):
    """
    Copy the data from the terminal display buffer to a texture.
    """
    # TTF font handle used to render the buffer text
    _font = attr.ib(validator=instance_of(sdl2.sdlttf.TTF_Font))
    # color applied to all rendered glyphs
    _font_color = attr.ib(validator=instance_of(sdl2.pixels.SDL_Color))
    # point size the font was opened with
    _font_size = attr.ib(validator=instance_of(int))
    # renderer that owns the target textures
    _renderer = attr.ib(validator=instance_of(sdl2.render.SDL_Renderer))

    @classmethod
    def create(cls, renderer, resource_manager,
               font_name="CourierCode-Roman.ttf", font_size=14, font_color=(0xff, 0xff, 0xff, 0xff)):
        """
        Create a terminal display system.

        Opens the named TTF font via the resource manager and raises
        SDLTTFError if the font cannot be loaded.

        :return:
        """
        color = sdl2.pixels.SDL_Color(*font_color)
        font_path = resource_manager.get_path(font_name)
        font = sdl2.sdlttf.TTF_OpenFont(font_path.encode("utf-8"), font_size)
        if not font:
            raise SDLTTFError()

        return cls(
            component_types=(DisplayBuffer, MachineState, Sprite),
            is_applicator=True,
            font=font.contents,
            font_color=color,
            font_size=font_size,
            renderer=renderer.contents,
            log=cls.get_logger()
        )

    def update(self, time, delta_time, world, components):
        """
        For each entity which has a Sprite and a DisplayBuffer,
        copy the contents of the DisplayBuffer to the Sprite for
        rendering.

        :param time:
        :param delta_time:
        :param world:
        :param components:
        :return:
        """
        for buffer, machine, sprite in components:
            # only redraw when the machine is in a visible state and the
            # buffer actually has new, non-empty content
            if any((machine.power_up, machine.ready, machine.power_down)) and (not buffer.empty and buffer.modified):
                surf = sdl2.sdlttf.TTF_RenderUTF8_Blended_Wrapped(
                    self._font, buffer.to_bytes(), self._font_color, sprite.shape[0]
                )
                if not surf:
                    raise SDLTTFError()

                try:
                    tx = sdl2.render.SDL_CreateTextureFromSurface(
                        self._renderer, surf.contents
                    )
                    if not tx:
                        raise SDLTTFError()

                    try:
                        # clamp the copy rectangle so we never draw outside
                        # the sprite's texture
                        min_shape = [min(a, b) for a, b in zip(self._get_tx_shape(tx), sprite.shape)]
                        dest_rect = sdl2.render.SDL_Rect(0, 0, *min_shape)

                        if sdl2.render.SDL_SetRenderTarget(self._renderer, sprite.texture) != 0:
                            raise SDLError()

                        if sdl2.render.SDL_RenderClear(self._renderer) != 0:
                            raise SDLError()

                        if sdl2.render.SDL_RenderCopy(self._renderer, tx.contents, None, dest_rect) != 0:
                            raise SDLError()

                        # restore the default render target
                        if sdl2.render.SDL_SetRenderTarget(self._renderer, None) != 0:
                            raise SDLError()
                    finally:
                        sdl2.render.SDL_DestroyTexture(tx)

                finally:
                    sdl2.surface.SDL_FreeSurface(surf)

    def _get_text_shape(self, text_bytes):
        # Measure the pixel size of rendered text. NOTE(review): currently
        # unused within this class — confirm whether callers outside rely
        # on it before removing.
        text_width = ctypes.c_int()
        text_height = ctypes.c_int()
        if sdl2.sdlttf.TTF_SizeUTF8(self._font, text_bytes, ctypes.byref(text_width), ctypes.byref(text_height)) != 0:
            raise SDLTTFError()

        return text_width.value, text_height.value

    def _get_tx_shape(self, texture):
        """
        Determine the texture shape.

        :param texture: an SDL texture pointer
        :return: (width, height) in pixels
        """
        flags = ctypes.c_uint32()
        access = ctypes.c_int()
        width = ctypes.c_int()
        height = ctypes.c_int()
        if sdl2.render.SDL_QueryTexture(
                texture, ctypes.byref(flags), ctypes.byref(access), ctypes.byref(width), ctypes.byref(height)) != 0:
            raise SDLError()

        return width.value, height.value

    def __del__(self):
        # release the font exactly once; the None sentinel guards against
        # double-close if __del__ runs more than once
        if self._font is not None:
            sdl2.sdlttf.TTF_CloseFont(self._font)
            self._font = None
Exemplo n.º 25
0
class Person(object):
    """attrs-style record; field validators run at construction time."""
    name = attrib()
    # must satisfy the custom is_valid_gender validator
    gender = attrib(validator=is_valid_gender)
    # a list of validators runs in order: int check, then the upper bound
    age = attrib(validator=[validators.instance_of(int), is_less_than_100])
Exemplo n.º 26
0
class Boss(object):
    """Top-level coordinator for one wormhole connection.

    Builds and wires together all of the per-wormhole worker state
    machines (Nameplate, Mailbox, Key, Code, ...), relays events between
    them and the user-facing Wormhole object (self._W), and tracks the
    overall outcome (happy / lonely / scared / errors) through its own
    small Automat machine (S0_empty .. S4_closed).
    """
    _W = attrib()
    _side = attrib(validator=instance_of(type(u"")))
    _url = attrib(validator=instance_of(type(u"")))
    _appid = attrib(validator=instance_of(type(u"")))
    _versions = attrib(validator=instance_of(dict))
    _reactor = attrib()
    _journal = attrib(validator=provides(_interfaces.IJournal))
    _tor = attrib(validator=optional(provides(_interfaces.ITorManager)))
    _timing = attrib(validator=provides(_interfaces.ITiming))
    m = MethodicalMachine()
    set_trace = getattr(m, "_setTrace", lambda self, f: None)

    def __attrs_post_init__(self):
        self._build_workers()
        self._init_other_state()

    def _build_workers(self):
        """Instantiate every worker machine and wire them to each other."""
        self._N = Nameplate()
        self._M = Mailbox(self._side)
        self._S = Send(self._side, self._timing)
        self._O = Order(self._side, self._timing)
        self._K = Key(self._appid, self._versions, self._side, self._timing)
        self._R = Receive(self._side, self._timing)
        self._RC = RendezvousConnector(self._url, self._appid, self._side,
                                       self._reactor, self._journal, self._tor,
                                       self._timing)
        self._L = Lister(self._timing)
        self._A = Allocator(self._timing)
        self._I = Input(self._timing)
        self._C = Code(self._timing)
        self._T = Terminator()

        self._N.wire(self._M, self._I, self._RC, self._T)
        self._M.wire(self._N, self._RC, self._O, self._T)
        self._S.wire(self._M)
        self._O.wire(self._K, self._R)
        self._K.wire(self, self._M, self._R)
        self._R.wire(self, self._S)
        self._RC.wire(self, self._N, self._M, self._A, self._L, self._T)
        self._L.wire(self._RC, self._I)
        self._A.wire(self._RC, self._C)
        self._I.wire(self._C, self._L)
        self._C.wire(self, self._A, self._N, self._K, self._I)
        self._T.wire(self, self._RC, self._N, self._M)

    def _init_other_state(self):
        self._did_start_code = False
        self._next_tx_phase = 0
        self._next_rx_phase = 0
        self._rx_phases = {}  # phase -> plaintext

        self._result = "empty"

    # these methods are called from outside
    def start(self):
        """Open the rendezvous connection; everything else follows events."""
        self._RC.start()

    def _print_trace(self, old_state, input, new_state, client_name, machine,
                     file):
        if new_state:
            print("%s.%s[%s].%s -> [%s]" %
                  (client_name, machine, old_state, input, new_state),
                  file=file)
        else:
            # the RendezvousConnector emits message events as if
            # they were state transitions, except that old_state
            # and new_state are empty strings. "input" is one of
            # R.connected, R.rx(type phase+side), R.tx(type
            # phase), R.lost .
            print("%s.%s.%s" % (client_name, machine, input), file=file)
        file.flush()

        def output_tracer(output):
            print(" %s.%s.%s()" % (client_name, machine, output), file=file)
            file.flush()

        return output_tracer

    def _set_trace(self, client_name, which, file):
        """Attach trace printers to the machines named in `which`."""
        names = {
            "B": self,
            "N": self._N,
            "M": self._M,
            "S": self._S,
            "O": self._O,
            "K": self._K,
            "SK": self._K._SK,
            "R": self._R,
            "RC": self._RC,
            "L": self._L,
            "C": self._C,
            "T": self._T
        }
        for machine in which.split():
            # machine=machine binds the loop variable at definition time
            t = (lambda old_state, input, new_state, machine=machine: self.
                 _print_trace(old_state,
                              input,
                              new_state,
                              client_name=client_name,
                              machine=machine,
                              file=file))
            names[machine].set_trace(t)

    ## def serialize(self):
    ##     raise NotImplemented

    # and these are the state-machine transition functions, which don't take
    # args
    @m.state(initial=True)
    def S0_empty(self):
        pass  # pragma: no cover

    @m.state()
    def S1_lonely(self):
        pass  # pragma: no cover

    @m.state()
    def S2_happy(self):
        pass  # pragma: no cover

    @m.state()
    def S3_closing(self):
        pass  # pragma: no cover

    @m.state(terminal=True)
    def S4_closed(self):
        pass  # pragma: no cover

    # from the Wormhole

    # input/allocate/set_code are regular methods, not state-transition
    # inputs. We expect them to be called just after initialization, while
    # we're in the S0_empty state. You must call exactly one of them, and the
    # call must happen while we're in S0_empty, which makes them good
    # candiates for being a proper @m.input, but set_code() will immediately
    # (reentrantly) cause self.got_code() to be fired, which is messy. These
    # are all passthroughs to the Code machine, so one alternative would be
    # to have Wormhole call Code.{input,allocate,set_code} instead, but that
    # would require the Wormhole to be aware of Code (whereas right now
    # Wormhole only knows about this Boss instance, and everything else is
    # hidden away).
    def input_code(self):
        """Begin interactive code entry; may be called only once."""
        if self._did_start_code:
            raise OnlyOneCodeError()
        self._did_start_code = True
        return self._C.input_code()

    def allocate_code(self, code_length):
        """Ask the server to allocate a fresh code; may be called only once."""
        if self._did_start_code:
            raise OnlyOneCodeError()
        self._did_start_code = True
        wl = PGPWordList()
        self._C.allocate_code(code_length, wl)

    def set_code(self, code):
        """Use a caller-supplied code; may be called only once."""
        if ' ' in code:
            raise KeyFormatError("code (%s) contains spaces." % code)
        if self._did_start_code:
            raise OnlyOneCodeError()
        self._did_start_code = True
        self._C.set_code(code)

    @m.input()
    def send(self, plaintext):
        pass

    @m.input()
    def close(self):
        pass

    # from RendezvousConnector:
    # * "rx_welcome" is the Welcome message, which might signal an error, or
    #   our welcome_handler might signal one
    # * "rx_error" is error message from the server (probably because of
    #   something we said badly, or due to CrowdedError)
    # * "error" is when an exception happened while it tried to deliver
    #   something else
    def rx_welcome(self, welcome):
        """Deliver the server Welcome; converts any problem to rx_unwelcome."""
        try:
            if "error" in welcome:
                raise WelcomeError(welcome["error"])
            # TODO: it'd be nice to not call the handler when we're in
            # S3_closing or S4_closed states. I tried to implement this with
            # rx_welcome as an @input, but in the error case I'd be
            # delivering a new input (rx_error or something) while in the
            # middle of processing the rx_welcome input, and I wasn't sure
            # Automat would handle that correctly.
            self._W.got_welcome(welcome)  # TODO: let this raise WelcomeError?
        except WelcomeError as welcome_error:
            self.rx_unwelcome(welcome_error)

    @m.input()
    def rx_unwelcome(self, welcome_error):
        pass

    @m.input()
    def rx_error(self, errmsg, orig):
        pass

    @m.input()
    def error(self, err):
        pass

    # from Code (provoked by input/allocate/set_code)
    @m.input()
    def got_code(self, code):
        pass

    # Key sends (got_key, scared)
    # Receive sends (got_message, happy, got_verifier, scared)
    @m.input()
    def happy(self):
        pass

    @m.input()
    def scared(self):
        pass

    def got_message(self, phase, plaintext):
        """Route an inbound message to the version or numeric-phase input."""
        assert isinstance(phase, type("")), type(phase)
        assert isinstance(plaintext, type(b"")), type(plaintext)
        if phase == "version":
            self._got_version(plaintext)
        elif re.search(r'^\d+$', phase):
            self._got_phase(int(phase), plaintext)
        else:
            # Ignore unrecognized phases, for forwards-compatibility. Use
            # log.err so tests will catch surprises.
            log.err(_UnknownPhaseError("received unknown phase '%s'" % phase))

    @m.input()
    def _got_version(self, plaintext):
        pass

    @m.input()
    def _got_phase(self, phase, plaintext):
        pass

    @m.input()
    def got_key(self, key):
        pass

    @m.input()
    def got_verifier(self, verifier):
        pass

    # Terminator sends closed
    @m.input()
    def closed(self):
        pass

    @m.output()
    def do_got_code(self, code):
        self._W.got_code(code)

    @m.output()
    def process_version(self, plaintext):
        # most of this is wormhole-to-wormhole, ignored for now
        # in the future, this is how Dilation is signalled
        self._their_versions = bytes_to_dict(plaintext)
        # but this part is app-to-app
        app_versions = self._their_versions.get("app_versions", {})
        self._W.got_versions(app_versions)

    @m.output()
    def S_send(self, plaintext):
        assert isinstance(plaintext, type(b"")), type(plaintext)
        phase = self._next_tx_phase
        self._next_tx_phase += 1
        self._S.send("%d" % phase, plaintext)

    @m.output()
    def close_unwelcome(self, welcome_error):
        #assert isinstance(err, WelcomeError)
        self._result = welcome_error
        self._T.close("unwelcome")

    @m.output()
    def close_error(self, errmsg, orig):
        self._result = ServerError(errmsg)
        self._T.close("errory")

    @m.output()
    def close_scared(self):
        self._result = WrongPasswordError()
        self._T.close("scary")

    @m.output()
    def close_lonely(self):
        self._result = LonelyError()
        self._T.close("lonely")

    @m.output()
    def close_happy(self):
        self._result = "happy"
        self._T.close("happy")

    @m.output()
    def W_got_key(self, key):
        self._W.got_key(key)

    @m.output()
    def W_got_verifier(self, verifier):
        self._W.got_verifier(verifier)

    @m.output()
    def W_received(self, phase, plaintext):
        assert isinstance(phase, six.integer_types), type(phase)
        # we call Wormhole.received() in strict phase order, with no gaps
        self._rx_phases[phase] = plaintext
        while self._next_rx_phase in self._rx_phases:
            self._W.received(self._rx_phases.pop(self._next_rx_phase))
            self._next_rx_phase += 1

    @m.output()
    def W_close_with_error(self, err):
        self._result = err  # exception
        self._W.closed(self._result)

    @m.output()
    def W_closed(self):
        # result is either "happy" or a WormholeError of some sort
        self._W.closed(self._result)

    # transition table: one X.upon(...) call per edge of the machine
    S0_empty.upon(close, enter=S3_closing, outputs=[close_lonely])
    S0_empty.upon(send, enter=S0_empty, outputs=[S_send])
    S0_empty.upon(rx_unwelcome, enter=S3_closing, outputs=[close_unwelcome])
    S0_empty.upon(got_code, enter=S1_lonely, outputs=[do_got_code])
    S0_empty.upon(rx_error, enter=S3_closing, outputs=[close_error])
    S0_empty.upon(error, enter=S4_closed, outputs=[W_close_with_error])

    S1_lonely.upon(rx_unwelcome, enter=S3_closing, outputs=[close_unwelcome])
    S1_lonely.upon(happy, enter=S2_happy, outputs=[])
    S1_lonely.upon(scared, enter=S3_closing, outputs=[close_scared])
    S1_lonely.upon(close, enter=S3_closing, outputs=[close_lonely])
    S1_lonely.upon(send, enter=S1_lonely, outputs=[S_send])
    S1_lonely.upon(got_key, enter=S1_lonely, outputs=[W_got_key])
    S1_lonely.upon(rx_error, enter=S3_closing, outputs=[close_error])
    S1_lonely.upon(error, enter=S4_closed, outputs=[W_close_with_error])

    S2_happy.upon(rx_unwelcome, enter=S3_closing, outputs=[close_unwelcome])
    S2_happy.upon(got_verifier, enter=S2_happy, outputs=[W_got_verifier])
    S2_happy.upon(_got_phase, enter=S2_happy, outputs=[W_received])
    S2_happy.upon(_got_version, enter=S2_happy, outputs=[process_version])
    S2_happy.upon(scared, enter=S3_closing, outputs=[close_scared])
    S2_happy.upon(close, enter=S3_closing, outputs=[close_happy])
    S2_happy.upon(send, enter=S2_happy, outputs=[S_send])
    S2_happy.upon(rx_error, enter=S3_closing, outputs=[close_error])
    S2_happy.upon(error, enter=S4_closed, outputs=[W_close_with_error])

    S3_closing.upon(rx_unwelcome, enter=S3_closing, outputs=[])
    S3_closing.upon(rx_error, enter=S3_closing, outputs=[])
    S3_closing.upon(got_verifier, enter=S3_closing, outputs=[])
    S3_closing.upon(_got_phase, enter=S3_closing, outputs=[])
    S3_closing.upon(_got_version, enter=S3_closing, outputs=[])
    S3_closing.upon(happy, enter=S3_closing, outputs=[])
    S3_closing.upon(scared, enter=S3_closing, outputs=[])
    S3_closing.upon(close, enter=S3_closing, outputs=[])
    S3_closing.upon(send, enter=S3_closing, outputs=[])
    S3_closing.upon(closed, enter=S4_closed, outputs=[W_closed])
    S3_closing.upon(error, enter=S4_closed, outputs=[W_close_with_error])

    S4_closed.upon(rx_unwelcome, enter=S4_closed, outputs=[])
    S4_closed.upon(got_verifier, enter=S4_closed, outputs=[])
    S4_closed.upon(_got_phase, enter=S4_closed, outputs=[])
    S4_closed.upon(_got_version, enter=S4_closed, outputs=[])
    S4_closed.upon(happy, enter=S4_closed, outputs=[])
    S4_closed.upon(scared, enter=S4_closed, outputs=[])
    S4_closed.upon(close, enter=S4_closed, outputs=[])
    S4_closed.upon(send, enter=S4_closed, outputs=[])
    S4_closed.upon(error, enter=S4_closed, outputs=[])
Exemplo n.º 27
0
class MorphosyntacticProperty:
    """A named morphosyntactic property whose repr is simply its name."""
    name: str = attrib(validator=instance_of(str))

    def __repr__(self) -> str:
        return self.name
Exemplo n.º 28
0
class AuthProvider(object):
    """
    Provider for authentication and authorization support.
    """

    _log = Logger()

    # backing data store for event ACLs and incident data
    store: IMSDataStore = attrib(validator=instance_of(IMSDataStore))

    # Duty Management System: source of personnel and position records
    dms: DutyManagementSystem = attrib(
        validator=instance_of(DutyManagementSystem))

    # when True, only active users get read authorizations
    requireActive: bool = attrib(validator=instance_of(bool), default=True)

    # short names granted imsAdmin
    adminUsers: FrozenSet[str] = attrib(default=frozenset())

    # NOTE(review): a master key matching any user's password grants access
    masterKey: Optional[str] = attrib(validator=optional(instance_of(str)),
                                      default=None)

    async def verifyCredentials(self, user: User, password: str) -> bool:
        """
        Verify a password for the given user.
        """
        if user is None:
            authenticated = False
        else:
            try:
                # master-key bypass: accepted for any user when configured
                if (self.masterKey is not None and password == self.masterKey):
                    return True

                hashedPassword = user.hashedPassword
                if hashedPassword is None:
                    return False

                authenticated = verifyPassword(password, hashedPassword)
            except Exception:
                # fail closed: any error during verification denies access
                self._log.failure("Unable to check password")
                authenticated = False

        self._log.debug(
            "Valid credentials for {user}: {result}",
            user=user,
            result=authenticated,
        )

        return authenticated

    def authenticateRequest(self,
                            request: IRequest,
                            optional: bool = False) -> None:
        """
        Authenticate a request.

        @param request: The request to authenticate.

        @param optional: If true, do not raise NotAuthenticatedError() if no
            user is associated with the request.
        """
        # the user, if any, was stashed on the session at login time
        session = request.getSession()
        request.user = getattr(session, "user", None)

        if request.user is None and not optional:
            self._log.debug("Authentication failed")
            raise NotAuthenticatedError()

    async def authorizationsForUser(self, user: User,
                                    event: Optional[Event]) -> Authorization:
        """
        Look up the authorizations that a user has for a given event.
        """
        def matchACL(user: User, acl: Container[str]) -> bool:
            # "*" is a wildcard entry matching everyone
            if "*" in acl:
                return True

            for shortName in user.shortNames:
                if ("person:" + shortName) in acl:
                    return True

            for group in user.groups:
                if ("position:" + group) in acl:
                    return True

            return False

        authorizations = Authorization.none

        if user is not None:
            authorizations |= Authorization.writeIncidentReports

            if user.active or not self.requireActive:
                authorizations |= Authorization.readPersonnel
                authorizations |= Authorization.readIncidentReports

                for shortName in user.shortNames:
                    if shortName in self.adminUsers:
                        authorizations |= Authorization.imsAdmin

                    if event is not None:
                        # writers implicitly get read access as well
                        if matchACL(user,
                                    frozenset(await
                                              self.store.writers(event))):
                            authorizations |= Authorization.writeIncidents
                            authorizations |= Authorization.readIncidents
                        else:
                            if matchACL(
                                    user,
                                    frozenset(await
                                              self.store.readers(event))):
                                authorizations |= Authorization.readIncidents

        self._log.debug(
            "Authz for {user}: {authorizations}",
            user=user,
            authorizations=authorizations,
        )

        return authorizations

    async def authorizeRequest(
        self,
        request: IRequest,
        event: Optional[Event],
        requiredAuthorizations: Authorization,
    ) -> None:
        """
        Determine whether the user attached to a request has the required
        authorizations in the context of a given event.

        Raises NotAuthorizedError if none of the required authorizations
        are held.
        """
        self.authenticateRequest(request)

        userAuthorizations = await self.authorizationsForUser(
            request.user, event)
        request.authorizations = userAuthorizations

        if not (requiredAuthorizations & userAuthorizations):
            self._log.debug(
                "Authorization failed for {request.user}. "
                "Requires {requiredAuthorizations}, has {userAuthorizations}. "
                "URI: {request.uri}",
                request=request,
                requiredAuthorizations=requiredAuthorizations,
                userAuthorizations=userAuthorizations,
            )
            raise NotAuthorizedError()

    async def authorizeRequestForIncidentReport(
            self, request: IRequest, incidentReport: IncidentReport) -> None:
        """
        Determine whether the user attached to a request has the required
        authorizations to read the incident report with the given number.
        """

        # The author of the incident report should be allowed to read and write
        # to it.

        if request.user is not None and incidentReport.reportEntries:
            rangerHandle = request.user.rangerHandle
            for reportEntry in incidentReport.reportEntries:
                if reportEntry.author == rangerHandle:
                    request.authorizations = (
                        Authorization.readIncidentReports
                        | Authorization.writeIncidentReports)
                    return

        # If there are incidents attached to this incident report, then the
        # permissions on the attached incidents (which are determined by the
        # events containing the incidents) determine the permission on the
        # incident report.
        # So we'll iterate over all of the events containing incidents that
        # this incident report is attached to, and see if any of those events
        # can approve the request.

        events = frozenset(
            event for event, _incidentNumber in await self.store.
            incidentsAttachedToIncidentReport(incidentReport.number))

        if events:
            for event in events:
                # There are incidents attached; use the authorization for
                # reading incidents from the corresponding events.
                # Because it's possible for multiple incidents to be attached,
                # if one event fails, keep trying the others in case they allow
                # it.
                try:
                    await self.authorizeRequest(request, event,
                                                Authorization.readIncidents)
                except NotAuthorizedError as e:
                    authFailure = e
                else:
                    return

            # every attached event denied access; re-raise the last failure
            raise authFailure

        # Incident report is detached
        await self.authorizeRequest(request, None,
                                    Authorization.readIncidentReports)

    async def lookupUserName(self, username: str) -> Optional[User]:
        """
        Look up the user record for a user short name.
        """
        dms = self.dms

        # FIXME: a hash would be better (eg. rangersByHandle)
        try:
            rangers = tuple(await dms.personnel())
        except DMSError as e:
            self._log.critical("Unable to load personnel: {error}", error=e)
            return None

        # match by handle first; fall back to email (for/else: the else
        # clause runs only when the loop found no match)
        for ranger in rangers:
            if ranger.handle == username:
                break
        else:
            for ranger in rangers:
                if username in ranger.email:
                    break
            else:
                return None

        positions = tuple(await dms.positions())

        groups = tuple(position.name for position in positions
                       if ranger in position.members)

        return User(ranger=ranger, groups=groups)
Exemplo n.º 29
0
class Update(Request):
    """Update request carrying the prior resource id and properties."""

    # attrs fields, validated by type on construction.
    physical_resource_id = attr.ib(validator=instance_of(str))
    old_resource_properties = attr.ib(validator=instance_of(dict))
Exemplo n.º 30
0
 def test_success_with_type(self):
     """
     An int value passes an optional int validator without error.
     """
     validator = optional(instance_of(int))
     validator(None, simple_attr("test"), 42)
Exemplo n.º 31
0
class SubChannel(object):
    """
    One logical subchannel multiplexed over a dilated wormhole connection.

    Acts as a Twisted-style transport: local writes are forwarded to the
    manager keyed by subchannel id, and remote data/close events are
    delivered to the attached protocol (or buffered until one attaches).
    """
    # Subchannel id (integer), used to address this channel via the manager.
    _scid = attrib(validator=instance_of(six.integer_types))
    # Owning manager; all sends and producer/consumer calls delegate to it.
    _manager = attrib(validator=provides(IDilationManager))
    _host_addr = attrib(validator=instance_of(_WormholeAddress))
    _peer_addr = attrib(validator=instance_of(_SubchannelAddress))

    m = MethodicalMachine()
    set_trace = getattr(m, "_setTrace", lambda self,
                        f: None)  # pragma: no cover

    def __attrs_post_init__(self):
        # self._mailbox = None
        # self._pending_outbound = {}
        # self._processed = set()
        # Events arriving before a protocol is attached are buffered here
        # and replayed in _set_protocol().
        self._protocol = None
        self._pending_dataReceived = []
        self._pending_connectionLost = (False, None)

    # State machine: open -> closing (local close sent, awaiting remote)
    # -> closed.
    @m.state(initial=True)
    def open(self):
        pass  # pragma: no cover

    @m.state()
    def closing():
        pass  # pragma: no cover

    @m.state()
    def closed():
        pass  # pragma: no cover

    @m.input()
    def remote_data(self, data):
        pass

    @m.input()
    def remote_close(self):
        pass

    @m.input()
    def local_data(self, data):
        pass

    @m.input()
    def local_close(self):
        pass

    @m.output()
    def send_data(self, data):
        self._manager.send_data(self._scid, data)

    @m.output()
    def send_close(self):
        self._manager.send_close(self._scid)

    @m.output()
    def signal_dataReceived(self, data):
        # Buffer if no protocol is attached yet; replayed by _set_protocol.
        if self._protocol:
            self._protocol.dataReceived(data)
        else:
            self._pending_dataReceived.append(data)

    @m.output()
    def signal_connectionLost(self):
        if self._protocol:
            self._protocol.connectionLost(ConnectionDone())
        else:
            self._pending_connectionLost = (True, ConnectionDone())
        self._manager.subchannel_closed(self._scid, self)
        # we're deleted momentarily

    @m.output()
    def error_closed_write(self, data):
        raise AlreadyClosedError("write not allowed on closed subchannel")

    @m.output()
    def error_closed_close(self):
        raise AlreadyClosedError(
            "loseConnection not allowed on closed subchannel")

    # primary transitions
    open.upon(remote_data, enter=open, outputs=[signal_dataReceived])
    open.upon(local_data, enter=open, outputs=[send_data])
    open.upon(remote_close, enter=closed, outputs=[send_close, signal_connectionLost])
    open.upon(local_close, enter=closing, outputs=[send_close])
    closing.upon(remote_data, enter=closing, outputs=[signal_dataReceived])
    closing.upon(remote_close, enter=closed, outputs=[signal_connectionLost])

    # error cases
    # we won't ever see an OPEN, since L4 will log+ignore those for us
    closing.upon(local_data, enter=closing, outputs=[error_closed_write])
    closing.upon(local_close, enter=closing, outputs=[error_closed_close])
    # the CLOSED state won't ever see messages, since we'll be deleted

    # our endpoints use this

    def _set_protocol(self, protocol):
        """Attach *protocol* and replay any buffered data/close events."""
        assert not self._protocol
        self._protocol = protocol
        if self._pending_dataReceived:
            for data in self._pending_dataReceived:
                self._protocol.dataReceived(data)
            self._pending_dataReceived = []
        cl, what = self._pending_connectionLost
        if cl:
            self._protocol.connectionLost(what)

    # ITransport
    def write(self, data):
        assert isinstance(data, type(b""))
        assert len(data) <= MAX_FRAME_LENGTH
        self.local_data(data)

    def writeSequence(self, iovec):
        self.write(b"".join(iovec))

    def loseConnection(self):
        self.local_close()

    def getHost(self):
        # we define "host addr" as the overall wormhole
        return self._host_addr

    def getPeer(self):
        # and "peer addr" as the subchannel within that wormhole
        return self._peer_addr

    # IProducer: throttle inbound data (wormhole "up" to local app's Protocol)
    def stopProducing(self):
        self._manager.subchannel_stopProducing(self)

    def pauseProducing(self):
        self._manager.subchannel_pauseProducing(self)

    def resumeProducing(self):
        self._manager.subchannel_resumeProducing(self)

    # IConsumer: allow the wormhole to throttle outbound data (app->wormhole)
    def registerProducer(self, producer, streaming):
        self._manager.subchannel_registerProducer(self, producer, streaming)

    def unregisterProducer(self):
        self._manager.subchannel_unregisterProducer(self)
Exemplo n.º 32
0
import os.path
import yaml

from attr import attributes, attr, validators, asdict


# Validator accepting only ``str`` instances.
valid_str = validators.instance_of(str)

# Reusable attrs attribute: an optional string, defaulting to ''.
optional_str_attr = attr(
    validator=validators.optional(valid_str),
    default='',
)


@attributes
class Config:
    # Both fields are optional strings (see optional_str_attr above),
    # defaulting to the empty string.
    username = optional_str_attr
    password = optional_str_attr


def get_config(path):
    """Load a Config from the YAML file at *path*.

    Returns a default (empty-credential) Config when the file does not
    exist.

    :param path: filesystem path to the YAML configuration file
    :return: a populated ``Config`` instance
    :raises KeyError: if the file lacks a 'username' or 'password' key
    """
    if not os.path.exists(path):
        return Config()

    with open(path) as f:
        # safe_load refuses to construct arbitrary Python objects from
        # untrusted YAML input (plain yaml.load without a Loader is unsafe
        # and deprecated).
        config = yaml.safe_load(f)

    return Config(
        username=config['username'],
        password=config['password'],
    )
Exemplo n.º 33
0
 def test_success_with_type(self):
     """
     A matching type is accepted by the optional validator.
     """
     opt_int = optional(instance_of(int))
     opt_int(None, simple_attr("test"), 42)
Exemplo n.º 34
0
class ObjectRecognizer:
    """
    The ObjectRecognizer finds object matches in the scene pattern and adds a `ObjectSemanticNodePerceptionPredicate`
    which can be used to learn additional semantics which relate objects to other objects

    If applied to a dynamic situation, this will only recognize objects
    which are present in both the BEFORE and AFTER frames.
    """

    # Because static patterns must be applied to static perceptions
    # and dynamic patterns to dynamic situations,
    # we need to store our patterns both ways.
    _concepts_to_static_patterns: ImmutableDict[
        ObjectConcept, PerceptionGraphPattern] = attrib(
            validator=deep_mapping(instance_of(ObjectConcept),
                                   instance_of(PerceptionGraphPattern)),
            converter=_to_immutabledict,
        )
    _concepts_to_names: ImmutableDict[ObjectConcept, str] = attrib(
        validator=deep_mapping(instance_of(ObjectConcept), instance_of(str)),
        converter=_to_immutabledict,
    )

    # We derive these from the static patterns.
    _concepts_to_dynamic_patterns: ImmutableDict[
        ObjectConcept, PerceptionGraphPattern] = attrib(init=False)
    determiners: ImmutableSet[str] = attrib(converter=_to_immutableset,
                                            validator=deep_iterable(
                                                instance_of(str)))
    """
    This is a hack to handle determiners.
    See https://github.com/isi-vista/adam/issues/498
    """
    _concept_to_num_subobjects: ImmutableDict[Concept,
                                              int] = attrib(init=False)
    """
    Used for a performance optimization in match_objects.
    """
    _language_mode: LanguageMode = attrib(validator=instance_of(LanguageMode),
                                          kw_only=True)

    def __attrs_post_init__(self) -> None:
        """Reject construction if any determiner is not all-lowercase."""
        non_lowercase_determiners = [
            determiner for determiner in self.determiners
            if determiner.lower() != determiner
        ]
        if non_lowercase_determiners:
            raise RuntimeError(
                f"All determiners must be specified in lowercase, but got "
                f"{non_lowercase_determiners}")

    @staticmethod
    def for_ontology_types(
        ontology_types: Iterable[OntologyNode],
        determiners: Iterable[str],
        ontology: Ontology,
        language_mode: LanguageMode,
        *,
        perception_generator:
        HighLevelSemanticsSituationToDevelopmentalPrimitivePerceptionGenerator,
    ) -> "ObjectRecognizer":
        """
        Build an ObjectRecognizer whose patterns are derived from the given
        ontology types, one ObjectConcept (named by the type's handle) per
        type.
        """
        ontology_types_to_concepts = {
            obj_type: ObjectConcept(obj_type.handle)
            for obj_type in ontology_types
        }

        return ObjectRecognizer(
            concepts_to_static_patterns=_sort_mapping_by_pattern_complexity(
                immutabledict((
                    concept,
                    PerceptionGraphPattern.from_ontology_node(
                        obj_type,
                        ontology,
                        perception_generator=perception_generator),
                ) for (obj_type,
                       concept) in ontology_types_to_concepts.items())),
            determiners=determiners,
            concepts_to_names={
                concept: obj_type.handle
                for obj_type, concept in ontology_types_to_concepts.items()
            },
            language_mode=language_mode,
        )

    def match_objects_old(
        self, perception_graph: PerceptionGraph
    ) -> PerceptionGraphFromObjectRecognizer:
        """
        Compatibility wrapper: run `match_objects` on a bare
        `PerceptionGraph` and return the older result type.
        """
        new_style_input = PerceptionSemanticAlignment(
            perception_graph=perception_graph, semantic_nodes=[])
        new_style_output = self.match_objects(new_style_input)
        return PerceptionGraphFromObjectRecognizer(
            perception_graph=new_style_output[0].perception_graph,
            description_to_matched_object_node=new_style_output[1],
        )

    def match_objects(
        self,
        perception_semantic_alignment: PerceptionSemanticAlignment,
        *,
        post_process: Callable[[PerceptionGraph, AbstractSet[SemanticNode]],
                               Tuple[PerceptionGraph,
                                     AbstractSet[SemanticNode]],
                               ] = default_post_process_enrichment,
    ) -> Tuple[PerceptionSemanticAlignment, Mapping[Tuple[str, ...],
                                                    ObjectSemanticNode]]:
        r"""
        Recognize known objects in a `PerceptionGraph`.

        The matched portion of the graph will be replaced with an `ObjectSemanticNode`\ s
        which will inherit all relationships of any nodes internal to the matched portion
        with any external nodes.

        This is useful as a pre-processing step
        before prepositional and verbal learning experiments.
        """

        # Timing counters are module-level so they accumulate across calls
        # for the summary logged at the end of this method.
        # pylint: disable=global-statement,invalid-name
        global cumulative_millis_in_successful_matches_ms
        global cumulative_millis_in_failed_matches_ms

        object_nodes: List[Tuple[Tuple[str, ...], ObjectSemanticNode]] = []
        perception_graph = perception_semantic_alignment.perception_graph
        is_dynamic = perception_semantic_alignment.perception_graph.dynamic

        if is_dynamic:
            concepts_to_patterns = self._concepts_to_dynamic_patterns
        else:
            concepts_to_patterns = self._concepts_to_static_patterns

        # We special case handling the ground perception
        # Because we don't want to remove it from the graph, we just want to use it's
        # Object node as a recognized object. The situation "a box on the ground"
        # Prompted the need to recognize the ground
        graph_to_return = perception_graph
        for node in graph_to_return._graph.nodes:  # pylint:disable=protected-access
            if node == GROUND_PERCEPTION:
                matched_object_node = ObjectSemanticNode(GROUND_OBJECT_CONCEPT)
                if LanguageMode.ENGLISH == self._language_mode:
                    object_nodes.append(
                        ((f"{GROUND_OBJECT_CONCEPT.debug_string}", ),
                         matched_object_node))
                elif LanguageMode.CHINESE == self._language_mode:
                    object_nodes.append((("di4 myan4", ), matched_object_node))
                else:
                    raise RuntimeError("Invalid language_generator")
                # We construct a fake match which is only the ground perception node
                subgraph_of_root = subgraph(perception_graph.copy_as_digraph(),
                                            [node])
                pattern_match = PerceptionGraphPatternMatch(
                    matched_pattern=PerceptionGraphPattern(
                        graph=subgraph_of_root,
                        dynamic=perception_graph.dynamic),
                    graph_matched_against=perception_graph,
                    matched_sub_graph=PerceptionGraph(
                        graph=subgraph_of_root,
                        dynamic=perception_graph.dynamic),
                    pattern_node_to_matched_graph_node=immutabledict(),
                )
                graph_to_return = replace_match_with_object_graph_node(
                    matched_object_node, graph_to_return,
                    pattern_match).perception_graph_after_replacement

        candidate_object_subgraphs = extract_candidate_objects(
            perception_graph, sort_by_increasing_size=True)

        for candidate_object_graph in candidate_object_subgraphs:
            num_object_nodes = candidate_object_graph.count_nodes_matching(
                lambda node: isinstance(node, ObjectPerception))

            for (concept, pattern) in concepts_to_patterns.items():
                # As an optimization, we count how many sub-object nodes
                # are in the graph and the pattern.
                # If they aren't the same, the match is impossible
                # and we can bail out early.
                if num_object_nodes != self._concept_to_num_subobjects[concept]:
                    continue
                with Timer(factor=1000) as t:
                    matcher = pattern.matcher(candidate_object_graph,
                                              match_mode=MatchMode.OBJECT)
                    pattern_match = first(
                        matcher.matches(use_lookahead_pruning=True), None)
                if pattern_match:
                    cumulative_millis_in_successful_matches_ms += t.elapsed
                    matched_object_node = ObjectSemanticNode(concept)

                    # We wrap the concept in a tuple because it could in theory be multiple
                    # tokens,
                    # even though currently it never is.
                    if self._language_mode == LanguageMode.ENGLISH:
                        object_nodes.append(
                            ((concept.debug_string, ), matched_object_node))
                    elif self._language_mode == LanguageMode.CHINESE:
                        if concept.debug_string == "me":
                            object_nodes.append(
                                (("wo3", ), matched_object_node))
                        elif concept.debug_string == "you":
                            object_nodes.append(
                                (("ni3", ), matched_object_node))
                        mappings = (
                            GAILA_PHASE_1_CHINESE_LEXICON.
                            _ontology_node_to_word  # pylint:disable=protected-access
                        )
                        for k, v in mappings.items():
                            if k.handle == concept.debug_string:
                                debug_string = str(v.base_form)
                                object_nodes.append(
                                    ((debug_string, ), matched_object_node))
                    graph_to_return = replace_match_with_object_graph_node(
                        matched_object_node, graph_to_return,
                        pattern_match).perception_graph_after_replacement
                    # We match each candidate objects against only one object type.
                    # See https://github.com/isi-vista/adam/issues/627
                    break
                else:
                    cumulative_millis_in_failed_matches_ms += t.elapsed

        if object_nodes:
            logging.info(
                "Object recognizer recognized: %s",
                [concept for (concept, _) in object_nodes],
            )
        logging.info(
            "object matching: ms in success: %s, ms in failed: %s",
            cumulative_millis_in_successful_matches_ms,
            cumulative_millis_in_failed_matches_ms,
        )
        semantic_object_nodes = immutableset(node
                                             for (_, node) in object_nodes)

        post_process_graph, post_process_nodes = post_process(
            graph_to_return, semantic_object_nodes)

        return (
            perception_semantic_alignment.
            copy_with_updated_graph_and_added_nodes(
                new_graph=post_process_graph, new_nodes=post_process_nodes),
            immutabledict(object_nodes),
        )

    def match_objects_with_language_old(
        self, language_aligned_perception: LanguageAlignedPerception
    ) -> LanguageAlignedPerception:
        """
        Compatibility wrapper: run `match_objects_with_language` on the older
        `LanguageAlignedPerception` input/output type.
        """
        if language_aligned_perception.node_to_language_span:
            raise RuntimeError(
                "Don't know how to handle a non-empty node-to-language-span")
        new_style_input = LanguagePerceptionSemanticAlignment(
            language_concept_alignment=LanguageConceptAlignment(
                language_aligned_perception.language,
                node_to_language_span=[]),
            perception_semantic_alignment=PerceptionSemanticAlignment(
                perception_graph=language_aligned_perception.perception_graph,
                semantic_nodes=[],
            ),
        )
        new_style_output = self.match_objects_with_language(new_style_input)
        return LanguageAlignedPerception(
            language=new_style_output.language_concept_alignment.language,
            perception_graph=new_style_output.perception_semantic_alignment.
            perception_graph,
            node_to_language_span=new_style_output.language_concept_alignment.
            node_to_language_span,
        )

    def match_objects_with_language(
        self,
        language_perception_semantic_alignment:
        LanguagePerceptionSemanticAlignment,
        *,
        post_process: Callable[[PerceptionGraph, AbstractSet[SemanticNode]],
                               Tuple[PerceptionGraph,
                                     AbstractSet[SemanticNode]],
                               ] = default_post_process_enrichment,
    ) -> LanguagePerceptionSemanticAlignment:
        """
        Recognize known objects in a `LanguagePerceptionSemanticAlignment`.

        For each node matched, this will identify the relevant portion of the linguistic input
        and record the correspondence.

        The matched portion of the graph will be replaced with an `ObjectSemanticNode`
        which will inherit all relationships of any nodes internal to the matched portion
        with any external nodes.

        This is useful as a pre-processing step
        before prepositional and verbal learning experiments.
        """
        if (language_perception_semantic_alignment.
                perception_semantic_alignment.semantic_nodes):
            raise RuntimeError(
                "We assume ObjectRecognizer is run first, with no previous "
                "alignments")

        (
            post_match_perception_semantic_alignment,
            tokens_to_object_nodes,
        ) = self.match_objects(
            language_perception_semantic_alignment.
            perception_semantic_alignment,
            post_process=post_process,
        )
        return LanguagePerceptionSemanticAlignment(
            language_concept_alignment=language_perception_semantic_alignment.
            language_concept_alignment.copy_with_added_token_alignments(
                self._align_objects_to_tokens(
                    tokens_to_object_nodes,
                    language_perception_semantic_alignment.
                    language_concept_alignment.language,
                )),
            perception_semantic_alignment=
            post_match_perception_semantic_alignment,
        )

    def _align_objects_to_tokens(
        self,
        description_to_object_node: Mapping[Tuple[str, ...],
                                            ObjectSemanticNode],
        language: LinguisticDescription,
    ) -> Mapping[ObjectSemanticNode, Span]:
        """
        Map each recognized object node to the token span in *language* that
        mentions it, extending the span left to absorb a preceding
        determiner.  Objects not mentioned in the language are skipped;
        aligning the same token twice is an error.
        """
        result: List[Tuple[ObjectSemanticNode, Span]] = []

        # We want to ban the same token index from being aligned twice.
        matched_token_indices: Set[int] = set()

        for (description_tuple,
             object_node) in description_to_object_node.items():
            if len(description_tuple) != 1:
                raise RuntimeError(
                    f"Multi-token descriptions are not yet supported:"
                    f"{description_tuple}")
            description = description_tuple[0]
            try:
                end_index_inclusive = language.index(description)
            except ValueError:
                # A scene might contain things which are not referred to by the associated language.
                continue

            start_index = end_index_inclusive
            # This is a somewhat language-dependent hack to gobble up preceding determiners.
            # See https://github.com/isi-vista/adam/issues/498 .
            if end_index_inclusive > 0:
                possible_determiner_index = end_index_inclusive - 1
                if language[possible_determiner_index].lower(
                ) in self.determiners:
                    start_index = possible_determiner_index

            # We record what tokens were covered so we can block the same tokens being used twice.
            for included_token_index in range(start_index,
                                              end_index_inclusive + 1):
                if included_token_index in matched_token_indices:
                    raise RuntimeError(
                        "We do not currently support the same object "
                        "being mentioned twice in a sentence.")
                matched_token_indices.add(included_token_index)

            result.append((
                object_node,
                language.span(start_index,
                              end_index_exclusive=end_index_inclusive + 1),
            ))
        return immutabledict(result)

    @_concepts_to_dynamic_patterns.default
    def _init_concepts_to_dynamic_patterns(
            self) -> ImmutableDict[ObjectConcept, PerceptionGraphPattern]:
        """Derive dynamic patterns by scoping each static pattern over the entire scene."""
        return immutabledict(
            (concept, static_pattern.copy_with_temporal_scopes(ENTIRE_SCENE))
            for (concept,
                 static_pattern) in self._concepts_to_static_patterns.items())

    @_concept_to_num_subobjects.default
    def _init_patterns_to_num_subobjects(
            self) -> ImmutableDict[ObjectConcept, int]:
        """Precompute per-concept sub-object counts for the early-out in match_objects."""
        return immutabledict((
            concept,
            pattern.count_nodes_matching(
                lambda node: isinstance(node, AnyObjectPerception)),
        ) for (concept, pattern) in self._concepts_to_static_patterns.items())
Exemplo n.º 35
0
def optional_instance_of(cls):
    """Return an attrs validator accepting None or an instance of *cls*."""
    base_validator = validators.instance_of(cls)
    return validators.optional(base_validator)
Exemplo n.º 36
0
 def test_success(self):
     """
     A value of the required type passes validation silently.
     """
     int_validator = instance_of(int)
     int_validator(None, simple_attr("test"), 42)
Exemplo n.º 37
0
def optional_instance_of(cls):
    """Build a validator allowing either None or an instance of *cls*."""
    return validators.optional(
        validators.instance_of(cls)
    )
Exemplo n.º 38
0
class Key(object):
    """
    Orders the wormhole code and the peer's PAKE message for delivery.

    The underlying _SortedKey must see the code before the PAKE body, so
    a PAKE message that arrives first is stashed until the code shows up.
    State names encode receipt as S<code><pake> (0 = not yet received).
    """
    _appid = attrib(validator=instance_of(type(u"")))
    _versions = attrib(validator=instance_of(dict))
    _side = attrib(validator=instance_of(type(u"")))
    _timing = attrib(validator=provides(_interfaces.ITiming))
    m = MethodicalMachine()
    set_trace = getattr(m, "_setTrace",
                        lambda self, f: None)  # pragma: no cover

    def __attrs_post_init__(self):
        self._SK = _SortedKey(self._appid, self._versions, self._side,
                              self._timing)
        self._debug_pake_stashed = False  # for tests

    def wire(self, boss, mailbox, receive):
        self._SK.wire(boss, mailbox, receive)

    @m.state(initial=True)
    def S00(self):
        pass  # pragma: no cover

    @m.state()
    def S01(self):
        pass  # pragma: no cover

    @m.state()
    def S10(self):
        pass  # pragma: no cover

    @m.state()
    def S11(self):
        pass  # pragma: no cover

    @m.input()
    def got_code(self, code):
        pass

    @m.input()
    def got_pake(self, body):
        pass

    @m.output()
    def stash_pake(self, body):
        # PAKE arrived before the code: hold it until deliver_code_and_stashed_pake.
        self._pake = body
        self._debug_pake_stashed = True

    @m.output()
    def deliver_code(self, code):
        self._SK.got_code(code)

    @m.output()
    def deliver_pake(self, body):
        self._SK.got_pake(body)

    @m.output()
    def deliver_code_and_stashed_pake(self, code):
        # Code arrived last: forward it, then the stashed PAKE, in order.
        self._SK.got_code(code)
        self._SK.got_pake(self._pake)

    S00.upon(got_code, enter=S10, outputs=[deliver_code])
    S10.upon(got_pake, enter=S11, outputs=[deliver_pake])
    S00.upon(got_pake, enter=S01, outputs=[stash_pake])
    S01.upon(got_code, enter=S11, outputs=[deliver_code_and_stashed_pake])
Exemplo n.º 39
0
 def test_success_with_none(self):
     """
     None is accepted by an optional validator.
     """
     opt_validator = optional(instance_of(int))
     opt_validator(None, simple_attr("test"), None)
Exemplo n.º 40
0

def check_by(by, current):
    """Validate a bump amount against the current value.

    :param by: the amount to bump by; must be an integer
    :param current: the current value, or None when unset
    :raises TypeError: if *by* is not an integer
    :raises ValueError: if a negative bump is requested while *current*
        is unset
    """
    if not isinstance(by, int):
        raise TypeError('by must be an integer')

    negative_bump_while_unset = by < 0 and current is None
    if negative_bump_while_unset:
        raise ValueError(
            'Cannot bump by negative amount when current value is unset.')


# Validators for the tag/separator fields: tags must come from the fixed
# POST_TAGS/PRE_TAGS sets, separators from SEPS; "optional" allows None and
# "unset_or" additionally allows the unset sentinel.
validate_post_tag = unset_or(optional(in_(POST_TAGS)))
validate_pre_tag = optional(in_(PRE_TAGS))
validate_sep = optional(in_(SEPS))
validate_sep_or_unset = unset_or(optional(in_(SEPS)))
# Basic type validators (six types cover both Python 2 and 3).
is_bool = instance_of(bool)
is_int = instance_of(six.integer_types)
is_str = instance_of(six.string_types)
is_seq = instance_of(Sequence)

# "All numeric components MUST be non-negative integers."
num_comp = [not_bool, is_int, is_non_negative]


def sequence_of(validator, allow_empty=False):
    if isinstance(validator, list):
        validator = and_(*validator)

    def validate(inst, attr, value):
        is_seq(inst, attr, value)
Exemplo n.º 41
0
            a, IFoo, value,
        ) == e.value.args

    def test_repr(self):
        """
        The validator's repr names the interface it checks for.
        """
        validator = provides(IFoo)
        expected = "<provides validator for interface {interface!r}>".format(
            interface=IFoo)
        assert repr(validator) == expected


@pytest.mark.parametrize("validator", [
    instance_of(int),
    [always_pass, instance_of(int)],
])
class TestOptional(object):
    """
    Tests for `optional`.
    """
    def test_success(self, validator):
        """
        A valid value passes the wrapped optional validator silently.
        """
        wrapped = optional(validator)
        wrapped(None, simple_attr("test"), 42)

    def test_success_with_none(self, validator):
        """
Exemplo n.º 42
0
class OpenTSDBTarget(object):
    """Generates OpenTSDB target JSON structure.

    Grafana docs on using OpenTSDB:
    http://docs.grafana.org/features/datasources/opentsdb/
    OpenTSDB docs on querying or reading data:
    http://opentsdb.net/docs/build/html/user_guide/query/index.html


    :param metric: OpenTSDB metric name
    :param refId: target reference id
    :param aggregator: defines metric aggregator.
        The list of opentsdb aggregators:
        http://opentsdb.net/docs/build/html/user_guide/query/aggregators.html#available-aggregators
    :param alias: legend alias. Use patterns like $tag_tagname to replace part
        of the alias for a tag value.
    :param isCounter: defines if rate function results should
        be interpret as counter
    :param counterMax: defines rate counter max value
    :param counterResetValue: defines rate counter reset value
    :param disableDownsampling: defines if downsampling should be disabled.
        OpenTSDB docs on downsampling:
        http://opentsdb.net/docs/build/html/user_guide/query/index.html#downsampling
    :param downsampleAggregator: defines downsampling aggregator
    :param downsampleFillPolicy: defines downsampling fill policy
    :param downsampleInterval: defines downsampling interval
    :param filters: defines the list of metric query filters.
        OpenTSDB docs on filters:
        http://opentsdb.net/docs/build/html/user_guide/query/index.html#filters
    :param shouldComputeRate: defines if rate function should be used.
        OpenTSDB docs on rate function:
        http://opentsdb.net/docs/build/html/user_guide/query/index.html#rate
    :param currentFilterGroupBy: defines if grouping should be enabled for
        current filter
    :param currentFilterKey: defines current filter key
    :param currentFilterType: defines current filter type
    :param currentFilterValue: defines current filter value
    """

    # attrs fields; each maps one-to-one onto a key emitted by to_json_data.
    metric = attr.ib()
    refId = attr.ib(default="")
    aggregator = attr.ib(default="sum")
    alias = attr.ib(default=None)
    isCounter = attr.ib(default=False, validator=instance_of(bool))
    counterMax = attr.ib(default=None)
    counterResetValue = attr.ib(default=None)
    disableDownsampling = attr.ib(default=False, validator=instance_of(bool))
    downsampleAggregator = attr.ib(default=OTSDB_AGG_SUM)
    downsampleFillPolicy = attr.ib(
        default=OTSDB_DOWNSAMPLING_FILL_POLICY_DEFAULT,
        validator=is_in(OTSDB_DOWNSAMPLING_FILL_POLICIES))
    downsampleInterval = attr.ib(default=None)
    filters = attr.ib(default=attr.Factory(list))
    shouldComputeRate = attr.ib(default=False, validator=instance_of(bool))
    currentFilterGroupBy = attr.ib(default=False, validator=instance_of(bool))
    currentFilterKey = attr.ib(default="")
    currentFilterType = attr.ib(default=OTSDB_QUERY_FILTER_DEFAULT)
    currentFilterValue = attr.ib(default="")

    def to_json_data(self):
        """Serialize this target to the dict structure Grafana expects."""

        return {
            'aggregator': self.aggregator,
            'alias': self.alias,
            'isCounter': self.isCounter,
            'counterMax': self.counterMax,
            'counterResetValue': self.counterResetValue,
            'disableDownsampling': self.disableDownsampling,
            'downsampleAggregator': self.downsampleAggregator,
            'downsampleFillPolicy': self.downsampleFillPolicy,
            'downsampleInterval': self.downsampleInterval,
            'filters': self.filters,
            'metric': self.metric,
            'refId': self.refId,
            'shouldComputeRate': self.shouldComputeRate,
            'currentFilterGroupBy': self.currentFilterGroupBy,
            'currentFilterKey': self.currentFilterKey,
            'currentFilterType': self.currentFilterType,
            'currentFilterValue': self.currentFilterValue,
        }
Exemplo n.º 43
0
from collections import defaultdict
from io import BytesIO
from itertools import chain
from pathlib import Path
import sys
from typing import Dict, Iterable, Iterator, List, Union, Tuple, Optional

import attr
from attr import validators

from sortedcontainers import SortedList, SortedKeyList

from cassis.typesystem import FeatureStructure, TypeSystem

# Shared attrs validator: value must be a str or None.
_validator_optional_string = validators.optional(validators.instance_of(str))


class IdGenerator:
    """Hands out monotonically increasing integer ids."""

    def __init__(self, initial_id: int = 1):
        # Next id to hand out; advances by one per generate_id() call.
        self._next_id = initial_id

    def generate_id(self) -> int:
        """Return the current id and advance the counter."""
        current, self._next_id = self._next_id, self._next_id + 1
        return current


@attr.s(slots=True)
class Sofa:
    """Each CAS has one or more Subject of Analysis (SofA)"""
Exemplo n.º 44
0
class NBRegressionFixture:
    """Class to perform Jupyter Notebook Regression tests."""

    # --- execution options -------------------------------------------------
    # Whether to execute the notebook at all before diffing.
    exec_notebook: bool = attr.ib(True,
                                  instance_of(bool),
                                  metadata={"help": HELP_EXEC_NOTEBOOK})
    # Working directory for execution; `check` fills it in with the
    # notebook's own directory when left as None.
    exec_cwd: Union[str, None] = attr.ib(None,
                                         instance_of((type(None), str)),
                                         metadata={"help": HELP_EXEC_CWD})

    @exec_cwd.validator
    def _validate_exec_cwd(self, attribute, value):
        """Ensure exec_cwd is None or the path of an existing directory."""
        if value is None:
            return
        if not isinstance(value, str):
            raise TypeError("exec_cwd must be None or a string")
        if not os.path.isdir(value):
            raise IOError(
                "exec_cwd='{}' is not an existing directory".format(value))

    exec_allow_errors: bool = attr.ib(
        False, instance_of(bool), metadata={"help": HELP_EXEC_ALLOW_ERRORS})
    exec_timeout: int = attr.ib(120,
                                instance_of((int, float)),
                                metadata={"help": HELP_EXEC_TIMEOUT})

    @exec_timeout.validator
    def _validate_exec_timeout(self, attribute, value):
        """Ensure exec_timeout is a positive integer."""
        # NOTE(review): the attr.ib validator above accepts floats, but this
        # validator rejects them — a float timeout raises TypeError here.
        # Confirm which contract is intended.
        if not isinstance(value, int):
            raise TypeError("exec_timeout must be an integer")
        if value <= 0:
            raise ValueError("exec_timeout must be larger than 0")

    # --- coverage options --------------------------------------------------
    coverage: bool = attr.ib(False, metadata={"help": HELP_COVERAGE})

    @coverage.validator
    def _validate_coverage(self, attribute, value):
        """Ensure coverage is a bool and, if True, 'coverage' is importable."""
        if not isinstance(value, bool):
            raise TypeError("coverage must be an boolean")
        if value:
            try:
                import coverage  # noqa: F401
            except ImportError:
                raise ImportError("The 'coverage' package must be installed.")

    cov_config: Union[str,
                      None] = attr.ib(None,
                                      instance_of((type(None), str)),
                                      metadata={"help": HELP_COVERAGE_CONFIG})
    cov_source: Union[str, Tuple[str]] = attr.ib(
        None,
        instance_of((type(None), tuple)),
        metadata={"help": HELP_COVERAGE_SOURCE})

    # Optional existing coverage.Coverage object to merge results into.
    cov_merge: Union[CoverageType,
                     None] = attr.ib(None,
                                     metadata={"help": HELP_COVERAGE_MERGE},
                                     hash=True)

    @cov_merge.validator
    def _validate_cov_merge(self, attribute, value):
        """Ensure cov_merge is None or a coverage.Coverage instance."""
        if value is None:
            return
        try:
            from coverage import Coverage
        except ImportError:
            raise ImportError("The 'coverage' package must be installed")
        if not isinstance(value, Coverage):
            raise TypeError(
                "cov_merge must be an instance of coverage.Coverage")

    # --- post-processing and diff options ----------------------------------
    post_processors: tuple = attr.ib(("coalesce_streams", ),
                                     metadata={"help": HELP_POST_PROCS})

    @post_processors.validator
    def _validate_post_processors(self, attribute, values):
        """Ensure every post-processor name is a registered entry point."""
        if not isinstance(values, tuple):
            raise TypeError(f"post_processors must be a tuple: {values}")
        for name in values:
            if name not in list_processor_names():
                raise TypeError(
                    f"name '{name}' not found in entry points: {list_processor_names()}"
                )

    process_resources: dict = attr.ib(
        attr.Factory(dict),
        instance_of(dict),
        metadata={"help": "Resources to parse to processor functions."},
    )

    diff_replace: tuple = attr.ib((), metadata={"help": HELP_DIFF_REPLACE})

    @diff_replace.validator
    def _validate_diff_replace(self, attribute, values):
        """Ensure diff_replace is a tuple of valid regex-replace argument sets."""
        if not isinstance(values, tuple):
            raise TypeError(f"diff_replace must be a tuple: {values}")
        for i, args in enumerate(values):
            validate_regex_replace(args, i)

    diff_ignore: tuple = attr.ib(
        # TODO replace this default with a diff_replace one?
        (
            "/cells/*/outputs/*/traceback", ),
        metadata={"help": HELP_DIFF_IGNORE},
    )

    @diff_ignore.validator
    def _validate_diff_ignore(self, attribute, values):
        """Ensure diff_ignore is a tuple of '/'-prefixed path strings."""
        if not isinstance(values, tuple):
            raise TypeError(f"diff_ignore must be a tuple: {values}")
        for item in values:
            if not isinstance(item, str):
                # NOTE(review): message has a typo ("must a strings").
                raise TypeError(f"diff_ignore item '{item}' must a strings")
            if not item.startswith("/"):
                raise ValueError(
                    f"diff_ignore item '{item}' must start with '/'")

    diff_use_color: bool = attr.ib(True,
                                   instance_of(bool),
                                   metadata={"help": HELP_DIFF_USE_COLOR})
    diff_color_words: bool = attr.ib(False,
                                     instance_of(bool),
                                     metadata={"help": HELP_DIFF_COLOR_WORDS})

    # When True and diffs are found, overwrite the source notebook with the
    # newly executed one instead of only failing.
    force_regen: bool = attr.ib(False,
                                instance_of(bool),
                                metadata={"help": HELP_FORCE_REGEN})

    def __setattr__(self, key, value):
        """Add validation when setting attributes."""
        # Re-run the attrs validator on every assignment (not just in
        # __init__) so post-construction mutation is validated too.
        x_attr = getattr(attr.fields(self.__class__), key)
        if x_attr.validator:
            x_attr.validator(self, x_attr, value)

        super(NBRegressionFixture, self).__setattr__(key, value)

    def check(self,
              path: Union[TextIO, str],
              raise_errors: bool = True) -> NBRegressionResult:
        """Execute the Notebook and compare its initial vs. final contents.

        if ``force_regen`` is True, the new notebook will be written to ``path``

        if ``raise_errors`` is True:

        :raise nbconvert.preprocessors.CellExecutionError: if error in execution
        :raise NBConfigValidationError: if the notebook metadata is invalid
        :raise NBRegressionError: if diffs present

        :rtype: NBRegressionResult

        """
        __tracebackhide__ = True
        # `path` may be an open file object or a path-like string.
        if hasattr(path, "name"):
            abspath = os.path.abspath(path.name)
        else:
            abspath = os.path.abspath(str(path))
        logger.debug(f"Checking file: {abspath}")

        nb_initial, nb_config = load_notebook_with_config(path)

        resources = copy.deepcopy(self.process_resources)
        if not self.exec_cwd:
            # Goes through __setattr__, so the exec_cwd validator runs here.
            self.exec_cwd = os.path.dirname(abspath)

        if self.exec_notebook:
            logger.debug("Executing notebook.")
            exec_results = execute_notebook(
                nb_initial,
                resources=resources,
                cwd=self.exec_cwd,
                timeout=self.exec_timeout,
                allow_errors=self.exec_allow_errors,
                with_coverage=self.coverage,
                cov_config_file=self.cov_config,
                cov_source=self.cov_source,
            )
            exec_error = exec_results.exec_error
            nb_final = exec_results.notebook
            resources = exec_results.resources
        else:
            exec_error = None
            nb_final = nb_initial

        # TODO merge on fail option (using pytest-cov --no-cov-on-fail)
        # NOTE(review): `exec_results` is only bound when exec_notebook is
        # True — cov_merge set together with exec_notebook=False would raise
        # NameError here. Confirm that combination is disallowed upstream.
        if self.cov_merge and exec_results.has_coverage:
            logger.info("Merging coverage.")
            self.cov_merge.data.update(
                exec_results.coverage_data(self.cov_merge.debug),
                aliases=_get_coverage_aliases(self.cov_merge),
            )
            # we also take this opportunity to remove ''
            # from the unmatched source packages, which is caused by using `--cov=`
            self.cov_merge.source_pkgs_unmatched = [
                p for p in self.cov_merge.source_pkgs_unmatched if p
            ]

        for proc_name in self.post_processors:
            logger.debug(f"Applying post processor: {proc_name}")
            post_proc = load_processor(proc_name)
            nb_final, resources = post_proc(nb_final, resources)

        # Replacements from the fixture and from notebook metadata both apply.
        regex_replace = list(self.diff_replace) + list(nb_config.diff_replace)

        if regex_replace:
            logger.debug(f"Applying replacements: {regex_replace}")
            nb_initial_replace = regex_replace_nb(nb_initial, regex_replace)
            nb_final_replace = regex_replace_nb(nb_final, regex_replace)
        else:
            nb_initial_replace = nb_initial
            nb_final_replace = nb_final

        full_diff = diff_notebooks(nb_initial_replace, nb_final_replace)

        diff_ignore = copy.deepcopy(nb_config.diff_ignore)
        diff_ignore.update(self.diff_ignore)
        logger.debug(f"filtering diff by ignoring: {diff_ignore}")
        filtered_diff = filter_diff(full_diff, diff_ignore)

        diff_string = diff_to_string(
            nb_initial_replace,
            filtered_diff,
            use_color=self.diff_use_color,
            color_words=self.diff_color_words,
        )
        # TODO optionally write diff to file

        regen_exc = None
        # Regenerate only when something differs and execution succeeded.
        if filtered_diff and self.force_regen and not exec_error:

            if hasattr(path, "close") and hasattr(path, "name"):
                path.close()
                with open(path.name, "w") as handle:
                    nbformat.write(nb_final, handle)
            else:
                nbformat.write(nb_final, str(path))

            regen_exc = NBRegressionError(
                f"Files differ and --nb-force-regen set, "
                f"regenerating file at:\n- {abspath}")

        # Error precedence: execution error > regeneration notice > diff.
        if not raise_errors:
            pass
        elif exec_error:
            print("Diff up to exception:\n" + diff_string, file=sys.stderr)
            raise exec_error
        elif regen_exc:
            print("Diff before regeneration:\n" + diff_string, file=sys.stderr)
            raise regen_exc
        elif filtered_diff:
            raise NBRegressionError(diff_string)

        return NBRegressionResult(nb_initial, nb_final, full_diff,
                                  filtered_diff, diff_string, resources)
Exemplo n.º 45
0
 def test_subclass(self):
     """
     `optional` accepts values whose type is a subclass of the wrapped type.
     """
     validator = optional(instance_of(int))
     # bool is a subclass of int, so True must pass without raising.
     validator(None, simple_attr("test"), True)
Exemplo n.º 46
0
class _SortedKey(object):
    # NOTE(review): despite the name, this reads as a PAKE key-agreement
    # state machine (symmetric SPAKE2); confirm the intended class name.
    _appid = attrib(validator=instance_of(type(u"")))
    _versions = attrib(validator=instance_of(dict))
    _side = attrib(validator=instance_of(type(u"")))
    _timing = attrib(validator=provides(_interfaces.ITiming))
    m = MethodicalMachine()
    set_trace = getattr(m, "_setTrace",
                        lambda self, f: None)  # pragma: no cover

    def wire(self, boss, mailbox, receive):
        """Late-bind collaborators (adapted to their interfaces)."""
        self._B = _interfaces.IBoss(boss)
        self._M = _interfaces.IMailbox(mailbox)
        self._R = _interfaces.IReceive(receive)

    # --- states ------------------------------------------------------------
    @m.state(initial=True)
    def S0_know_nothing(self):
        pass  # pragma: no cover

    @m.state()
    def S1_know_code(self):
        pass  # pragma: no cover

    @m.state()
    def S2_know_key(self):
        pass  # pragma: no cover

    # Terminal state: a bad PAKE message was seen; give up permanently.
    @m.state(terminal=True)
    def S3_scared(self):
        pass  # pragma: no cover

    # from Boss
    @m.input()
    def got_code(self, code):
        pass

    # from Ordering
    def got_pake(self, body):
        # Dispatch the raw "pake" message into the good/bad machine inputs.
        assert isinstance(body, type(b"")), type(body)
        payload = bytes_to_dict(body)
        if "pake_v1" in payload:
            self.got_pake_good(hexstr_to_bytes(payload["pake_v1"]))
        else:
            self.got_pake_bad()

    @m.input()
    def got_pake_good(self, msg2):
        pass

    @m.input()
    def got_pake_bad(self):
        pass

    @m.output()
    def build_pake(self, code):
        # Start the symmetric SPAKE2 exchange and send our first message.
        with self._timing.add("pake1", waiting="crypto"):
            self._sp = SPAKE2_Symmetric(
                to_bytes(code), idSymmetric=to_bytes(self._appid))
            msg1 = self._sp.start()
        body = dict_to_bytes({"pake_v1": bytes_to_hexstr(msg1)})
        self._M.add_message("pake", body)

    @m.output()
    def scared(self):
        self._B.scared()

    @m.output()
    def compute_key(self, msg2):
        # Finish the SPAKE2 exchange, publish the key, and send the
        # encrypted "version" phase message.
        assert isinstance(msg2, type(b""))
        with self._timing.add("pake2", waiting="crypto"):
            key = self._sp.finish(msg2)
        # TODO: make B.got_key() an eventual send, since it will fire the
        # user/application-layer get_unverified_key() Deferred, and if that
        # calls back into other wormhole APIs, bad things will happen
        self._B.got_key(key)
        phase = "version"
        data_key = derive_phase_key(key, self._side, phase)
        plaintext = dict_to_bytes(self._versions)
        encrypted = encrypt_data(data_key, plaintext)
        self._M.add_message(phase, encrypted)
        # TODO: R.got_key() needs to be eventual-send too, as it can trigger
        # app-level got_verifier() and got_message() Deferreds.
        self._R.got_key(key)

    # --- transitions --------------------------------------------------------
    S0_know_nothing.upon(got_code, enter=S1_know_code, outputs=[build_pake])
    S1_know_code.upon(got_pake_good, enter=S2_know_key, outputs=[compute_key])
    S1_know_code.upon(got_pake_bad, enter=S3_scared, outputs=[scared])
Exemplo n.º 47
0
 def test_success(self):
     """
     Nothing happens if types match.
     """
     validator = instance_of(int)
     # A plain int passes the type check without raising.
     validator(None, simple_attr("test"), 42)
Exemplo n.º 48
0
 def wrapper(inst, attr, value):
     # Enforce the type contract first, then the platform support policy.
     instance_of(Platform)(inst, attr, value)
     if _is_supported(value):
         return
     raise OkonomiyakiError(
         "Platform {0} not supported".format(value)
     )