Example #1
class Context:
    def __init__(self, context):
        self.__context = context
        self.__statements = WeakSet()

    def __hash__(self):
        return hash(self.__context)

    def __eq__(self, other):
        try:
            return str(other) == self.__context
        except AttributeError:
            return False

    def __str__(self):
        return str(self.__context)

    def __repr__(self):
        return f'Context({self.__context})'

    def __len__(self):
        return len(self.__statements)

    def unwrap(self):
        return self.__context

    def add_statement(self, stmt):
        self.__statements.add(stmt)

    def remove_statement(self, stmt):
        self.__statements.discard(stmt)

    def statements(self):
        return self.__statements
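
A minimal usage sketch, not part of the original project: it assumes the Context class above with WeakSet imported from weakref, plus a hypothetical Statement type. Because the statements live in a WeakSet, a statement drops out of the context as soon as the last strong reference to it goes away (immediately under CPython's reference counting).

import gc
from weakref import WeakSet

class Statement:
    # Placeholder statement type; any hashable, weak-referenceable object works.
    def __init__(self, text):
        self.text = text

ctx = Context('urn:example:graph')       # hypothetical context identifier
stmt = Statement('subject predicate object')
ctx.add_statement(stmt)
assert len(ctx) == 1
del stmt                                 # drop the only strong reference
gc.collect()                             # belt and braces on non-refcounting runtimes
assert len(ctx) == 0                     # the WeakSet has forgotten the statement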
Example #2
class LazyInvalidation(object):
  def __enter__(self):
    assert threading.current_thread() == MAIN_THREAD
    assert not evaluation_stack
    self._watchMap = WeakKeyDictionary()
    self._watchable_objects = WeakSet()
    global invalidation_strategy
    invalidation_strategy._invalidate_all()
    invalidation_strategy = self

  def _watch_object(self, object):
    if object.watcher is not None and object.watcher not in self._watchMap:
      self._watchMap[object.watcher] = WeakWatchIntermediary(object, object.watcher)

  def _add_dependency(self, object):
    if evaluation_stack:
      evaluation_stack[-1].deps.append(object)

  def _unwatch_object(self, object):
    object.invalidate()
    self._watchable_objects.discard(object)

  def __exit__(self, type, value, traceback):
    global invalidation_strategy
    invalidation_strategy = LazyConstants()
    for intermediary in self._watchMap.itervalues():
      intermediary.release()
    self._watchMap.clear()

  def _invalidate_all(self):
    raise TypeError('Cannot nest lazy_invalidation contexts')
Example #3
class Signal(object):
    def __init__(self):
        self._functions = WeakSet()
        self._methods = WeakKeyDictionary()

    def __call__(self, *args, **kargs):
        # Call handler functions
        to_be_removed = []
        for func in self._functions.copy():
            try:
                func(*args, **kargs)
            except RuntimeError:
                warnings.warn(
                    'Signals func->RuntimeError: func "{}" will be removed.'.
                    format(func))
                to_be_removed.append(func)

        for remove in to_be_removed:
            self._functions.discard(remove)

        # Call handler methods
        to_be_removed = []
        emitters = self._methods.copy()
        for obj, funcs in emitters.items():
            msg_debug('obj is type "{}"'.format(type(obj)))
            for func in funcs.copy():
                try:
                    func(obj, *args, **kargs)
                except RuntimeError:
                    warnings.warn(
                        'Signals methods->RuntimeError, obj.func "{}.{}" will be removed'
                        .format(obj, func))
                    to_be_removed.append((obj, func))

        for obj, func in to_be_removed:
            self._methods[obj].discard(func)

    def connect(self, slot):
        if inspect.ismethod(slot):
            if slot.__self__ not in self._methods:
                self._methods[slot.__self__] = set()

            self._methods[slot.__self__].add(slot.__func__)

        else:
            self._functions.add(slot)

    def disconnect(self, slot):
        if inspect.ismethod(slot):
            if slot.__self__ in self._methods:
                self._methods[slot.__self__].remove(slot.__func__)
        else:
            if slot in self._functions:
                self._functions.remove(slot)

    def clear(self):
        self._functions.clear()
        self._methods.clear()
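
A minimal usage sketch, not from the original code base: it assumes the Signal class above together with the names it relies on (inspect, warnings, WeakSet, WeakKeyDictionary and a msg_debug logging helper) being in scope. Plain functions are held weakly and bound methods are tracked per instance, so with CPython's reference counting a handler vanishes as soon as its owner is garbage collected.

import inspect
import warnings
from weakref import WeakSet, WeakKeyDictionary

def msg_debug(text):
    # Stand-in for the debug logger the Signal class expects.
    pass

def on_ping(value):
    print('function handler:', value)

class Receiver:
    def on_ping(self, value):
        print('method handler:', value)

sig = Signal()
receiver = Receiver()
sig.connect(on_ping)            # stored weakly in _functions
sig.connect(receiver.on_ping)   # instance tracked in the WeakKeyDictionary
sig(42)                         # invokes both handlers
del receiver                    # the method entry disappears with its instance
sig(43)                         # only the module-level function is called now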
Example #4
class PrivacyService(Service):

    def __init__(self):
        self._privacy_instances = WeakSet()
        self._potential_vehicles_to_check = WeakSet()

    @property
    def privacy_instances(self):
        return self._privacy_instances

    def check_for_late_violators(self, sim):
        for privacy in self.privacy_instances:
            if not privacy.privacy_violators & PrivacyViolators.SIM:
                continue
            if sim not in privacy.violators:
                if sim in privacy.late_violators:
                    continue
                if privacy.is_sim_shoo_exempt(sim):
                    if not privacy.persistent_instance:
                        privacy.add_exempt_sim(sim)
                        if privacy.persistent_instance:
                            privacy.remove_sim_from_allowed_disallowed(sim)
                        if sim not in privacy.find_violating_sims():
                            continue
                        privacy.handle_late_violator(sim)
                        return True
                else:
                    if privacy.persistent_instance:
                        privacy.remove_sim_from_allowed_disallowed(sim)
                    if sim not in privacy.find_violating_sims():
                        continue
                    privacy.handle_late_violator(sim)
                    return True
        return False

    def add_instance(self, instance):
        self._privacy_instances.add(instance)

    def remove_instance(self, instance):
        self.privacy_instances.discard(instance)

    def stop(self):
        while self.privacy_instances:
            instance = self.privacy_instances.pop()
            instance.cleanup_privacy_instance()
        self._potential_vehicles_to_check.clear()

    def get_potential_vehicle_violators(self):
        return self._potential_vehicles_to_check

    def add_vehicle_to_monitor(self, vehicle):
        self._potential_vehicles_to_check.add(vehicle)

    def remove_vehicle_to_monitor(self, vehicle):
        self._potential_vehicles_to_check.discard(vehicle)
Example #5
class Signal(object):
    def __init__(self):
        self._functions = WeakSet()
        self._methods = WeakKeyDictionary()

    def __call__(self, *args, **kargs):
        # Call handler functions
        to_be_removed = []
        for func in self._functions.copy():
            try:
                func(*args, **kargs)
            except RuntimeError:
                warnings.warn('Signals func->RuntimeError: func "{}" will be removed.'.format(func))
                to_be_removed.append(func)

        for remove in to_be_removed:
            self._functions.discard(remove)

        # Call handler methods
        to_be_removed = []
        emitters = self._methods.copy()
        for obj, funcs in emitters.items():
            msg_debug('obj is type "{}"'.format(type(obj)))
            for func in funcs.copy():
                try:
                    func(obj, *args, **kargs)
                except RuntimeError:
                    warnings.warn('Signals methods->RuntimeError, obj.func "{}.{}" will be removed'.format(obj, func))
                    to_be_removed.append((obj, func))

        for obj, func in to_be_removed:
            self._methods[obj].discard(func)

    def connect(self, slot):
        if inspect.ismethod(slot):
            if slot.__self__ not in self._methods:
                self._methods[slot.__self__] = set()

            self._methods[slot.__self__].add(slot.__func__)

        else:
            self._functions.add(slot)

    def disconnect(self, slot):
        if inspect.ismethod(slot):
            if slot.__self__ in self._methods:
                self._methods[slot.__self__].remove(slot.__func__)
        else:
            if slot in self._functions:
                self._functions.remove(slot)

    def clear(self):
        self._functions.clear()
        self._methods.clear()
Example #6
class Observable:
    def __init__(self):
        self._observers = WeakSet()

    def add_observer(self, observer):
        self._observers.add(observer)

    def remove_observer(self, observer):
        self._observers.discard(observer)

    def notify_observers(self):
        for observer in self._observers:
            observer.observable_update(self)
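
A minimal usage sketch, not part of the original code: observers only need an observable_update() method, and because they are stored in a WeakSet the Observable never keeps them alive on its own.

from weakref import WeakSet

class TemperatureDisplay:
    # Hypothetical observer; only observable_update() is required.
    def observable_update(self, observable):
        print('updated by', observable)

sensor = Observable()
display = TemperatureDisplay()
sensor.add_observer(display)
sensor.notify_observers()   # calls display.observable_update(sensor)
del display                 # no strong reference left, so the observer is dropped
sensor.notify_observers()   # notifies nobody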
Example #7
class ObservableEvent(object):
    def __init__(self, weakref=False):
        self.listeners = WeakSet() if weakref else set()

    def __iadd__(self, ob):
        self.listeners.add(ob)
        return self

    def __isub__(self, ob):
        self.listeners.discard(ob)
        return self

    def notify(self, *a, **k):
        for ob in list(self.listeners):
            ob(*a, **k)
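
A minimal usage sketch, not part of the original code, showing the in-place operators. Note that with weakref=True a listener must be kept alive elsewhere: a bound method or a throwaway lambda would be collected immediately, since the WeakSet holds only weak references.

from weakref import WeakSet

def on_change(old, new):
    print('changed from', old, 'to', new)

event = ObservableEvent()          # strong references by default
event += on_change                 # __iadd__ registers the listener
event.notify('a', 'b')             # prints: changed from a to b
event -= on_change                 # __isub__ removes it again

weak_event = ObservableEvent(weakref=True)
weak_event += on_change            # survives only while on_change stays referenced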
Example #8
class ObservableEvent(object):
    def __init__(self, weakref=False):
        self.listeners = WeakSet() if weakref else set()

    def __iadd__(self, ob):
        self.listeners.add(ob)
        return self

    def __isub__(self, ob):
        self.listeners.discard(ob)
        return self

    def notify(self, *a, **k):
        for ob in list(self.listeners):
            ob(*a, **k)
Example #9
class CableBear (object):
  def __init__ (self):
    self._go_up_deferral = None
    self._waiting = True
    self.sessions = WeakSet()
    core.listen_to_dependencies(self, components=["WebServer"])

  def _all_dependencies_met (self):
    core.WebServer.set_handler("/cable_bear/ws", TCPAnalysisSession, self)

  def _handle_POXDesk_NewSession (self, event):
    if self._waiting is False: return
    self._waiting = False
    event.session.sendmsg(dict(method="new_CableBear"))
    log.debug("New POXDesk session -- starting CableBear frontend")

  def _handle_POXDesk_EndSession (self, event):
    if event.session in self.sessions:
      self.sessions.discard(event.session)
    if not self.sessions:
      self._waiting = True

  def _handle_core_GoingUpEvent (self, e):
    if self._waiting:
      self._go_up_deferral = e.get_deferral()
      log.info("Waiting for CableBear frontend to connect")

  def register_session (self, session):
    if self.sessions: return False # One at a time
    self.sessions.add(session)
    return True

  def unregister_session (self, session):
    self.sessions.discard(session)

  def add_record (self, msg):
    bad = None
    for s in self.sessions:
      try:
        s.sendmsg(msg)
      except Exception:
        log.exception("While trying to send to CableBear frontend")
        if bad is None: bad = []
        bad.append(s)
    if bad:
      for b in bad:
        self.sessions.discard(b)
Example #10
class AsyncoreLoop(object):

    def __init__(self):
        self._loop_lock = Lock()
        self._started = False
        self._shutdown = False

        self._conns_lock = Lock()
        self._conns = WeakSet()

    def maybe_start(self):
        should_start = False
        did_acquire = False
        try:
            did_acquire = self._loop_lock.acquire(False)
            if did_acquire and not self._started:
                self._started = True
                should_start = True
        finally:
            if did_acquire:
                self._loop_lock.release()

        if should_start:
            thread = Thread(target=self._run_loop, name="cassandra_driver_event_loop")
            thread.daemon = True
            thread.start()
            atexit.register(partial(self._cleanup, thread))

    def _run_loop(self):
        log.debug("Starting asyncore event loop")
        with self._loop_lock:
            while True:
                try:
                    asyncore.loop(timeout=0.001, use_poll=True, count=1000)
                except Exception:
                    log.debug("Asyncore event loop stopped unexepectedly", exc_info=True)
                    break

                if self._shutdown:
                    break

                with self._conns_lock:
                    if len(self._conns) == 0:
                        break

            self._started = False

        log.debug("Asyncore event loop ended")

    def _cleanup(self, thread):
        self._shutdown = True
        log.debug("Waiting for event loop thread to join...")
        thread.join()
        log.debug("Event loop thread was joined")

    def connection_created(self, connection):
        with self._conns_lock:
            self._conns.add(connection)

    def connection_destroyed(self, connection):
        with self._conns_lock:
            self._conns.discard(connection)
Example #11
class TestWeakSet(unittest.TestCase):

    def setUp(self):
        # need to keep references to them
        self.items = [SomeClass(c) for c in ('a', 'b', 'c')]
        self.items2 = [SomeClass(c) for c in ('x', 'y', 'z')]
        self.letters = [SomeClass(c) for c in string.ascii_letters]
        self.s = WeakSet(self.items)
        self.d = dict.fromkeys(self.items)
        self.obj = SomeClass('F')
        self.fs = WeakSet([self.obj])

    def test_methods(self):
        weaksetmethods = dir(WeakSet)
        for method in dir(set):
            if method == 'test_c_api' or method.startswith('_'):
                continue
            self.assertIn(method, weaksetmethods,
                         "WeakSet missing method " + method)

    def test_new_or_init(self):
        self.assertRaises(TypeError, WeakSet, [], 2)

    def test_len(self):
        self.assertEqual(len(self.s), len(self.d))
        self.assertEqual(len(self.fs), 1)
        del self.obj
        self.assertEqual(len(self.fs), 0)

    def test_contains(self):
        for c in self.letters:
            self.assertEqual(c in self.s, c in self.d)
        # 1 is not weakref'able, but that TypeError is caught by __contains__
        self.assertNotIn(1, self.s)
        self.assertIn(self.obj, self.fs)
        del self.obj
        self.assertNotIn(SomeClass('F'), self.fs)

    def test_union(self):
        u = self.s.union(self.items2)
        for c in self.letters:
            self.assertEqual(c in u, c in self.d or c in self.items2)
        self.assertEqual(self.s, WeakSet(self.items))
        self.assertEqual(type(u), WeakSet)
        self.assertRaises(TypeError, self.s.union, [[]])
        for C in set, frozenset, dict.fromkeys, list, tuple:
            x = WeakSet(self.items + self.items2)
            c = C(self.items2)
            self.assertEqual(self.s.union(c), x)

    def test_or(self):
        i = self.s.union(self.items2)
        self.assertEqual(self.s | set(self.items2), i)
        self.assertEqual(self.s | frozenset(self.items2), i)

    def test_intersection(self):
        i = self.s.intersection(self.items2)
        for c in self.letters:
            self.assertEqual(c in i, c in self.d and c in self.items2)
        self.assertEqual(self.s, WeakSet(self.items))
        self.assertEqual(type(i), WeakSet)
        for C in set, frozenset, dict.fromkeys, list, tuple:
            x = WeakSet([])
            self.assertEqual(self.s.intersection(C(self.items2)), x)

    def test_isdisjoint(self):
        self.assertTrue(self.s.isdisjoint(WeakSet(self.items2)))
        self.assertTrue(not self.s.isdisjoint(WeakSet(self.letters)))

    def test_and(self):
        i = self.s.intersection(self.items2)
        self.assertEqual(self.s & set(self.items2), i)
        self.assertEqual(self.s & frozenset(self.items2), i)

    def test_difference(self):
        i = self.s.difference(self.items2)
        for c in self.letters:
            self.assertEqual(c in i, c in self.d and c not in self.items2)
        self.assertEqual(self.s, WeakSet(self.items))
        self.assertEqual(type(i), WeakSet)
        self.assertRaises(TypeError, self.s.difference, [[]])

    def test_sub(self):
        i = self.s.difference(self.items2)
        self.assertEqual(self.s - set(self.items2), i)
        self.assertEqual(self.s - frozenset(self.items2), i)

    def test_symmetric_difference(self):
        i = self.s.symmetric_difference(self.items2)
        for c in self.letters:
            self.assertEqual(c in i, (c in self.d) ^ (c in self.items2))
        self.assertEqual(self.s, WeakSet(self.items))
        self.assertEqual(type(i), WeakSet)
        self.assertRaises(TypeError, self.s.symmetric_difference, [[]])

    def test_xor(self):
        i = self.s.symmetric_difference(self.items2)
        self.assertEqual(self.s ^ set(self.items2), i)
        self.assertEqual(self.s ^ frozenset(self.items2), i)

    def test_sub_and_super(self):
        pl, ql, rl = map(lambda s: [SomeClass(c) for c in s], ['ab', 'abcde', 'def'])
        p, q, r = map(WeakSet, (pl, ql, rl))
        self.assertTrue(p < q)
        self.assertTrue(p <= q)
        self.assertTrue(q <= q)
        self.assertTrue(q > p)
        self.assertTrue(q >= p)
        self.assertFalse(q < r)
        self.assertFalse(q <= r)
        self.assertFalse(q > r)
        self.assertFalse(q >= r)
        self.assertTrue(set('a').issubset('abc'))
        self.assertTrue(set('abc').issuperset('a'))
        self.assertFalse(set('a').issubset('cbs'))
        self.assertFalse(set('cbs').issuperset('a'))

    def test_gc(self):
        # Create a nest of cycles to exercise overall ref count check
        s = WeakSet(Foo() for i in range(1000))
        for elem in s:
            elem.cycle = s
            elem.sub = elem
            elem.set = WeakSet([elem])

    def test_subclass_with_custom_hash(self):
        # Bug #1257731
        class H(WeakSet):
            def __hash__(self):
                return int(id(self) & 0x7fffffff)
        s=H()
        f=set()
        f.add(s)
        self.assertIn(s, f)
        f.remove(s)
        f.add(s)
        f.discard(s)

    def test_init(self):
        s = WeakSet()
        s.__init__(self.items)
        self.assertEqual(s, self.s)
        s.__init__(self.items2)
        self.assertEqual(s, WeakSet(self.items2))
        self.assertRaises(TypeError, s.__init__, s, 2)
        self.assertRaises(TypeError, s.__init__, 1)

    def test_constructor_identity(self):
        s = WeakSet(self.items)
        t = WeakSet(s)
        self.assertNotEqual(id(s), id(t))

    def test_hash(self):
        self.assertRaises(TypeError, hash, self.s)

    def test_clear(self):
        self.s.clear()
        self.assertEqual(self.s, WeakSet([]))
        self.assertEqual(len(self.s), 0)

    def test_copy(self):
        dup = self.s.copy()
        self.assertEqual(self.s, dup)
        self.assertNotEqual(id(self.s), id(dup))

    def test_add(self):
        x = SomeClass('Q')
        self.s.add(x)
        self.assertIn(x, self.s)
        dup = self.s.copy()
        self.s.add(x)
        self.assertEqual(self.s, dup)
        self.assertRaises(TypeError, self.s.add, [])
        self.fs.add(Foo())
        self.assertTrue(len(self.fs) == 1)
        self.fs.add(self.obj)
        self.assertTrue(len(self.fs) == 1)

    def test_remove(self):
        x = SomeClass('a')
        self.s.remove(x)
        self.assertNotIn(x, self.s)
        self.assertRaises(KeyError, self.s.remove, x)
        self.assertRaises(TypeError, self.s.remove, [])

    def test_discard(self):
        a, q = SomeClass('a'), SomeClass('Q')
        self.s.discard(a)
        self.assertNotIn(a, self.s)
        self.s.discard(q)
        self.assertRaises(TypeError, self.s.discard, [])

    def test_pop(self):
        for i in range(len(self.s)):
            elem = self.s.pop()
            self.assertNotIn(elem, self.s)
        self.assertRaises(KeyError, self.s.pop)

    def test_update(self):
        retval = self.s.update(self.items2)
        self.assertEqual(retval, None)
        for c in (self.items + self.items2):
            self.assertIn(c, self.s)
        self.assertRaises(TypeError, self.s.update, [[]])

    def test_update_set(self):
        self.s.update(set(self.items2))
        for c in (self.items + self.items2):
            self.assertIn(c, self.s)

    def test_ior(self):
        self.s |= set(self.items2)
        for c in (self.items + self.items2):
            self.assertIn(c, self.s)

    def test_intersection_update(self):
        retval = self.s.intersection_update(self.items2)
        self.assertEqual(retval, None)
        for c in (self.items + self.items2):
            if c in self.items2 and c in self.items:
                self.assertIn(c, self.s)
            else:
                self.assertNotIn(c, self.s)
        self.assertRaises(TypeError, self.s.intersection_update, [[]])

    def test_iand(self):
        self.s &= set(self.items2)
        for c in (self.items + self.items2):
            if c in self.items2 and c in self.items:
                self.assertIn(c, self.s)
            else:
                self.assertNotIn(c, self.s)

    def test_difference_update(self):
        retval = self.s.difference_update(self.items2)
        self.assertEqual(retval, None)
        for c in (self.items + self.items2):
            if c in self.items and c not in self.items2:
                self.assertIn(c, self.s)
            else:
                self.assertNotIn(c, self.s)
        self.assertRaises(TypeError, self.s.difference_update, [[]])
        self.assertRaises(TypeError, self.s.symmetric_difference_update, [[]])

    def test_isub(self):
        self.s -= set(self.items2)
        for c in (self.items + self.items2):
            if c in self.items and c not in self.items2:
                self.assertIn(c, self.s)
            else:
                self.assertNotIn(c, self.s)

    def test_symmetric_difference_update(self):
        retval = self.s.symmetric_difference_update(self.items2)
        self.assertEqual(retval, None)
        for c in (self.items + self.items2):
            if (c in self.items) ^ (c in self.items2):
                self.assertIn(c, self.s)
            else:
                self.assertNotIn(c, self.s)
        self.assertRaises(TypeError, self.s.symmetric_difference_update, [[]])

    def test_ixor(self):
        self.s ^= set(self.items2)
        for c in (self.items + self.items2):
            if (c in self.items) ^ (c in self.items2):
                self.assertIn(c, self.s)
            else:
                self.assertNotIn(c, self.s)

    def test_inplace_on_self(self):
        t = self.s.copy()
        t |= t
        self.assertEqual(t, self.s)
        t &= t
        self.assertEqual(t, self.s)
        t -= t
        self.assertEqual(t, WeakSet())
        t = self.s.copy()
        t ^= t
        self.assertEqual(t, WeakSet())

    def test_eq(self):
        # issue 5964
        self.assertTrue(self.s == self.s)
        self.assertTrue(self.s == WeakSet(self.items))
        self.assertFalse(self.s == set(self.items))
        self.assertFalse(self.s == list(self.items))
        self.assertFalse(self.s == tuple(self.items))
        self.assertFalse(self.s == 1)

    def test_weak_destroy_while_iterating(self):
        # Issue #7105: iterators shouldn't crash when a key is implicitly removed
        # Create new items to be sure no-one else holds a reference
        items = [SomeClass(c) for c in ('a', 'b', 'c')]
        s = WeakSet(items)
        it = iter(s)
        next(it)             # Trigger internal iteration
        # Destroy an item
        del items[-1]
        gc.collect()    # just in case
        # We have removed either the first consumed items, or another one
        self.assertIn(len(list(it)), [len(items), len(items) - 1])
        del it
        # The removal has been committed
        self.assertEqual(len(s), len(items))

    def test_weak_destroy_and_mutate_while_iterating(self):
        # Issue #7105: iterators shouldn't crash when a key is implicitly removed
        items = [SomeClass(c) for c in string.ascii_letters]
        s = WeakSet(items)
        @contextlib.contextmanager
        def testcontext():
            try:
                it = iter(s)
                next(it)
                # Schedule an item for removal and recreate it
                u = SomeClass(str(items.pop()))
                gc.collect()      # just in case
                yield u
            finally:
                it = None           # should commit all removals

        with testcontext() as u:
            self.assertNotIn(u, s)
        with testcontext() as u:
            self.assertRaises(KeyError, s.remove, u)
        self.assertNotIn(u, s)
        with testcontext() as u:
            s.add(u)
        self.assertIn(u, s)
        t = s.copy()
        with testcontext() as u:
            s.update(t)
        self.assertEqual(len(s), len(t))
        with testcontext() as u:
            s.clear()
        self.assertEqual(len(s), 0)
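
The test case above uses SomeClass and Foo without defining them. In CPython's test suite they are small weak-referenceable helpers roughly along these lines (a sketch, not the exact upstream code); SomeClass compares and hashes by value so that fresh instances built from the same letter behave like existing set members.

# Modules the TestWeakSet case above relies on:
import contextlib
import gc
import string
import unittest
from weakref import WeakSet

class Foo:
    pass

class SomeClass(object):
    def __init__(self, value):
        self.value = value

    def __eq__(self, other):
        if type(other) != type(self):
            return NotImplemented
        return other.value == self.value

    def __hash__(self):
        return hash((SomeClass, self.value))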
Example #12
File: broker.py  Project: p2k/CPL
class ROBroker(object):
    """
    Helper class which simplifies handling of range observers.
    
    Can be used as a mixin or standalone.
    """
    
    implements(IRangeObservable)
    
    __robservers = None
    
    def addRangeObserver(self, rangeObserver):
        assert IRangeObserver.providedBy(rangeObserver)
        
        if self.__robservers == None:
            self.__robservers = WeakSet()
        
        self.__robservers.add(rangeObserver)
    
    def removeRangeObserver(self, rangeObserver):
        assert IRangeObserver.providedBy(rangeObserver)
        
        if self.__robservers == None:
            return
        
        self.__robservers.discard(rangeObserver)
    
    def notifyRangeWillChange(self, rangeFrom, rangeTo, srcobject=None):
        """
        Call this to inform the respective observers of an impending range
        change.
        
        You must call `notifyRangeDidChange()` after you performed the
        range change. The call is not stackable.
        """
        
        if srcobject == None:
            srcobject = self
        self.__rangeFrom = rangeFrom
        self.__rangeTo = rangeTo
        self.__srcobject = srcobject
        
        if self.__robservers != None:
            for rangeObserver in set(self.__robservers):
                rangeObserver.observedRangeWillChange(srcobject, rangeFrom, rangeTo)
    
    def notifyRangeDidChange(self):
        """
        Call this to inform the respective observers of a performed range
        change.
        
        You must have called `notifyRangeWillChange()` before.
        """
        
        if self.__robservers != None:
            for rangeObserver in set(self.__robservers):
                rangeObserver.observedRangeDidChange(self.__srcobject, self.__rangeFrom, self.__rangeTo)
        
        del self.__rangeFrom
        del self.__rangeTo
        del self.__srcobject
    
    def notifyRangeWillIncrease(self, rangeFrom, rangeTo, srcobject=None):
        """
        Call this to inform the respective observers of an impending range
        increase.
        
        You must call `notifyRangeDidIncrease()` after you performed the
        range increase. The call is not stackable.
        """
        
        if srcobject == None:
            srcobject = self
        self.__rangeFrom = rangeFrom
        self.__rangeTo = rangeTo
        self.__srcobject = srcobject
        
        if self.__robservers != None:
            for rangeObserver in set(self.__robservers):
                rangeObserver.observedRangeWillIncrease(srcobject, rangeFrom, rangeTo)
    
    def notifyRangeDidIncrease(self):
        """
        Call this to inform the respective observers of a performed range
        increase.
        
        You must have called `notifyRangeWillIncrease()` before.
        """
        
        if self.__robservers != None:
            for rangeObserver in set(self.__robservers):
                rangeObserver.observedRangeDidIncrease(self.__srcobject, self.__rangeFrom, self.__rangeTo)
        
        del self.__rangeFrom
        del self.__rangeTo
        del self.__srcobject
    
    def notifyRangeWillDecrease(self, rangeFrom, rangeTo, srcobject=None):
        """
        Call this to inform the respective observers of an impending range
        decrease.
        
        You must call `notifyRangeDidDecrease()` after you performed the
        range decrease. The call is not stackable.
        """
        
        if srcobject == None:
            srcobject = self
        self.__rangeFrom = rangeFrom
        self.__rangeTo = rangeTo
        self.__srcobject = srcobject
        
        if self.__robservers != None:
            for rangeObserver in set(self.__robservers):
                rangeObserver.observedRangeWillDecrease(srcobject, rangeFrom, rangeTo)
    
    def notifyRangeDidDecrease(self):
        """
        Call this to inform the respective observers of a performed range
        decrease.
        
        You must have called `notifyRangeWillDecrease()` before.
        """
        
        if self.__robservers != None:
            for rangeObserver in set(self.__robservers):
                rangeObserver.observedRangeDidDecrease(self.__srcobject, self.__rangeFrom, self.__rangeTo)
        
        del self.__rangeFrom
        del self.__rangeTo
        del self.__srcobject
Example #13
class TileableData(SerializableWithKey, Tileable):
    __slots__ = '__weakref__', '_siblings', '_cix', '_entities'
    _no_copy_attrs_ = SerializableWithKey._no_copy_attrs_ | {'_cix'}

    # required fields
    _op = KeyField('op')
    _shape = TupleField('shape',
                        ValueType.int64,
                        on_serialize=on_serialize_shape,
                        on_deserialize=on_deserialize_shape)
    # optional fields
    # `nsplits` means the sizes of chunks for each dimension
    _nsplits = TupleField('nsplits',
                          ValueType.tuple(ValueType.uint64),
                          on_serialize=on_serialize_nsplits)
    _extra_params = DictField('extra_params',
                              key_type=ValueType.string,
                              on_deserialize=AttributeDict)

    def __init__(self, *args, **kwargs):
        extras = AttributeDict(
            (k, kwargs.pop(k)) for k in set(kwargs) - set(self.__slots__))
        kwargs['_extra_params'] = kwargs.pop('_extra_params', extras)
        if kwargs.get('_nsplits', None) is not None:
            kwargs['_nsplits'] = tuple(tuple(s) for s in kwargs['_nsplits'])

        super(TileableData, self).__init__(*args, **kwargs)

        if hasattr(self, '_chunks') and self._chunks:
            self._chunks = sorted(self._chunks, key=attrgetter('index'))

        self._entities = WeakSet()

    @property
    def ndim(self):
        return len(self.shape)

    def __len__(self):
        try:
            return self.shape[0]
        except IndexError:
            if build_mode().is_build_mode:
                return 0
            raise TypeError('len() of unsized object')

    @property
    def shape(self):
        if hasattr(self, '_shape') and self._shape is not None:
            return self._shape
        if hasattr(self, '_nsplits') and self._nsplits is not None:
            self._shape = tuple(
                builtins.sum(nsplit) for nsplit in self._nsplits)
            return self._shape

    def _update_shape(self, new_shape):
        self._shape = new_shape

    @property
    def chunk_shape(self):
        if hasattr(self, '_nsplits') and self._nsplits is not None:
            return tuple(map(len, self._nsplits))

    @property
    def chunks(self):
        return getattr(self, '_chunks', None)

    @property
    def op(self):
        return getattr(self, '_op', None)

    @property
    def nsplits(self):
        return getattr(self, '_nsplits', None)

    @nsplits.setter
    def nsplits(self, new_nsplits):
        self._nsplits = new_nsplits

    @property
    def size(self):
        return np.prod(self.shape).item()

    @property
    def inputs(self):
        return self.op.inputs or []

    @inputs.setter
    def inputs(self, new_inputs):
        self.op.inputs = new_inputs

    @property
    def params(self):
        # params returns the properties which are useful to rebuild a new tileable object
        return {'shape': self.shape}

    @property
    def extra_params(self):
        return self._extra_params

    @property
    def cix(self):
        if self.ndim == 0:
            return ChunksIndexer(self)

        try:
            if getattr(self, '_cix', None) is None:
                self._cix = ChunksIndexer(self)
            return self._cix
        except (TypeError, ValueError):
            return ChunksIndexer(self)

    @property
    def entities(self):
        return self._entities

    def is_coarse(self):
        return not hasattr(self, '_chunks') or self._chunks is None or len(
            self._chunks) == 0

    def to_coarse(self):
        if self.is_coarse():
            return self
        new_entity = self.copy()
        new_entity._obj_set('_id', self._id)
        new_entity._chunks = None
        if self.inputs is None or len(self.inputs) == 0:
            new_entity.extra_params.update({'raw_chunk_size': self.nsplits})
        return new_entity

    def is_sparse(self):
        return self.op.is_sparse()

    issparse = is_sparse

    @enter_build_mode
    def attach(self, entity):
        self._entities.add(entity)

    @enter_build_mode
    def detach(self, entity):
        self._entities.discard(entity)

    def tiles(self):
        return handler.tiles(self)

    def single_tiles(self):
        return handler.single_tiles(self)

    @kernel_mode
    def build_graph(self,
                    graph=None,
                    cls=DAG,
                    tiled=False,
                    compose=True,
                    executed_keys=None):
        from .utils import build_fetch

        executed_keys = executed_keys or []
        if tiled and self.is_coarse():
            self.tiles()

        graph = graph if graph is not None else cls()
        keys = None

        if tiled:
            nodes = list(c.data for c in self.chunks)
            keys = list(c.key for c in self.chunks)
        else:
            nodes = list(self.op.outputs)

        visited = set()
        while len(nodes) > 0:
            node = nodes.pop()

            # replace executed tensor/chunk by tensor/chunk with fetch op
            if node.key in executed_keys:
                node = build_fetch(node, coarse=True).data

            visited.add(node)
            if not graph.contains(node):
                graph.add_node(node)
            children = node.inputs or []
            for c in children:
                if c.key in executed_keys:
                    continue
                if not graph.contains(c):
                    graph.add_node(c)
                if not graph.has_successor(c, node):
                    graph.add_edge(c, node)
            nodes.extend([
                c for c in itertools.chain(
                    *[inp.op.outputs for inp in node.inputs or []])
                if c not in visited
            ])
        if tiled and compose:
            graph.compose(keys=keys)

        if not tiled and any(not n.is_coarse() for n in graph):
            return self._to_coarse_graph(graph)

        return graph

    @staticmethod
    def _to_coarse_graph(graph):
        new_graph = type(graph)()
        visited = dict()
        for n in graph:
            if n not in visited:
                new_node = n.to_coarse()
                visited[n] = new_node
                new_graph.add_node(new_node)
            for succ in graph.successors(n):
                if succ not in visited:
                    new_node = succ.to_coarse()
                    visited[succ] = new_node
                    new_graph.add_node(new_node)
                new_graph.add_edge(visited[n], visited[succ])
        return new_graph

    def visualize(self, graph_attrs=None, node_attrs=None, **kw):
        from graphviz import Source

        g = self.build_graph(**kw)
        dot = g.to_dot(graph_attrs=graph_attrs, node_attrs=node_attrs)

        return Source(dot)

    def execute(self, session=None, **kw):
        from .session import Session

        if session is None:
            session = Session.default_or_local()
        return session.run(self, **kw)

    def fetch(self, session=None, **kw):
        from .session import Session

        if session is None:
            session = Session.default_or_local()
        return session.fetch(self, **kw)

    def _set_execute_session(self, session):
        _cleaner.register(self, session)

    _execute_session = property(fset=_set_execute_session)
Example #14
class Window(object):

    supported_drag_types = [const.DOC_ID_LIST_PBOARD_TYPE, ak.NSFilenamesPboardType]
    app = WeakProperty()

    def __init__(self, app, state=None):
        self.app = app
        self._current_editor = None
        self.wc = WindowController(self)
        self.state = state
        self.command = CommandBar(self, app.text_commander)
        self.projects = KVOList()
        self.recent = self._suspended_recent = RecentItemStack(100)
        self._recent_history = None
        self.window_settings_loaded = False
        self.no_document_undo_manager = UndoManager()
        self.menu = self.make_context_menu()
        self.dirty_editors = WeakSet()

    def window_did_load(self):
        wc = self.wc
        wc.docsView.default_menu = self.menu
        wc.docsView.setRefusesFirstResponder_(True)
        wc.docsView.registerForDraggedTypes_(self.supported_drag_types)
        wc.plusButton.setRefusesFirstResponder_(True)
        wc.plusButton.setImage_(load_image(const.PLUS_BUTTON_IMAGE))
        wc.propsViewButton.setRefusesFirstResponder_(True)
        wc.propsViewButton.setImage_(load_image(const.PROPS_DOWN_BUTTON_IMAGE))
        wc.propsViewButton.setAlternateImage_(load_image(const.PROPS_UP_BUTTON_IMAGE))

        self._setstate(self._state)
        self._state = None

        if not self.projects:
            self.new_project()

    def make_context_menu(self):
        def has_path(item):
            return item and item.file_path
        return Menu([
            MenuItem("Copy Path", self.copy_path, is_enabled=has_path),
            MenuItem("Close", self.close_item, "Command+w"),
        ])

    def _setstate(self, state):
        if state:
            projects = state.get("projects")
            if projects is None:
                projects = state.get("project_serials", []) # legacy
            for serial in projects:
                proj = Project(self, serial=serial)
                self.projects.append(proj)
            for proj_index, doc_index in state.get("recent_items", []):
                if proj_index < len(self.projects):
                    proj = self.projects[proj_index]
                    if doc_index == "<project>":
                        self.recent.push(proj.id)
                    elif doc_index < len(proj.editors):
                        doc = proj.editors[doc_index]
                        self.recent.push(doc.id)
            if 'window_settings' in state:
                self.window_settings = state['window_settings']
            with self.suspend_recent_updates():
                pass # focus recent

    def __getstate__(self):
        if self._state is not None:
            return self._state
        def iter_settings():
            indexes = {}
            serials = []
            for i, project in enumerate(self.projects):
                serial = project.serialize()
                if serial:
                    serials.append(serial)
                indexes[project.id] = [i, "<project>"]
                for j, doc in enumerate(project.editors):
                    indexes[doc.id] = [i, j]
            yield "projects", serials
            rits = []
            for ident in self.recent:
                pair = indexes.get(ident)
                if pair is not None:
                    rits.append(pair)
            yield "recent_items", rits
            yield "window_settings", self.window_settings
        return {key: val for key, val in iter_settings() if val}

    def __setstate__(self, state):
        assert not hasattr(self, '_state'), 'can only be called once'
        self._state = state

    state = property(__getstate__, __setstate__)

    def discard(self, item):
        ident = None if item is None else item.id
        recent = self.recent
        update_current = item in self.selected_items or not self.selected_items
        with self.suspend_recent_updates(update_current):
            for project in list(self.projects):
                pid = project.id
                for editor in list(project.editors):
                    did = editor.id
                    if ident in (pid, did):
                        recent.discard(did)
                        assert editor.project is project, (editor.project, project)
                        editor.close()
                if ident == pid:
                    recent.discard(pid)
                    self.projects.remove(project)
                    project.close()

    def focus(self, value, offset=1):
        """Change the current document by navigating the tree or recent documents

        :param value: One of the direction constants in
        `editxt.constants` or an editor's file path. `NEXT` and
        `PREVIOUS` select items in the recent editors stack. `UP` and
        `DOWN` move up or down in the tree.
        :param offset: The number of positions to move in direction.
        :returns: True if a new editor was focused, otherwise false.
        """
        def focus(ident):
            for project in self.projects:
                if project.id == ident:
                    self.current_editor = project
                    return True
                else:
                    for editor in project.editors:
                        if editor.id == ident:
                            self.current_editor = editor
                            return True
            return False
        def get_item_in_tree(current, offset):
            if current is not None:
                items = []
                index = 0
                stop = sys.maxsize
                for project in self.projects:
                    items.append(project)
                    if current.id == project.id:
                        stop = index + offset
                        if stop <= index:
                            break
                    index += 1
                    if project.expanded:
                        for editor in project.editors:
                            items.append(editor)
                            if current.id == editor.id:
                                stop = index + offset
                                if stop <= index:
                                    break
                            index += 1
                if 0 <= stop < len(items):
                    return items[stop]
            return None
        if isinstance(value, const.Constant):
            if value == const.PREVIOUS or value == const.NEXT:
                history = ((list(reversed(self.recent)) + [0])
                           if self._recent_history is None
                           else self._recent_history)
                if value == const.PREVIOUS:
                    offset = offset + history[-1]
                else:
                    offset = history[-1] - offset
                if 0 <= offset < len(history) - 1:
                    ok = focus(history[offset])
                    if ok:
                        history[-1] = offset
                        self._recent_history = history
                    return ok
                return False
            if value == const.UP:
                offset = -offset
            editor = get_item_in_tree(self.current_editor, offset)
            if editor is not None:
                self.current_editor = editor
                return True
        if isinstance(value, (Editor, Project)):
            return focus(value.id)
        return False

    @contextmanager
    def suspend_recent_updates(self, update_current=True):
        self.recent = RecentItemStack(1)
        try:
            yield
        finally:
            self.recent = recent = self._suspended_recent
        if not update_current:
            return
        lookup = {}
        for project in self.projects:
            lookup[project.id] = project
            lookup.update((e.id, e) for e in project.editors)
        current = self.current_editor
        current_id = None if current is None else current.id
        if current_id in lookup and recent and current_id == recent[-1]:
            return
        while True:
            ident = recent.pop()
            if ident is None:
                if self.projects:
                    for project in self.projects:
                        if project.expanded and project.editors:
                            self.current_editor = project.editors[0]
                            break
                    else:
                        self.current_editor = self.projects[0]
                break
            item = lookup.get(ident)
            if item is not None:
                self.current_editor = item
                break

    def do_menu_command(self, sender):
        self.app.text_commander.do_menu_command(self.current_editor, sender)

    def validate_menu_command(self, item):
        return self.app.text_commander.is_menu_command_enabled(self.current_editor, item)

    @property
    def current_editor(self):
        return self._current_editor
    @current_editor.setter
    def current_editor(self, editor):
        self._current_editor = editor
        self._recent_history = None
        if editor is None:
            self.wc.setup_current_editor(None)
            self.selected_items = []
            return
        if self.wc.is_current_view(editor.main_view):
            editor.focus()
        else:
            self.recent.push(editor.id)
            if self.wc.setup_current_editor(editor):
                if isinstance(editor, Editor) \
                        and self.find_project_with_editor(editor) is None:
                    self.insert_items([editor])
        if not self.selected_items or editor is not self.selected_items[0]:
            self.selected_items = [editor]

    @property
    def selected_items(self):
        return self.wc.selected_items
    @selected_items.setter
    def selected_items(self, value):
        self.wc.selected_items = value

    def selected_editor_changed(self):
        selected = self.selected_items
        if selected and selected[0] is not self.current_editor:
            self.current_editor = selected[0]

    def on_dirty_status_changed(self, editor, dirty):
        if dirty:
            self.dirty_editors.add(editor)
        else:
            self.dirty_editors.discard(editor)
        self.wc.on_dirty_status_changed(editor, self.is_dirty)

    @property
    def is_dirty(self):
        return bool(self.dirty_editors)

    def iter_editors_of_document(self, doc):
        for project in self.projects:
            for editor in project.iter_editors_of_document(doc):
                yield editor

    def should_select_item(self, outlineview, item):
        return True

    def open_documents(self):
        editor = self.current_editor
        if editor is not None and editor.dirname():
            directory = editor.dirname()
        else:
            directory = os.path.expanduser("~")
        self.wc.open_documents(directory, None, self.open_paths)

    def save_as(self):
        self.save(prompt=True)

    def save(self, prompt=False):
        editor = self.current_editor
        if isinstance(editor, Editor):
            editor.save(prompt=prompt)

    def reload_current_document(self):
        editor = self.current_editor
        if isinstance(editor, Editor):
            editor.document.reload_document()

    def save_document_as(self, editor, save_with_path):
        """Prompt for path to save document

        :param editor: The editor of the document to be saved.
        :param save_with_path: A callback accepting a single parameter (the
        chosen file path) that does the work of actually saving the file.
        Call with ``None`` to cancel the save operation.
        """
        directory, filename = self._directory_and_filename(editor.file_path)
        self.wc.save_document_as(directory, filename, save_with_path)

    def prompt_to_overwrite(self, editor, save_with_path):
        """Prompt to overwrite the given editor's document's file path

        :param editor: The editor of the document to be saved.
        :param save_with_path: A callback accepting a single parameter (the
        chosen file path) that does the work of actually saving the file.
        Call with ``None`` to cancel the save operation.
        """
        def save_as():
            self.save_document_as(editor, save_with_path)
        if editor is None:
            diff_with_original = None
        else:
            def diff_with_original():
                from editxt.command.diff import diff
                from editxt.command.parser import Options
                diff(editor, Options(file=editor.file_path))
        self.wc.prompt_to_overwrite(
            editor.file_path, save_with_path, save_as, diff_with_original)

    def prompt_to_close(self, editor, save_discard_or_cancel, save_as=True):
        """Prompt to see if the document can be closed

        :param editor: The editor of the document to be closed.
        :param save_discard_or_cancel: A callback to be called with the outcome
        of the prompt: save (True), discard (False), or cancel (None).
        :param save_as: Boolean, if true prompt to "save as" (with dialog),
        otherwise prompt to save (without dialog).
        """
        self.current_editor = editor
        self.wc.prompt_to_close(editor.file_path, save_discard_or_cancel, save_as)

    @staticmethod
    def _directory_and_filename(path):
        if isabs(path):
            directory, filename = split(path)
            while directory and directory != sep and not isdir(directory):
                directory = dirname(directory)
        else:
            directory = None
            filename = basename(path)
        assert filename, path
        if not directory:
            # TODO editor.project.path or path of most recent document
            # None -> directory used in the previous invocation of the panel
            directory = None
        return directory, filename

    def new_project(self):
        project = Project(self)
        editor = project.create_editor()
        self.projects.append(project)
        self.current_editor = editor
        return project

    def toggle_properties_pane(self):
        tree_rect = self.wc.docsScrollview.frame()
        prop_rect = self.wc.propsView.frame()
        if self.wc.propsViewButton.state() == ak.NSOnState:
            # hide properties view
            tree_rect.size.height += prop_rect.size.height - 1.0
            tree_rect.origin.y = prop_rect.origin.y
            prop_rect.size.height = 0.0
        else:
            # show properties view
            tree_rect.size.height -= 115.0
            if prop_rect.size.height > 0:
                tree_rect.size.height += (prop_rect.size.height - 1.0)
            tree_rect.origin.y = prop_rect.origin.y + 115.0
            prop_rect.size.height = 116.0
            self.wc.propsView.setHidden_(False)
        resize_tree = fn.NSDictionary.dictionaryWithObjectsAndKeys_(
            self.wc.docsScrollview, ak.NSViewAnimationTargetKey,
            fn.NSValue.valueWithRect_(tree_rect), ak.NSViewAnimationEndFrameKey,
            None,
        )
        resize_props = fn.NSDictionary.dictionaryWithObjectsAndKeys_(
            self.wc.propsView, ak.NSViewAnimationTargetKey,
            fn.NSValue.valueWithRect_(prop_rect), ak.NSViewAnimationEndFrameKey,
            None,
        )
        anims = fn.NSArray.arrayWithObjects_(resize_tree, resize_props, None)
        animation = ak.NSViewAnimation.alloc().initWithViewAnimations_(anims)
        #animation.setAnimationBlockingMode_(NSAnimationBlocking)
        animation.setDuration_(0.25)
        animation.startAnimation()

    def find_project_with_editor(self, editor):
        for proj in self.projects:
            for e in proj.editors:
                if editor is e:
                    return proj
        return None

    def find_project_with_path(self, path):
        for proj in self.projects:
            p = proj.file_path
            if p and os.path.exists(p) and os.path.samefile(p, path):
                return proj
        return None

    def get_current_project(self, create=False):
        item = self.current_editor
        if item is not None:
            return item if isinstance(item, Project) else item.project
        if self.projects:
            for project in self.projects:
                if project.expanded:
                    return project
            return self.projects[0]
        if create:
            proj = Project(self)
            self.projects.append(proj)
            return proj
        return None

    def tooltip_for_item(self, view, item):
        it = view.realItemForOpaqueItem_(item)
        null = it is None or it.file_path is None
        return None if null else user_path(it.file_path)

    def should_edit_item(self, col, item):
        if col.isEditable():
            obj = representedObject(item)
            return isinstance(obj, Project) and obj.can_rename()
        return False

    def copy_path(self, item):
        """Copy item path to pasteboard

        Put newline-delimited paths on pasteboard if there are multiple
        items selected and the given item is one of them.
        """
        selected = self.selected_items
        if item not in selected:
            selected = [item]
        Pasteboard().text = "\n".join(item.file_path for item in selected)

    def close_item(self, item):
        """Close editor or project

        Close all selected items if there are multiple items selected
        and the given item is one of them.
        """
        def do_close(should_close):
            if should_close:
                for item in selected:
                    self.discard(item)
        selected = self.selected_items
        if item not in selected:
            selected = [item]
        self.app.async_interactive_close(selected, do_close)

    def window_did_become_key(self, window):
        editor = self.current_editor
        if isinstance(editor, Editor):
            # TODO refactor Editor to support check_for_external_changes()
            editor.document.check_for_external_changes(window)

    def should_close(self, do_close):
        """Determine if the window should be closed

        If this returns false an interactive close loop will be started, which
        may eventually result in the window being closed.
        """
        def iter_dirty_editors():
            app = self.app
            for proj in self.projects:
                wins = app.find_windows_with_project(proj)
                if wins == [self]:
                    for editor in proj.dirty_editors():
                        doc_windows = app.iter_windows_with_editor_of_document(
                                            editor.document)
                        if all(win is self for win in doc_windows):
                            yield editor
        if next(iter_dirty_editors(), None) is None:
            return True
        def callback(should_close):
            if should_close:
                do_close()
        self.app.async_interactive_close(iter_dirty_editors(), callback)
        return False

    def window_will_close(self):
        self.app.discard_window(self)

    def _get_window_settings(self):
        return dict(
            frame_string=str(self.wc.frame_string),
            splitter_pos=self.wc.splitter_pos,
            properties_hidden=self.wc.properties_hidden,
        )
    def _set_window_settings(self, settings):
        fs = settings.get("frame_string")
        if fs is not None:
            self.wc.frame_string = fs
        sp = settings.get("splitter_pos")
        if sp is not None:
            self.wc.splitter_pos = sp
        self.wc.properties_hidden = settings.get("properties_hidden", False)
        self.window_settings_loaded = True
    window_settings = property(_get_window_settings, _set_window_settings)

    def close(self):
        wc = self.wc
        if wc is not None:
            self.window_settings_loaded = False
            while self.projects:
                self.projects.pop().close()
            #wc.docsController.setContent_(None)
            #self.wc = None

    # drag/drop logic ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~

    def is_project_drag(self, info):
        """Return True if only projects are being dropped else False"""
        pb = info.draggingPasteboard()
        t = pb.availableTypeFromArray_(self.supported_drag_types)
        if t == const.DOC_ID_LIST_PBOARD_TYPE:
            id_list = pb.propertyListForType_(const.DOC_ID_LIST_PBOARD_TYPE)
            items = self.iter_dropped_id_list(id_list)
            return all(isinstance(item, Project) for item in items)
        elif t == ak.NSFilenamesPboardType:
            paths = pb.propertyListForType_(ak.NSFilenamesPboardType)
            return all(Project.is_project_path(path) for path in paths)
        return False

    def get_id_path_pairs(self, items):
        """Get a list of (<item id>, <item path>) pairs for given items

        :param items: A list of editors and/or projects.
        :returns: A list of two-tuples (<item id>, <item path>). <item id> is
        an opaque internal identifier for the document, and <item path> is
        the file system path of the item or ``None`` if the item does not have
        a path.
        """
        def pair(item):
            path = item.file_path
            return (item.id, path if path and os.path.exists(path) else None)
        return [pair(item) for item in items]
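    # Illustrative output of the method above (hypothetical items; a path that no
    # longer exists on disk is reported as None):
    #
    #     window.get_id_path_pairs([editor, project])
    #     # -> [(<editor id>, '/tmp/notes.txt'), (<project id>, None)]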

    def validate_drop(self, outline_view, info, item, index):
        if self.is_project_drag(info):
            if item is not None:
                obj = representedObject(item)
                path = self.wc.docsController.indexPathForObject_(obj)
                if path is not None:
                    index = path.indexAtPosition_(0)
                    outline_view.setDropItem_dropChildIndex_(None, index)
                else:
                    return ak.NSDragOperationNone
            elif index < 0:
                outline_view.setDropItem_dropChildIndex_(None, len(self.projects))
            return ak.NSDragOperationMove
        else:
            # text document drag
            if item is not None:
                obj = representedObject(item)
                if isinstance(obj, Project):
                    if index < 0:
                        #outline_view.setDropItem_dropChildIndex_(item, 0)
                        # the following might be more correct, but is too confusing
                        outline_view.setDropItem_dropChildIndex_(item, len(obj.editors))
                else:
                    return ak.NSDragOperationNone # document view cannot have children
            else:
                if index < 0:
                    # drop on listview background
                    last_proj_index = len(self.projects) - 1
                    if last_proj_index > -1:
                        # we have at least one project
                        path = fn.NSIndexPath.indexPathWithIndex_(last_proj_index)
                        node = self.wc.docsController.nodeAtArrangedIndexPath_(path)
                        proj = representedObject(node)
                        outline_view.setDropItem_dropChildIndex_(node, len(proj.editors))
                    else:
                        outline_view.setDropItem_dropChildIndex_(None, -1)
                elif index == 0:
                    return ak.NSDragOperationNone # prevent drop above top project
        op = info.draggingSourceOperationMask()
        if op not in [ak.NSDragOperationCopy, ak.NSDragOperationGeneric]:
            op = ak.NSDragOperationMove
        return op

    def accept_drop(self, view, pasteboard, parent=const.CURRENT, index=-1, action=const.MOVE):
        """Accept drop operation

        :param view: The view on which the drop occurred.
        :param pasteboard: NSPasteboard object.
        :param parent: The parent item in the outline view.
        :param index: The index in the outline view or parent item at which the
            drop occurred.
        :param action: The action to perform when dragging (see
        ``insert_items(..., action)``). Ignored if the items being dropped are
        paths.
        :returns: True if the drop was accepted, otherwise False.
        """
        pb = pasteboard
        t = pb.availableTypeFromArray_(self.supported_drag_types)
        if t == const.DOC_ID_LIST_PBOARD_TYPE:
            id_list = pb.propertyListForType_(const.DOC_ID_LIST_PBOARD_TYPE)
            items = self.iter_dropped_id_list(id_list)
        elif t == ak.NSFilenamesPboardType:
            paths = pb.propertyListForType_(ak.NSFilenamesPboardType)
            items = self.iter_dropped_paths(paths)
            action = None
        else:
            assert t is None, t
            return False
        return bool(self.insert_items(items, parent, index, action))
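    # Hedged usage sketch (assumes `pb` is an NSPasteboard carrying one of the
    # supported drag types):
    #
    #     accepted = window.accept_drop(view, pb, parent=const.CURRENT, index=-1)
    #     # document-id drops honor `action` (MOVE/COPY); file-path drops ignore it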

    def iter_dropped_id_list(self, id_list):
        """Iterate TextDocument objects referenced by pasteboard (if any)"""
        if not id_list:
            return
        for ident in id_list:
            item = self.app.find_item_with_id(ident)
            if item is not None:
                yield item

    def open_url(self, url, link, focus=True):
        """Open file specified by URL

        The URL must have two attributes:
        - path : The path to the file. The first leading slash is
          stripped, so absolute paths must have an extra slash.
        - query : A query string from which an optional "goto" parameter
          may be parsed. The goto parameter specifies a line or line +
          selection (`line.sel_start.sel_length`) to goto/select after
          opening the file.

        :param url: Parsed URL. See `urllib.parse.urlparse` for structure.
        :param link: The original URL string.
        :param focus: Focus newly opened editor.
        """
        path = unquote(url.path)
        if path.startswith("/"):
            path = path[1:]
        editors = self.open_paths([path], focus=focus)
        if editors:
            assert len(editors) == 1, (link, editors)
            query = parse_qs(url.query)
            if "goto" in query:
                goto = query["goto"][0]
                try:
                    if "." in goto:
                        line, start, end = goto.split(".")
                        num = (int(line), int(start), int(end))
                    else:
                        num = int(goto)
                except ValueError:
                    log.debug("invalid goto: %r (link: %s)", goto, link)
                else:
                    editors[0].goto_line(num)
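    # Worked example of the goto parsing above; the URL scheme and host are
    # hypothetical, only the path and query shape matter:
    #
    #     link = "editor://open//Users/me/notes.txt?goto=12.3.5"
    #     window.open_url(urlparse(link), link)
    #     # path "//Users/me/notes.txt" loses one leading slash -> "/Users/me/notes.txt"
    #     # goto "12.3.5" -> editors[0].goto_line((12, 3, 5))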

    def open_paths(self, paths, focus=True):
        return self.insert_items(self.iter_dropped_paths(paths), focus=focus)

    def iter_dropped_paths(self, paths):
        if not paths:
            return
        for path in paths:
            if path is None or os.path.isfile(path) or not os.path.exists(path):
                yield self.app.document_with_path(path)
#            elif os.path.isdir(path):
#                yield Project(self, name=os.path.dirname(path))
            else:
                log.info("cannot open path: %s", path)

    def insert_items(self, items, project=const.CURRENT, index=-1, action=None,
                     focus=True):
        """Insert items into the document tree

        :param items: An iterable of projects, editors, and/or documents.
        :param project: The parent project into which items are being inserted.
            Documents will be inserted in the current project if unspecified.
        :param index: The index in the outline view or parent project at which
            the item(s) should be inserted. Add after current if < 0 (default).
        :param action: What to do with items that are already open in
            this window:

            - None : insert new item(s), but do not change existing item(s).
            - MOVE : move existing item(s) to index.
            - COPY : copy item(s) to index.

            A file is considered to be "existing" if there is an editor
            with the same path in the project where it is being
            inserted. A project is considered to be "existing" if there
            is a project with the same path in the window where it is
            being inserted.
        :param focus: Focus most recent newly opened editor if true (the
            default).
        :returns: A list of editors and projects that were inserted.
        """
        if (project is not None and
            project != const.CURRENT and
            project.window is not self):
            raise ValueError("project does not belong to this window")
        inserted = []
        focus_editor = None
        with self.suspend_recent_updates(update_current=False):
            pindex = index
            if pindex < 0:
                pindex = len(self.projects)
            for is_project_group, group in groupby(items, self.is_project):
                if is_project_group:
                    for item in group:
                        project, pindex = self._insert_project(item, pindex, action)
                        if project is not None:
                            inserted.append(project)
                            focus_editor = project
                    # Reset index since the project into which non-project
                    # items will be inserted has changed.
                    index = -1
                else:
                    if project == const.CURRENT or project is None:
                        if index >= 0:
                            raise NotImplementedError
                        project = self.get_current_project(create=True)
                    inserts, focus_editor = project.insert_items(group, index, action)
                    inserted.extend(inserts)
        if focus and focus_editor is not None:
            self.current_editor = focus_editor
        return inserted
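    # Hedged usage sketch for insert_items (assumes `doc` is an open document and
    # `proj` is a Project that belongs to this window):
    #
    #     window.insert_items([doc])                        # append to the current project
    #     window.insert_items([proj], action=const.MOVE)    # move an existing project
    #     window.insert_items([doc], project=proj, index=0, action=const.COPY)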

    def is_project(self, item):
        """Return true if item can be inserted as a project"""
        # TODO return true if item is a directory path
        return isinstance(item, Project)

    def _insert_project(self, item, index, action):
        if action != const.MOVE:
            raise NotImplementedError('cannot copy project yet')
        if item.window is self:
            window = self
            pindex = self.projects.index(item)
            if pindex == index:
                return None, index
            if pindex - index <= 0:
                index -= 1
        else:
            window = item.window

        # BEGIN HACK crash on remove project with editors
        editors = item.editors
        tmp, editors[:] = list(editors), []
        window.projects.remove(item) # this line should be all that's necessary
        editors.extend(tmp)
        # END HACK

        item.window = self
        self.projects.insert(index, item)
        return item, index + 1

    def show(self, sender):
        self.wc.showWindow_(sender)

    @property
    def undo_manager(self):
        editor = self.current_editor
        if editor is None:
            return self.no_document_undo_manager
        return editor.undo_manager
Exemplo n.º 15
0
class TileableData(SerializableWithKey, Tileable):
    __slots__ = '__weakref__', '_siblings', '_cix', '_entities'
    _no_copy_attrs_ = SerializableWithKey._no_copy_attrs_ | {'_cix'}

    # required fields
    _op = KeyField('op')
    # optional fields
    # `nsplits` means the sizes of chunks for each dimension
    _nsplits = TupleField('nsplits', ValueType.tuple(ValueType.uint64),
                          on_serialize=on_serialize_nsplits)
    _extra_params = DictField('extra_params', key_type=ValueType.string, on_deserialize=AttributeDict)

    def __init__(self, *args, **kwargs):
        extras = AttributeDict((k, kwargs.pop(k)) for k in set(kwargs) - set(self.__slots__))
        kwargs['_extra_params'] = kwargs.pop('_extra_params', extras)
        if kwargs.get('_nsplits', None) is not None:
            kwargs['_nsplits'] = tuple(tuple(s) for s in kwargs['_nsplits'])

        super(TileableData, self).__init__(*args, **kwargs)

        if hasattr(self, '_chunks') and self._chunks:
            self._chunks = sorted(self._chunks, key=attrgetter('index'))

        self._entities = WeakSet()

    @property
    def chunk_shape(self):
        if hasattr(self, '_nsplits') and self._nsplits is not None:
            return tuple(map(len, self._nsplits))
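    # chunk_shape example: nsplits ((2, 2), (3,)) means two chunks along axis 0
    # and one chunk along axis 1, so chunk_shape is (2, 1).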

    @property
    def chunks(self):
        return getattr(self, '_chunks', None)

    @property
    def op(self):
        return getattr(self, '_op', None)

    @property
    def nsplits(self):
        return getattr(self, '_nsplits', None)

    @nsplits.setter
    def nsplits(self, new_nsplits):
        self._nsplits = new_nsplits

    @property
    def inputs(self):
        return self.op.inputs or []

    @inputs.setter
    def inputs(self, new_inputs):
        self.op.inputs = new_inputs

    @property
    def params(self):
        # params returns the properties that are useful to rebuild a new tileable object
        return dict()

    @property
    def extra_params(self):
        return self._extra_params

    @property
    def cix(self):
        if self.ndim == 0:
            return ChunksIndexer(self)

        try:
            if getattr(self, '_cix', None) is None:
                self._cix = ChunksIndexer(self)
            return self._cix
        except (TypeError, ValueError):
            return ChunksIndexer(self)

    @property
    def entities(self):
        return self._entities

    def is_coarse(self):
        return not hasattr(self, '_chunks') or self._chunks is None or len(self._chunks) == 0

    def is_sparse(self):
        return self.op.is_sparse()

    issparse = is_sparse

    @enter_build_mode
    def attach(self, entity):
        self._entities.add(entity)

    @enter_build_mode
    def detach(self, entity):
        self._entities.discard(entity)

    def execute(self, session=None, **kw):
        from .session import Session

        if session is None:
            session = Session.default_or_local()
        return session.run(self, **kw)

    def fetch(self, session=None, **kw):
        from .session import Session

        if session is None:
            session = Session.default_or_local()
        return session.fetch(self, **kw)

    def _set_execute_session(self, session):
        _cleaner.register(self, session)

    _execute_session = property(fset=_set_execute_session)
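# A hedged usage sketch for the execute/fetch pair above; `t` stands for a
# hypothetical instance of a TileableData subclass and a default session is
# assumed to be available:
#
#     result = t.execute()   # runs through Session.default_or_local() when session is None
#     data = t.fetch()       # fetches the computed value from the same session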
Exemplo n.º 16
0
class TestWeakSet(unittest.TestCase):
    def setUp(self):
        self.items = [ustr(c) for c in ('a', 'b', 'c')]
        self.items2 = [ustr(c) for c in ('x', 'y', 'z')]
        self.ab_items = [ustr(c) for c in 'ab']
        self.abcde_items = [ustr(c) for c in 'abcde']
        self.def_items = [ustr(c) for c in 'def']
        self.ab_weakset = WeakSet(self.ab_items)
        self.abcde_weakset = WeakSet(self.abcde_items)
        self.def_weakset = WeakSet(self.def_items)
        self.letters = [ustr(c) for c in string.ascii_letters]
        self.s = WeakSet(self.items)
        self.d = dict.fromkeys(self.items)
        self.obj = ustr('F')
        self.fs = WeakSet([self.obj])

    def test_methods(self):
        weaksetmethods = dir(WeakSet)
        for method in dir(set):
            if method == 'test_c_api' or method.startswith('_'):
                continue
            self.assertIn(method, weaksetmethods,
                          'WeakSet missing method ' + method)

    def test_new_or_init(self):
        self.assertRaises(TypeError, WeakSet, [], 2)

    def test_len(self):
        self.assertEqual(len(self.s), len(self.d))
        self.assertEqual(len(self.fs), 1)
        del self.obj
        self.assertEqual(len(self.fs), 0)

    def test_contains(self):
        for c in self.letters:
            self.assertEqual(c in self.s, c in self.d)
        self.assertNotIn(1, self.s)
        self.assertIn(self.obj, self.fs)
        del self.obj
        self.assertNotIn(ustr('F'), self.fs)

    def test_union(self):
        u = self.s.union(self.items2)
        for c in self.letters:
            self.assertEqual(c in u, c in self.d or c in self.items2)
        self.assertEqual(self.s, WeakSet(self.items))
        self.assertEqual(type(u), WeakSet)
        self.assertRaises(TypeError, self.s.union, [[]])
        for C in (set, frozenset, dict.fromkeys, list, tuple):
            x = WeakSet(self.items + self.items2)
            c = C(self.items2)
            self.assertEqual(self.s.union(c), x)
            del c
        self.assertEqual(len(u), len(self.items) + len(self.items2))
        self.items2.pop()
        gc.collect()
        self.assertEqual(len(u), len(self.items) + len(self.items2))

    def test_or(self):
        i = self.s.union(self.items2)
        self.assertEqual(self.s | set(self.items2), i)
        self.assertEqual(self.s | frozenset(self.items2), i)

    def test_intersection(self):
        s = WeakSet(self.letters)
        i = s.intersection(self.items2)
        for c in self.letters:
            self.assertEqual(c in i, c in self.items2 and c in self.letters)
        self.assertEqual(s, WeakSet(self.letters))
        self.assertEqual(type(i), WeakSet)
        for C in (set, frozenset, dict.fromkeys, list, tuple):
            x = WeakSet([])
            self.assertEqual(i.intersection(C(self.items)), x)
        self.assertEqual(len(i), len(self.items2))
        self.items2.pop()
        gc.collect()
        self.assertEqual(len(i), len(self.items2))

    def test_isdisjoint(self):
        self.assertTrue(self.s.isdisjoint(WeakSet(self.items2)))
        self.assertTrue(not self.s.isdisjoint(WeakSet(self.letters)))

    def test_and(self):
        i = self.s.intersection(self.items2)
        self.assertEqual(self.s & set(self.items2), i)
        self.assertEqual(self.s & frozenset(self.items2), i)

    def test_difference(self):
        i = self.s.difference(self.items2)
        for c in self.letters:
            self.assertEqual(c in i, c in self.d and c not in self.items2)
        self.assertEqual(self.s, WeakSet(self.items))
        self.assertEqual(type(i), WeakSet)
        self.assertRaises(TypeError, self.s.difference, [[]])

    def test_sub(self):
        i = self.s.difference(self.items2)
        self.assertEqual(self.s - set(self.items2), i)
        self.assertEqual(self.s - frozenset(self.items2), i)

    def test_symmetric_difference(self):
        i = self.s.symmetric_difference(self.items2)
        for c in self.letters:
            self.assertEqual(c in i, (c in self.d) ^ (c in self.items2))
        self.assertEqual(self.s, WeakSet(self.items))
        self.assertEqual(type(i), WeakSet)
        self.assertRaises(TypeError, self.s.symmetric_difference, [[]])
        self.assertEqual(len(i), len(self.items) + len(self.items2))
        self.items2.pop()
        gc.collect()
        self.assertEqual(len(i), len(self.items) + len(self.items2))

    def test_xor(self):
        i = self.s.symmetric_difference(self.items2)
        self.assertEqual(self.s ^ set(self.items2), i)
        self.assertEqual(self.s ^ frozenset(self.items2), i)

    def test_sub_and_super(self):
        self.assertTrue(self.ab_weakset <= self.abcde_weakset)
        self.assertTrue(self.abcde_weakset <= self.abcde_weakset)
        self.assertTrue(self.abcde_weakset >= self.ab_weakset)
        self.assertFalse(self.abcde_weakset <= self.def_weakset)
        self.assertFalse(self.abcde_weakset >= self.def_weakset)
        self.assertTrue(set('a').issubset('abc'))
        self.assertTrue(set('abc').issuperset('a'))
        self.assertFalse(set('a').issubset('cbs'))
        self.assertFalse(set('cbs').issuperset('a'))

    def test_lt(self):
        self.assertTrue(self.ab_weakset < self.abcde_weakset)
        self.assertFalse(self.abcde_weakset < self.def_weakset)
        self.assertFalse(self.ab_weakset < self.ab_weakset)
        self.assertFalse(WeakSet() < WeakSet())

    def test_gt(self):
        self.assertTrue(self.abcde_weakset > self.ab_weakset)
        self.assertFalse(self.abcde_weakset > self.def_weakset)
        self.assertFalse(self.ab_weakset > self.ab_weakset)
        self.assertFalse(WeakSet() > WeakSet())

    def test_gc(self):
        s = WeakSet(Foo() for i in range(1000))
        for elem in s:
            elem.cycle = s
            elem.sub = elem
            elem.set = WeakSet([elem])

    def test_subclass_with_custom_hash(self):
        class H(WeakSet):
            def __hash__(self):
                return int(id(self) & 2147483647)

        s = H()
        f = set()
        f.add(s)
        self.assertIn(s, f)
        f.remove(s)
        f.add(s)
        f.discard(s)

    def test_init(self):
        s = WeakSet()
        s.__init__(self.items)
        self.assertEqual(s, self.s)
        s.__init__(self.items2)
        self.assertEqual(s, WeakSet(self.items2))
        self.assertRaises(TypeError, s.__init__, s, 2)
        self.assertRaises(TypeError, s.__init__, 1)

    def test_constructor_identity(self):
        s = WeakSet(self.items)
        t = WeakSet(s)
        self.assertNotEqual(id(s), id(t))

    def test_hash(self):
        self.assertRaises(TypeError, hash, self.s)

    def test_clear(self):
        self.s.clear()
        self.assertEqual(self.s, WeakSet([]))
        self.assertEqual(len(self.s), 0)

    def test_copy(self):
        dup = self.s.copy()
        self.assertEqual(self.s, dup)
        self.assertNotEqual(id(self.s), id(dup))

    def test_add(self):
        x = ustr('Q')
        self.s.add(x)
        self.assertIn(x, self.s)
        dup = self.s.copy()
        self.s.add(x)
        self.assertEqual(self.s, dup)
        self.assertRaises(TypeError, self.s.add, [])
        self.fs.add(Foo())
        self.assertTrue(len(self.fs) == 1)
        self.fs.add(self.obj)
        self.assertTrue(len(self.fs) == 1)

    def test_remove(self):
        x = ustr('a')
        self.s.remove(x)
        self.assertNotIn(x, self.s)
        self.assertRaises(KeyError, self.s.remove, x)
        self.assertRaises(TypeError, self.s.remove, [])

    def test_discard(self):
        a, q = ustr('a'), ustr('Q')
        self.s.discard(a)
        self.assertNotIn(a, self.s)
        self.s.discard(q)
        self.assertRaises(TypeError, self.s.discard, [])

    def test_pop(self):
        for i in range(len(self.s)):
            elem = self.s.pop()
            self.assertNotIn(elem, self.s)
        self.assertRaises(KeyError, self.s.pop)

    def test_update(self):
        retval = self.s.update(self.items2)
        self.assertEqual(retval, None)
        for c in (self.items + self.items2):
            self.assertIn(c, self.s)
        self.assertRaises(TypeError, self.s.update, [[]])

    def test_update_set(self):
        self.s.update(set(self.items2))
        for c in (self.items + self.items2):
            self.assertIn(c, self.s)

    def test_ior(self):
        self.s |= set(self.items2)
        for c in (self.items + self.items2):
            self.assertIn(c, self.s)

    def test_intersection_update(self):
        retval = self.s.intersection_update(self.items2)
        self.assertEqual(retval, None)
        for c in (self.items + self.items2):
            if c in self.items2 and c in self.items:
                self.assertIn(c, self.s)
            else:
                self.assertNotIn(c, self.s)
        self.assertRaises(TypeError, self.s.intersection_update, [[]])

    def test_iand(self):
        self.s &= set(self.items2)
        for c in (self.items + self.items2):
            if c in self.items2 and c in self.items:
                self.assertIn(c, self.s)
            else:
                self.assertNotIn(c, self.s)

    def test_difference_update(self):
        retval = self.s.difference_update(self.items2)
        self.assertEqual(retval, None)
        for c in (self.items + self.items2):
            if c in self.items and c not in self.items2:
                self.assertIn(c, self.s)
            else:
                self.assertNotIn(c, self.s)
        self.assertRaises(TypeError, self.s.difference_update, [[]])
        self.assertRaises(TypeError, self.s.symmetric_difference_update, [[]])

    def test_isub(self):
        self.s -= set(self.items2)
        for c in (self.items + self.items2):
            if c in self.items and c not in self.items2:
                self.assertIn(c, self.s)
            else:
                self.assertNotIn(c, self.s)

    def test_symmetric_difference_update(self):
        retval = self.s.symmetric_difference_update(self.items2)
        self.assertEqual(retval, None)
        for c in (self.items + self.items2):
            if (c in self.items) ^ (c in self.items2):
                self.assertIn(c, self.s)
            else:
                self.assertNotIn(c, self.s)
        self.assertRaises(TypeError, self.s.symmetric_difference_update, [[]])

    def test_ixor(self):
        self.s ^= set(self.items2)
        for c in (self.items + self.items2):
            if (c in self.items) ^ (c in self.items2):
                self.assertIn(c, self.s)
            else:
                self.assertNotIn(c, self.s)

    def test_inplace_on_self(self):
        t = self.s.copy()
        t |= t
        self.assertEqual(t, self.s)
        t &= t
        self.assertEqual(t, self.s)
        t -= t
        self.assertEqual(t, WeakSet())
        t = self.s.copy()
        t ^= t
        self.assertEqual(t, WeakSet())

    def test_eq(self):
        self.assertTrue(self.s == self.s)
        self.assertTrue(self.s == WeakSet(self.items))
        self.assertFalse(self.s == set(self.items))
        self.assertFalse(self.s == list(self.items))
        self.assertFalse(self.s == tuple(self.items))
        self.assertFalse(self.s == WeakSet([Foo]))
        self.assertFalse(self.s == 1)

    def test_ne(self):
        self.assertTrue(self.s != set(self.items))
        s1 = WeakSet()
        s2 = WeakSet()
        self.assertFalse(s1 != s2)

    def test_weak_destroy_while_iterating(self):
        items = [ustr(c) for c in ('a', 'b', 'c')]
        s = WeakSet(items)
        it = iter(s)
        next(it)
        del items[-1]
        gc.collect()
        self.assertIn(len(list(it)), [len(items), len(items) - 1])
        del it
        self.assertEqual(len(s), len(items))

    def test_weak_destroy_and_mutate_while_iterating(self):
        items = [ustr(c) for c in string.ascii_letters]
        s = WeakSet(items)

        @contextlib.contextmanager
        def testcontext():
            try:
                it = iter(s)
                yielded = ustr(str(next(it)))
                u = ustr(str(items.pop()))
                if yielded == u:
                    next(it)
                gc.collect()
                yield u
            finally:
                it = None

        with testcontext() as u:
            self.assertNotIn(u, s)
        with testcontext() as u:
            self.assertRaises(KeyError, s.remove, u)
        self.assertNotIn(u, s)
        with testcontext() as u:
            s.add(u)
        self.assertIn(u, s)
        t = s.copy()
        with testcontext() as u:
            s.update(t)
        self.assertEqual(len(s), len(t))
        with testcontext() as u:
            s.clear()
        self.assertEqual(len(s), 0)

    def test_len_cycles(self):
        N = 20
        items = [RefCycle() for i in range(N)]
        s = WeakSet(items)
        del items
        it = iter(s)
        try:
            next(it)
        except StopIteration:
            pass
        gc.collect()
        n1 = len(s)
        del it
        gc.collect()
        n2 = len(s)
        self.assertIn(n1, (0, 1))
        self.assertEqual(n2, 0)

    def test_len_race(self):
        self.addCleanup(gc.set_threshold, *gc.get_threshold())
        for th in range(1, 100):
            N = 20
            gc.collect(0)
            gc.set_threshold(th, th, th)
            items = [RefCycle() for i in range(N)]
            s = WeakSet(items)
            del items
            it = iter(s)
            try:
                next(it)
            except StopIteration:
                pass
            n1 = len(s)
            del it
            n2 = len(s)
            self.assertGreaterEqual(n1, 0)
            self.assertLessEqual(n1, N)
            self.assertGreaterEqual(n2, 0)
            self.assertLessEqual(n2, n1)
Exemplo n.º 17
0
class Manager:

    def __init__(self):
        self.listeners = {}
        self.cell_aliases = {}
        self.cell_rev_aliases = {}
        self.macro_listeners = {}
        self.observers = {}
        self.registrar_listeners = WeakKeyDictionary()
        self.rev_registrar_listeners = WeakKeyDictionary()
        self.pin_to_cells = {}
        self.cells = WeakValueDictionary()
        self.cell_to_output_pin = WeakKeyDictionary()
        self._childids = WeakValueDictionary()
        self.registrar_items = []
        self.unstable_workers = WeakSet()
        super().__init__()

    def set_stable(self, worker, value):
        assert value in (True, False), value
        if not value:
            #print("UNSTABLE", worker)
            self.unstable_workers.add(worker)
        else:
            #print("STABLE", worker)
            self.unstable_workers.discard(worker)
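    # Because unstable_workers is a WeakSet, a worker marked unstable is dropped
    # automatically once nothing else references it; a hedged sketch:
    #
    #     manager.set_stable(worker, False)   # worker is now in manager.unstable_workers
    #     del worker                          # once collected, the WeakSet forgets it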

    def add_cell_alias(self, source, target):
        from .cell import Cell
        assert isinstance(source, Cell)
        assert isinstance(target, Cell)
        assert source is not target
        cell_id = self.get_cell_id(source)
        target_ref = weakref.ref(target)

        try:
            aliases = self.cell_aliases[cell_id]
            if target_ref not in aliases:
                aliases.append(target_ref)

        except KeyError:
            self.cell_aliases[cell_id] = [target_ref]

        if cell_id not in self.cells:
            self.cells[cell_id] = source

        #reverse alias
        cell_id = self.get_cell_id(target)
        source_ref = weakref.ref(source)

        try:
            rev_aliases = self.cell_rev_aliases[cell_id]
            if source_ref not in rev_aliases:
                rev_aliases.append(source_ref)

        except KeyError:
            self.cell_rev_aliases[cell_id] = [source_ref]

        if cell_id not in self.cells:
            self.cells[cell_id] = target

    def add_registrar_item(self, registrar_name, dtype, data, data_name):
        item = registrar_name, dtype, data, data_name
        for curr_item in self.registrar_items:
            if data_name is None:
                exists = (curr_item[:3] == item[:3])
            else:
                exists = (curr_item[:2] == item[:2]) and \
                  curr_item[3] == data_name
            if exists:
                raise ValueError("Registrar item already exists")
        self.registrar_items.append(item)

    def remove_registrar_item(self, registrar_name, dtype, data, data_name):
        item = registrar_name, dtype, data, data_name
        self.registrar_items.remove(item)

    def add_listener(self, cell, input_pin):
        cell_id = self.get_cell_id(cell)
        pin_ref = weakref.ref(input_pin)

        try:
            listeners = self.listeners[cell_id]
            assert pin_ref not in listeners
            # TODO: tolerate (silently ignore) a connection that exists already?
            listeners.append(pin_ref)

        except KeyError:
            self.listeners[cell_id] = [pin_ref]

        try:
            curr_pin_to_cells = self.pin_to_cells[input_pin.get_pin_id()]
            assert cell_id not in curr_pin_to_cells
            # TODO: tolerate (append) multiple inputs?
            curr_pin_to_cells.append(cell_id)

        except KeyError:
            self.pin_to_cells[input_pin.get_pin_id()] = [cell_id]

        if cell_id not in self.cells:
            self.cells[cell_id] = cell

    def _remove_listener(self, cell_id, input_pin, worker):
        input_pin_id = input_pin.get_pin_id()
        l = self.listeners[cell_id]
        l[:] = [ref for ref in l if ref() is not None and ref().get_pin_id() != input_pin_id]
        if not len(l):
            self.listeners.pop(cell_id)
            cell = self.cells.get(cell_id, None)
            if cell is not None:
                cell._on_disconnect(input_pin, worker, False)

    def remove_listener(self, cell, input_pin):
        worker = input_pin.worker_ref()
        input_pin_id = input_pin.get_pin_id()
        cell_ids = self.pin_to_cells.pop(input_pin_id, [])
        cell_id = self.get_cell_id(cell)
        self._remove_listener(cell_id, input_pin, worker)

    def remove_listeners_pin(self, input_pin):
        worker = input_pin.worker_ref()
        cell_ids = self.pin_to_cells.pop(input_pin.get_pin_id(), [])
        for cell_id in cell_ids:
            self._remove_listener(cell_id, input_pin, worker)

    def remove_aliases(self, cell):
        cell_id = self.get_cell_id(cell)
        cell_ref = weakref.ref(cell)
        targets = self.cell_aliases.pop(cell_id, [])

        for target_ref in targets:
            target = target_ref()
            if target is None:
                continue
            target._on_disconnect(cell, None, incoming=True)
            target_id = self.get_cell_id(target)
            r = self.cell_rev_aliases[target_id]
            r[:] = [rr for rr in r if rr is not cell_ref]
            if not len(r):
                self.cell_rev_aliases.pop(target_id)

        #rev_aliases
        targets = self.cell_rev_aliases.pop(cell_id, [])

        for target_ref in targets:
            target = target_ref()
            if target is None:
                continue
            target_id = self.get_cell_id(target)
            r = self.cell_aliases[target_id]
            r[:] = [rr for rr in r if rr is not cell_ref]
            if not len(r):
                self.cell_aliases.pop(target_id)

    def remove_listeners_cell(self, cell):
        cell_id = self.get_cell_id(cell)
        listeners = self.listeners.pop(cell_id, [])
        for listener in listeners:
            pin = listener()
            if pin is None:
                continue
            pin_id = pin.get_pin_id()
            if pin_id not in self.pin_to_cells:
                continue
            self.pin_to_cells[pin_id][:] = \
                [c for c in self.pin_to_cells[pin_id] if c != cell_id ]


    def add_macro_listener(self, cell, macro_object, macro_arg):
        cell_id = self.get_cell_id(cell)
        m = (macro_object, macro_arg)

        try:
            macro_listeners = self.macro_listeners[cell_id]
            assert m not in macro_listeners
            macro_listeners.append(m)

        except KeyError:
            self.macro_listeners[cell_id] = [m]
            if cell_id not in self.cells:
                self.cells[cell_id] = cell

    def remove_macro_listener(self, cell, macro_object, macro_arg):
        cell_id = self.get_cell_id(cell)
        m = (macro_object, macro_arg)

        if cell_id in self.macro_listeners:
            l = self.macro_listeners[cell_id]
            if m in l:
                l.remove(m)

    def remove_macro_listeners_cell(self, cell):
        cell_id = self.get_cell_id(cell)
        listeners = self.macro_listeners.pop(cell_id, [])


    def add_registrar_listener(self, registrar, key, target, namespace_name):
        if registrar not in self.registrar_listeners:
            self.registrar_listeners[registrar] = {}
        d = self.registrar_listeners[registrar]
        if key not in d:
            d[key] = []
        d[key].append((weakref.ref(target), namespace_name))

        if target not in self.rev_registrar_listeners:
            self.rev_registrar_listeners[target] = {}
        r = self.rev_registrar_listeners[target]
        if key not in r:
            r[key] = []
        r[key].append(weakref.ref(registrar))

    def remove_registrar_listeners(self, target):
        if target not in self.rev_registrar_listeners:
            return
        rev = self.rev_registrar_listeners.pop(target)
        for key in rev:
            for registrar_ref in rev[key]:
                registrar = registrar_ref()
                if registrar not in self.registrar_listeners:
                    continue
                r = self.registrar_listeners[registrar]
                t = r[key]
                t[:] = [tt for tt in t if tt[0]() is not None and tt[0]() is not target]
                if not len(t):
                    r.pop(key)
                    if not len(r):
                        self.registrar_listeners.pop(registrar)


    def add_observer(self, cell, observer):
        cell_id = self.get_cell_id(cell)
        obs_ref = weakref.ref(observer)

        try:
            observers = self.observers[cell_id]
            assert obs_ref not in observers
            observers.append(obs_ref)
        except KeyError:
            self.observers[cell_id] = [obs_ref]
        if cell_id not in self.cells:
            self.cells[cell_id] = cell

    def remove_observer(self, cell, observer):
        cell_id = self.get_cell_id(cell)
        obs_ref = weakref.ref(observer)

        if cell_id in self.observers:
            l = self.observers[cell_id]
            if obs_ref in l:
                l.remove(obs_ref)

    def remove_observers_cell(self, cell):
        cell_id = self.get_cell_id(cell)
        listeners = self.observers.pop(cell_id, [])

    def _update(self, cell, dtype, value, *,
            worker=None, only_last=False):
        import threading
        assert threading.current_thread() is threading.main_thread()
        from .cell import Signal
        cell_id = self.get_cell_id(cell)

        observers = self.observers.get(cell_id, [])
        for observer in observers:
            obs = observer()
            if obs is not None:
                obs(value)

        macro_listeners = self.macro_listeners.get(cell_id, [])
        if not only_last:
            for macro_object, macro_arg in macro_listeners:
                try:
                    updated = macro_object.update_cell(macro_arg)
                except Exception:
                    #TODO: proper logging
                    import traceback
                    traceback.print_exc()

        aliases = self.cell_aliases.get(cell_id, [])
        for target_cell_ref in aliases:
            target_cell = target_cell_ref()
            if target_cell is not None:
                if isinstance(target_cell, Signal):
                    #print("cell-cell alias", cell, "=>", target_cell)
                    self._update(target_cell, None, None,
                        worker=worker, only_last=only_last)
                else:
                    value2 = value
                    if dtype is not None and \
                      (dtype == "cson" or dtype[0] == "cson") and \
                      target_cell.dtype is not None and \
                      (target_cell.dtype == "json" or target_cell.dtype[0] == "json"):
                        if isinstance(value, (str, bytes)):
                            value2 = cson2json(value)

                    target_cell._update(value2, propagate=True)

        listeners = self.listeners.get(cell_id, [])
        if only_last:
            listeners = listeners[-1:]

        resource_name0 = None
        if cell.resource is not None:
            resource_name0 = cell.resource.filepath
        for input_pin_ref in listeners:
            input_pin = input_pin_ref()

            if input_pin is None:
                continue #TODO: error?

            if worker is not None and input_pin.worker_ref() is worker:
                continue
            value2 = value
            if dtype is not None and \
              (dtype == "cson" or dtype[0] == "cson") and \
              input_pin.dtype is not None and \
              (input_pin.dtype == "json" or input_pin.dtype[0] == "json"):
                if isinstance(value, (str, bytes)):
                    value2 = cson2json(value)
            resource_name = "pin: " + str(input_pin)
            if resource_name0 is not None:
                resource_name = resource_name0 + " in " + resource_name
            input_pin.receive_update(value2, resource_name)

    def update_from_code(self, cell, only_last=False):
        import seamless
        if cell.dtype == "array":
            value = TransportedArray(cell._data, cell._store)
        else:
            value = cell._data
        #print("manager.update_from_code", cell, head(value))
        self._update(cell, cell.dtype, value, only_last=only_last)
        from .. import run_work
        from .macro import get_macro_mode
        if not get_macro_mode():
            run_work()

    def update_from_worker(self, cell_id, value, worker, *, preliminary):
        import seamless
        from .cell import Signal
        cell = self.cells.get(cell_id, None)
        if cell is None or cell._destroyed:
            return #cell has died...
        #print("manager.update_from_worker", cell, head(value), worker)

        if isinstance(cell, Signal):
            assert value is None
            self._update(cell, None, None, worker=worker)
        else:
            changed = cell._update(value, propagate=False,
                preliminary=preliminary)
            if changed:
                if cell.dtype == "array":
                    value = TransportedArray(value, cell._store)
                self._update(cell, cell.dtype, value, worker=worker)

    def update_registrar_key(self, registrar, key):
        from .worker import Worker
        from .macro import MacroObject
        if registrar not in self.registrar_listeners:
            return
        d = self.registrar_listeners[registrar]
        if key not in d:
            return
        for t in list(d[key]):
            target = t[0]()
            if target is None:
                continue
            if isinstance(target, Worker):
                namespace_name = t[1]
                target.receive_registrar_update(registrar.name, key, namespace_name)
            elif isinstance(target, MacroObject):
                target.update_cell((registrar.name, key))
            else:
                raise TypeError(target)
    @classmethod
    def get_cell_id(cls, cell):
        return id(cell)

    def disconnect(self, source, target):
        from .transformer import Transformer
        from .cell import Cell, CellLike
        from .context import Context
        from .worker import EditPinBase, ExportedEditPin, \
            InputPinBase, ExportedInputPin, OutputPinBase, ExportedOutputPin
        if isinstance(source, EditPinBase):
            source, target = target, source
        if isinstance(source, CellLike) and source._like_cell:
            if isinstance(target, ExportedInputPin):
                target = target.get_pin()
            if isinstance(source, Context):
                assert "_output" in source._pins
                source = source._pins["_output"]
            self.remove_listener(source, target)
            worker = target.worker_ref()
            if worker is not None:
                source._on_disconnect(target, worker, incoming = False)

        elif isinstance(source, OutputPinBase):
            if isinstance(target, Context):
                assert "_input" in target._pins
                target = target._pins["_input"]
            if isinstance(source, ExportedOutputPin):
                source = source.get_pin()

            cell_id = self.get_cell_id(target)

            ok = False
            if cell_id in self.cells and \
              target in self.cell_to_output_pin:
                if cell_id not in source._cell_ids:
                    ok = False
                else:
                    for ref in self.cell_to_output_pin[target]:
                        if ref() is source:
                            self.cell_to_output_pin[target].remove(ref)
                            source._cell_ids.remove(cell_id)
                            ok = True
            if not ok:
                raise ValueError("Connection does not exist")

            worker = source.worker_ref()
            if worker is not None:
                if isinstance(worker, Transformer):
                    worker._on_disconnect_output()
                target._on_disconnect(source, worker, incoming = True)

        else:
            raise TypeError

    def connect(self, source, target):
        from .transformer import Transformer
        from .cell import Cell, CellLike
        from .context import Context
        from .worker import EditPinBase, ExportedEditPin, \
            InputPinBase, ExportedInputPin, OutputPinBase, ExportedOutputPin
        if isinstance(source, EditPinBase):
            source, target = target, source
        if isinstance(source, CellLike) and source._like_cell:
            assert isinstance(target, (InputPinBase, EditPinBase, CellLike))
            assert source._get_manager() is self
            assert target._get_manager() is self
            if isinstance(target, ExportedInputPin):
                target = target.get_pin()

            if isinstance(target, Cell):
                self.add_cell_alias(source, target)
                target._on_connect(source, None, incoming = True)
                if source._status == Cell.StatusFlags.OK:
                    value = source._data
                    if source.dtype is not None and \
                      (source.dtype == "cson" or source.dtype[0] == "cson") and \
                      target.dtype is not None and \
                      (target.dtype == "json" or target.dtype[0] == "json"):
                        if isinstance(value, (str, bytes)):
                            value = cson2json(value)
                    target._update(value, propagate=True)

                return
            assert not isinstance(target, Context) #TODO?
            worker = target.worker_ref()
            assert worker is not None #weakref may not be dead
            source._on_connect(target, worker, incoming = False)
            self.add_listener(source, target)

            if source._status == Cell.StatusFlags.OK:
                self.update_from_code(source, only_last=True)
            else:
                if isinstance(target, EditPinBase) and target.last_value is not None:
                    self.update_from_worker(
                        self.get_cell_id(source),
                        target.last_value,
                        worker, preliminary=False
                    )

        elif isinstance(source, OutputPinBase):
            assert isinstance(target, CellLike) and target._like_cell
            if isinstance(target, Context):
                assert "_input" in target._pins
                target = target._pins["_input"]
            if isinstance(source, ExportedOutputPin):
                source = source.get_pin()
            worker = source.worker_ref()
            assert worker is not None #weakref may not be dead
            target._on_connect(source, worker, incoming = True)
            cell_id = self.get_cell_id(target)
            if cell_id not in self.cells:
                self.cells[cell_id] = target

            if cell_id not in source._cell_ids:
                source._cell_ids.append(cell_id)
                if target not in self.cell_to_output_pin:
                    self.cell_to_output_pin[target] = []
                self.cell_to_output_pin[target].append(weakref.ref(source))

            if isinstance(worker, Transformer):
                worker._on_connect_output()
            elif source.last_value is not None:
                self.update_from_worker(
                    cell_id,
                    source.last_value,
                    worker,
                    preliminary=False
                )


        else:
            raise TypeError
Exemplo n.º 18
0
class MountManager:
    _running = False
    _last_run = None
    _stop = False
    _mounting = False
    def __init__(self, latency):
        self.latency = latency
        self.mounts = WeakKeyDictionary()
        self.contexts = WeakSet()
        self.lock = RLock()
        self.cell_updates = deque()
        self._tick = Event()
        self.stash = None
        self.paths = WeakKeyDictionary()

    @property
    def reorganizing(self):
        return self.stash is not None

    @contextmanager
    def reorganize(self, context):
        if context is None:
            self.stash = NoStash
            yield
            self.stash = None
            return
        if self.stash is not None:
            assert context._part_of2(self.stash.context)
            yield
            return
        with self.lock:
            self.stash = MountManagerStash(self, context)
            try:
                self.stash.activate()
                yield
                #print("reorganize success")
                self.stash.join()
            except Exception as e:
                #print("reorganize failure")
                self.stash.undo()
                raise e
            finally:
                self.stash = None

    def add_mount(self, cell, path, mode, authority, persistent, **kwargs):
        root = cell._root()
        if root not in self.paths:
            paths = set()
            self.paths[root] = paths
        else:
            paths = self.paths[root]
        assert path not in paths, path
        #print("add mount", path, cell)
        paths.add(path)
        self.mounts[cell] = MountItem(self, cell, path, mode, authority, persistent, **kwargs)
        if self.stash is None or self.stash is NoStash:
            try:
                self._mounting = True
                self.mounts[cell].init()
            finally:
                self._mounting = False

    def add_link(self, link, path, persistent):
        paths = self.paths[link._root()]
        assert path not in paths, path
        #print("add link", path, link)
        paths.add(path)
        self.mounts[link] = LinkItem(link, path, persistent)
        if self.stash is None or self.stash is NoStash:
            self.mounts[link].init()

    def unmount(self, cell_or_link, from_del=False):
        #print("UNMOUNT", cell_or_link, cell_or_link._mount)
        assert not is_dummy_mount(cell_or_link._mount), cell_or_link
        root = cell_or_link._root()
        if from_del and (cell_or_link not in self.mounts or root not in self.paths):
            return
        paths = self.paths[root]
        path = cell_or_link._mount["path"]
        assert path in paths
        paths.remove(path)
        assert cell_or_link in self.mounts, (cell_or_link, path)  #... but path is in paths
        mountitem = self.mounts.pop(cell_or_link)
        mountitem.destroy()

    def unmount_context(self, context, from_del=False):
        #print("unmount context", context)
        self.contexts.discard(context) # may or may not exist, especially at __del__ time
        mount = context._mount
        """context._mount is authoritative!
        If context is destroyed while an unmount is undesired,
          (because of stash replacement)
        context._mount MUST have been set to None!
        """
        if context._root() is context:
            self.paths.pop(context, None)
            if mount is None:
                return
        assert not is_dummy_mount(mount), context
        try:
            paths = self.paths[context._root()]
        except KeyError:
            return
        try:
            paths.remove(mount["path"])
        except KeyError:
            pass
        if mount["persistent"] == False:
            dirpath = mount["path"].replace("/", os.sep)
            try:
                #print("rmdir", dirpath)
                os.rmdir(dirpath)
            except OSError:
                print("Error: cannot remove directory %s" % dirpath)


    def add_context(self, context, path, as_parent):
        #print("add context", path, context, as_parent, context._mount["persistent"])
        paths = self.paths[context._root()]
        if not as_parent:
            assert path not in paths, path
            paths.add(path)
            self.contexts.add(context)
        else:
            if path in paths:
                assert context in self.contexts, (path, context)
        if self.stash is None or self.stash is NoStash:
            self._check_context(context, as_parent)
        else:
            self.stash.context_as_parent[context] = as_parent

    def _check_context(self, context, as_parent):
        mount = context._mount
        assert not is_dummy_mount(mount), context
        dirpath = mount["path"].replace("/", os.sep)
        persistent, authority = mount["persistent"], mount["authority"]
        if os.path.exists(dirpath):
            if authority == "cell" and not as_parent:
                print("Warning: Directory path '%s' already exists" % dirpath) #TODO: log warning
        else:
            if authority == "file-strict":
                raise Exception("Directory path '%s' does not exist, but authority is 'file-strict'" % dirpath)
            os.mkdir(dirpath)

    def add_cell_update(self, cell):
        #print("add_cell_update", cell, self.reorganizing, self.mounting)
        if self.reorganizing or self._mounting:
            return
        assert cell in self.mounts, (cell, hex(id(cell)))
        self.cell_updates.append(cell)

    def _run(self):
        for cell, mount_item in list(self.mounts.items()):
            if isinstance(cell, Link):
                continue
            if cell in self.cell_updates:
                continue
            try:
                mount_item.conditional_read()
            except Exception:
                exc = traceback.format_exc()
                if exc != mount_item.last_exc:
                    print(exc)
                    mount_item.last_exc = exc
        while 1:
            try:
                cell = self.cell_updates.popleft()
            except IndexError:
                break
            mount_item = self.mounts.get(cell)
            if mount_item is None: #cell was deleted
                continue
            try:
                mount_item.conditional_write(with_none=True)
            except Exception:
                exc = traceback.format_exc()
                if exc != mount_item.last_exc:
                    print(exc)
                    mount_item.last_exc = exc
        self._tick.set()

    def run(self):
        try:
            self._running = True
            while not self._stop:
                t = time.time()
                self._run()
                while time.time() - t < self.latency:
                    if not self._tick.is_set():
                        break
                    time.sleep(0.01)
        finally:
            self._running = False

    def start(self):
        self._stop = False
        t = self.thread = Thread(target=self.run)
        t.daemon = True
        t.start()

    def stop(self, wait=False, waiting_loop_period=0.01):
        self._stop = True
        if wait:
            while self._running:
                time.sleep(waiting_loop_period)

    def tick(self):
        """Waits until one iteration of the run() loop has finished"""
        if self._running:
            self._tick.clear()
            self._tick.wait()
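    # Hedged lifecycle sketch for the polling thread above (assumes `mgr` is a
    # MountManager instance):
    #
    #     mgr.start()          # spawns the daemon thread running run()
    #     mgr.tick()           # blocks until one _run() iteration has finished
    #     mgr.stop(wait=True)  # asks the loop to exit and waits for it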

    def destroy(self):
        for cell_or_link in list(self.mounts.keys()):
            self.unmount(cell_or_link)
        for context in sorted(self.contexts, key=lambda c: -len(c.path)):
            self.unmount_context(context)
Exemplo n.º 19
0
class Context(ContextBase, AttributeSetter):
    """Create a zmq Context

    A zmq Context creates sockets via its ``ctx.socket`` method.
    """

    sockopts = None
    _instance = None
    _instance_lock = Lock()
    _instance_pid = None
    _shadow = False
    _sockets = None

    def __init__(self, io_threads=1, **kwargs):
        super(Context, self).__init__(io_threads=io_threads, **kwargs)
        if kwargs.get('shadow', False):
            self._shadow = True
        else:
            self._shadow = False
        self.sockopts = {}
        self._sockets = WeakSet()

    def __del__(self):
        """deleting a Context should terminate it, without trying non-threadsafe destroy"""

        # Calling locals() here conceals issue #1167 on Windows CPython 3.5.4.
        locals()

        if not self._shadow and not _exiting:
            self.term()

    def __enter__(self):
        return self

    def __exit__(self, *args, **kwargs):
        self.term()

    def __copy__(self, memo=None):
        """Copying a Context creates a shadow copy"""
        return self.__class__.shadow(self.underlying)

    __deepcopy__ = __copy__

    @classmethod
    def shadow(cls, address):
        """Shadow an existing libzmq context

        address is the integer address of the libzmq context
        or an FFI pointer to it.

        .. versionadded:: 14.1
        """
        from zmq.utils.interop import cast_int_addr

        address = cast_int_addr(address)
        return cls(shadow=address)
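    # Hedged example: shadowing wraps an already-existing libzmq context, which is
    # also what __copy__ above does:
    #
    #     ctx = Context.instance()
    #     shadow_ctx = Context.shadow(ctx.underlying)  # same underlying libzmq context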

    @classmethod
    def shadow_pyczmq(cls, ctx):
        """Shadow an existing pyczmq context

        ctx is the FFI `zctx_t *` pointer

        .. versionadded:: 14.1
        """
        from pyczmq import zctx  # type: ignore
        from zmq.utils.interop import cast_int_addr

        underlying = zctx.underlying(ctx)
        address = cast_int_addr(underlying)
        return cls(shadow=address)

    # static method copied from tornado IOLoop.instance
    @classmethod
    def instance(cls: Type[T], io_threads=1) -> T:
        """Returns a global Context instance.

        Most single-threaded applications have a single, global Context.
        Use this method instead of passing around Context instances
        throughout your code.

        A common pattern for classes that depend on Contexts is to use
        a default argument to enable programs with multiple Contexts
        but not require the argument for simpler applications::

            class MyClass(object):
                def __init__(self, context=None):
                    self.context = context or Context.instance()

        .. versionchanged:: 18.1

            When called in a subprocess after forking,
            a new global instance is created instead of inheriting
            a Context that won't work from the parent process.
        """
        if (cls._instance is None or cls._instance_pid != os.getpid()
                or cls._instance.closed):
            with cls._instance_lock:
                if (cls._instance is None or cls._instance_pid != os.getpid()
                        or cls._instance.closed):
                    cls._instance = cls(io_threads=io_threads)
                    cls._instance_pid = os.getpid()
        return cls._instance

    def term(self):
        """Close or terminate the context.

        Context termination is performed in the following steps:

        - Any blocking operations currently in progress on sockets open within context shall
          raise :class:`zmq.ContextTerminated`.
          With the exception of socket.close(), any further operations on sockets open within this context
          shall raise :class:`zmq.ContextTerminated`.
        - After interrupting all blocking calls, term shall block until the following conditions are satisfied:
            - All sockets open within context have been closed.
            - For each socket within context, all messages sent on the socket have either been
              physically transferred to a network peer,
              or the socket's linger period set with the zmq.LINGER socket option has expired.

        For further details regarding socket linger behaviour refer to libzmq documentation for ZMQ_LINGER.

        This can be called to close the context by hand. If this is not called,
        the context will automatically be closed when it is garbage collected.
        """
        return super(Context, self).term()

    # -------------------------------------------------------------------------
    # Hooks for ctxopt completion
    # -------------------------------------------------------------------------

    def __dir__(self):
        keys = dir(self.__class__)

        for collection in (ctx_opt_names, ):
            keys.extend(collection)
        return keys

    # -------------------------------------------------------------------------
    # Creating Sockets
    # -------------------------------------------------------------------------

    def _add_socket(self, socket):
        self._sockets.add(socket)

    def _rm_socket(self, socket):
        if self._sockets:
            self._sockets.discard(socket)

    def destroy(self, linger=None):
        """Close all sockets associated with this context and then terminate
        the context.

        .. warning::

            destroy involves calling ``zmq_close()``, which is **NOT** threadsafe.
            If there are active sockets in other threads, this must not be called.

        Parameters
        ----------

        linger : int, optional
            If specified, set LINGER on sockets prior to closing them.
        """
        if self.closed:
            return

        sockets = self._sockets
        self._sockets = WeakSet()
        for s in sockets:
            if s and not s.closed:
                if linger is not None:
                    s.setsockopt(LINGER, linger)
                s.close()

        self.term()

    @property
    def _socket_class(self):
        return Socket

    def socket(self, socket_type, **kwargs):
        """Create a Socket associated with this Context.

        Parameters
        ----------
        socket_type : int
            The socket type, which can be any of the 0MQ socket types:
            REQ, REP, PUB, SUB, PAIR, DEALER, ROUTER, PULL, PUSH, etc.

        kwargs:
            will be passed to the __init__ method of the socket class.
        """
        if self.closed:
            raise ZMQError(ENOTSUP)
        s = self._socket_class(self, socket_type, **kwargs)
        for opt, value in self.sockopts.items():
            try:
                s.setsockopt(opt, value)
            except ZMQError:
                # ignore ZMQErrors, which are likely for socket options
                # that do not apply to a particular socket type, e.g.
                # SUBSCRIBE for non-SUB sockets.
                pass
        self._add_socket(s)
        return s

    def setsockopt(self, opt, value):
        """set default socket options for new sockets created by this Context

        .. versionadded:: 13.0
        """
        self.sockopts[opt] = value

    def getsockopt(self, opt):
        """get default socket options for new sockets created by this Context

        .. versionadded:: 13.0
        """
        return self.sockopts[opt]

    def _set_attr_opt(self, name, opt, value):
        """set default sockopts as attributes"""
        if name in constants.ctx_opt_names:
            return self.set(opt, value)
        else:
            self.sockopts[opt] = value

    def _get_attr_opt(self, name, opt):
        """get default sockopts as attributes"""
        if name in constants.ctx_opt_names:
            return self.get(opt)
        else:
            if opt not in self.sockopts:
                raise AttributeError(name)
            else:
                return self.sockopts[opt]

    def __delattr__(self, key):
        """delete default sockopts as attributes"""
        key = key.upper()
        try:
            opt = getattr(constants, key)
        except AttributeError:
            raise AttributeError("no such socket option: %s" % key)
        else:
            if opt not in self.sockopts:
                raise AttributeError(key)
            else:
                del self.sockopts[opt]
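A short usage sketch of the Context wrapper above, relying only on public pyzmq names (zmq.Context.instance, zmq.REQ, zmq.LINGER); the endpoint is illustrative.

import zmq

ctx = zmq.Context.instance()          # shared per-process context (see instance() above)
ctx.setsockopt(zmq.LINGER, 0)         # default option applied to sockets created later
sock = ctx.socket(zmq.REQ)            # picks up the LINGER default from the context
sock.connect("tcp://127.0.0.1:5555")  # illustrative endpoint
# ... request/reply traffic would go here ...
sock.close()
ctx.destroy(linger=0)                 # close any remaining sockets, then term()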
Exemplo n.º 20
class Character(PyfaBase, EveItemWrapper):
    """
    "Core" character class. It should be used for managing characters
    (e.g. in character editor). Fits will use proxy twin of the character.
    We cannot use core because different fits carry different attributes
    on character and all child entities (like skills, on-character
    implants, and so on).

    Pyfa model children:
    .RestrictedSet(skills)
    """

    __tablename__ = 'characters'

    id = Column('character_id', Integer, primary_key=True)
    alias = Column(String)

    def __init__(self, alias='', source=None):
        self.__generic_init()
        # Use default source, unless specified otherwise
        if source is None:
            source = SourceManager.default
        self.source = source
        self.alias = alias

    @reconstructor
    def _dbinit(self):
        self.__generic_init()
        # Use default source for all reconstructed characters
        self.source = SourceManager.default
        # Restore entities which are stored on character
        for skill in self._skills:
            self.skills.add(skill)

    def __generic_init(self):
        char_type_id = Type.character_static
        EveItemWrapper.__init__(self, char_type_id)
        # Attributes which store objects hidden behind properties
        self.__source = None
        # Set with fits which are loaded and use this character
        self.__loaded_proxies = WeakSet()
        self.skills = SkillCoreSet(self)
        self._eos_fit = EosFit()

    # EVE item wrapper methods
    @property
    def _source(self):
        return self.source

    @property
    def _eos_item(self):
        return self._eos_fit.character

    @property
    def _src_children(self):
        return get_src_children(chain(
            self.skills,
        ))

    # Miscellanea public stuff
    @property
    def source(self):
        return self.__source

    @source.setter
    def source(self, new_source):
        # Attempt to fetch source from source manager if passed object
        # is not instance of source class
        if not isinstance(new_source, Source) and new_source is not None:
            new_source = SourceManager.get(new_source)
        old_source = self.source
        # Do not update anything if sources are the same
        if new_source is old_source:
            return
        self.__source = new_source
        # Update eos model with new data
        self._eos_fit.source = getattr(new_source, 'eos', None)
        # Update pyfa model with new data
        # Unlike fit, character represents EVE item, thus we need
        # to update it too
        self._update_source()
        for src_child in self._src_children:
            src_child._update_source()

    persist = pyfa_persist
    abandon = pyfa_abandon

    def validate(self):
        self._eos_fit.validate()

    # Auxiliary methods
    def _proxy_iter(self):
        """
        Safe iterator over related character proxies; it avoids issues
        with the set changing size due to GC during iteration.
        """
        for char_proxy in tuple(self.__loaded_proxies):
            yield char_proxy

    def _link_proxy(self, char_proxy):
        """Create connection between character core and proxy"""
        self.__loaded_proxies.add(char_proxy)

    def _unlink_proxy(self, char_proxy):
        """Remove connection between character core and proxy"""
        self.__loaded_proxies.discard(char_proxy)

    def __repr__(self):
        spec = ['id']
        return make_repr_str(self, spec)
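_proxy_iter() above snapshots the WeakSet into a tuple so that garbage collection cannot shrink the set while it is being iterated. The same pattern works for any WeakSet; a standalone sketch follows, where Proxy is a placeholder class and not part of the original code.

from weakref import WeakSet

class Proxy:
    """Placeholder stand-in for a character proxy."""

loaded_proxies = WeakSet()
p1, p2 = Proxy(), Proxy()
loaded_proxies.add(p1)
loaded_proxies.add(p2)

# Snapshot first, mirroring Character._proxy_iter(); iterating the WeakSet
# directly could be disturbed if the GC drops members during the loop.
for proxy in tuple(loaded_proxies):
    print(proxy)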
Exemplo n.º 21
class TestWeakSet(unittest.TestCase):

    def setUp(self):
        # need to keep references to them
        self.items = [ustr(c) for c in ('a', 'b', 'c')]
        self.items2 = [ustr(c) for c in ('x', 'y', 'z')]
        self.ab_items = [ustr(c) for c in 'ab']
        self.abcde_items = [ustr(c) for c in 'abcde']
        self.def_items = [ustr(c) for c in 'def']
        self.ab_weakset = WeakSet(self.ab_items)
        self.abcde_weakset = WeakSet(self.abcde_items)
        self.def_weakset = WeakSet(self.def_items)
        self.letters = [ustr(c) for c in string.ascii_letters]
        self.s = WeakSet(self.items)
        self.d = dict.fromkeys(self.items)
        self.obj = ustr('F')
        self.fs = WeakSet([self.obj])

    def test_methods(self):
        weaksetmethods = dir(WeakSet)
        for method in dir(set):
            if method == 'test_c_api' or method.startswith('_'):
                continue
            self.assertIn(method, weaksetmethods,
                         "WeakSet missing method " + method)

    def test_new_or_init(self):
        self.assertRaises(TypeError, WeakSet, [], 2)

    def test_len(self):
        self.assertEqual(len(self.s), len(self.d))
        self.assertEqual(len(self.fs), 1)
        del self.obj
        support.gc_collect()
        self.assertEqual(len(self.fs), 0)

    def test_contains(self):
        for c in self.letters:
            self.assertEqual(c in self.s, c in self.d)
        # 1 is not weakref'able, but that TypeError is caught by __contains__
        self.assertNotIn(1, self.s)
        self.assertIn(self.obj, self.fs)
        del self.obj
        support.gc_collect()
        self.assertNotIn(ustr('F'), self.fs)

    def test_union(self):
        u = self.s.union(self.items2)
        for c in self.letters:
            self.assertEqual(c in u, c in self.d or c in self.items2)
        self.assertEqual(self.s, WeakSet(self.items))
        self.assertEqual(type(u), WeakSet)
        self.assertRaises(TypeError, self.s.union, [[]])
        for C in set, frozenset, dict.fromkeys, list, tuple:
            x = WeakSet(self.items + self.items2)
            c = C(self.items2)
            self.assertEqual(self.s.union(c), x)
            del c
        self.assertEqual(len(u), len(self.items) + len(self.items2))
        self.items2.pop()
        gc.collect()
        self.assertEqual(len(u), len(self.items) + len(self.items2))

    def test_or(self):
        i = self.s.union(self.items2)
        self.assertEqual(self.s | set(self.items2), i)
        self.assertEqual(self.s | frozenset(self.items2), i)

    def test_intersection(self):
        s = WeakSet(self.letters)
        i = s.intersection(self.items2)
        for c in self.letters:
            self.assertEqual(c in i, c in self.items2 and c in self.letters)
        self.assertEqual(s, WeakSet(self.letters))
        self.assertEqual(type(i), WeakSet)
        for C in set, frozenset, dict.fromkeys, list, tuple:
            x = WeakSet([])
            self.assertEqual(i.intersection(C(self.items)), x)
        self.assertEqual(len(i), len(self.items2))
        self.items2.pop()
        gc.collect()
        self.assertEqual(len(i), len(self.items2))

    def test_isdisjoint(self):
        self.assertTrue(self.s.isdisjoint(WeakSet(self.items2)))
        self.assertTrue(not self.s.isdisjoint(WeakSet(self.letters)))

    def test_and(self):
        i = self.s.intersection(self.items2)
        self.assertEqual(self.s & set(self.items2), i)
        self.assertEqual(self.s & frozenset(self.items2), i)

    def test_difference(self):
        i = self.s.difference(self.items2)
        for c in self.letters:
            self.assertEqual(c in i, c in self.d and c not in self.items2)
        self.assertEqual(self.s, WeakSet(self.items))
        self.assertEqual(type(i), WeakSet)
        self.assertRaises(TypeError, self.s.difference, [[]])

    def test_sub(self):
        i = self.s.difference(self.items2)
        self.assertEqual(self.s - set(self.items2), i)
        self.assertEqual(self.s - frozenset(self.items2), i)

    def test_symmetric_difference(self):
        i = self.s.symmetric_difference(self.items2)
        for c in self.letters:
            self.assertEqual(c in i, (c in self.d) ^ (c in self.items2))
        self.assertEqual(self.s, WeakSet(self.items))
        self.assertEqual(type(i), WeakSet)
        self.assertRaises(TypeError, self.s.symmetric_difference, [[]])
        self.assertEqual(len(i), len(self.items) + len(self.items2))
        self.items2.pop()
        gc.collect()
        self.assertEqual(len(i), len(self.items) + len(self.items2))

    def test_xor(self):
        i = self.s.symmetric_difference(self.items2)
        self.assertEqual(self.s ^ set(self.items2), i)
        self.assertEqual(self.s ^ frozenset(self.items2), i)

    def test_sub_and_super(self):
        self.assertTrue(self.ab_weakset <= self.abcde_weakset)
        self.assertTrue(self.abcde_weakset <= self.abcde_weakset)
        self.assertTrue(self.abcde_weakset >= self.ab_weakset)
        self.assertFalse(self.abcde_weakset <= self.def_weakset)
        self.assertFalse(self.abcde_weakset >= self.def_weakset)
        self.assertTrue(set('a').issubset('abc'))
        self.assertTrue(set('abc').issuperset('a'))
        self.assertFalse(set('a').issubset('cbs'))
        self.assertFalse(set('cbs').issuperset('a'))

    def test_lt(self):
        self.assertTrue(self.ab_weakset < self.abcde_weakset)
        self.assertFalse(self.abcde_weakset < self.def_weakset)
        self.assertFalse(self.ab_weakset < self.ab_weakset)
        self.assertFalse(WeakSet() < WeakSet())

    def test_gt(self):
        self.assertTrue(self.abcde_weakset > self.ab_weakset)
        self.assertFalse(self.abcde_weakset > self.def_weakset)
        self.assertFalse(self.ab_weakset > self.ab_weakset)
        self.assertFalse(WeakSet() > WeakSet())

    def test_gc(self):
        # Create a nest of cycles to exercise overall ref count check
        s = WeakSet(Foo() for i in range(1000))
        for elem in s:
            elem.cycle = s
            elem.sub = elem
            elem.set = WeakSet([elem])

    def test_subclass_with_custom_hash(self):
        # Bug #1257731
        class H(WeakSet):
            def __hash__(self):
                return int(id(self) & 0x7fffffff)
        s = H()
        f = set()
        f.add(s)
        self.assertIn(s, f)
        f.remove(s)
        f.add(s)
        f.discard(s)

    def test_init(self):
        s = WeakSet()
        s.__init__(self.items)
        self.assertEqual(s, self.s)
        s.__init__(self.items2)
        self.assertEqual(s, WeakSet(self.items2))
        self.assertRaises(TypeError, s.__init__, s, 2)
        self.assertRaises(TypeError, s.__init__, 1)

    def test_constructor_identity(self):
        s = WeakSet(self.items)
        t = WeakSet(s)
        self.assertNotEqual(id(s), id(t))

    def test_hash(self):
        self.assertRaises(TypeError, hash, self.s)

    def test_clear(self):
        self.s.clear()
        self.assertEqual(self.s, WeakSet([]))
        self.assertEqual(len(self.s), 0)

    def test_copy(self):
        dup = self.s.copy()
        self.assertEqual(self.s, dup)
        self.assertNotEqual(id(self.s), id(dup))

    def test_add(self):
        x = ustr('Q')
        self.s.add(x)
        self.assertIn(x, self.s)
        dup = self.s.copy()
        self.s.add(x)
        self.assertEqual(self.s, dup)
        self.assertRaises(TypeError, self.s.add, [])
        self.fs.add(Foo())
        support.gc_collect()
        self.assertTrue(len(self.fs) == 1)
        self.fs.add(self.obj)
        self.assertTrue(len(self.fs) == 1)

    def test_remove(self):
        x = ustr('a')
        self.s.remove(x)
        self.assertNotIn(x, self.s)
        self.assertRaises(KeyError, self.s.remove, x)
        self.assertRaises(TypeError, self.s.remove, [])

    def test_discard(self):
        a, q = ustr('a'), ustr('Q')
        self.s.discard(a)
        self.assertNotIn(a, self.s)
        self.s.discard(q)
        self.assertRaises(TypeError, self.s.discard, [])

    def test_pop(self):
        for i in range(len(self.s)):
            elem = self.s.pop()
            self.assertNotIn(elem, self.s)
        self.assertRaises(KeyError, self.s.pop)

    def test_update(self):
        retval = self.s.update(self.items2)
        self.assertEqual(retval, None)
        for c in (self.items + self.items2):
            self.assertIn(c, self.s)
        self.assertRaises(TypeError, self.s.update, [[]])

    def test_update_set(self):
        self.s.update(set(self.items2))
        for c in (self.items + self.items2):
            self.assertIn(c, self.s)

    def test_ior(self):
        self.s |= set(self.items2)
        for c in (self.items + self.items2):
            self.assertIn(c, self.s)

    def test_intersection_update(self):
        retval = self.s.intersection_update(self.items2)
        self.assertEqual(retval, None)
        for c in (self.items + self.items2):
            if c in self.items2 and c in self.items:
                self.assertIn(c, self.s)
            else:
                self.assertNotIn(c, self.s)
        self.assertRaises(TypeError, self.s.intersection_update, [[]])

    def test_iand(self):
        self.s &= set(self.items2)
        for c in (self.items + self.items2):
            if c in self.items2 and c in self.items:
                self.assertIn(c, self.s)
            else:
                self.assertNotIn(c, self.s)

    def test_difference_update(self):
        retval = self.s.difference_update(self.items2)
        self.assertEqual(retval, None)
        for c in (self.items + self.items2):
            if c in self.items and c not in self.items2:
                self.assertIn(c, self.s)
            else:
                self.assertNotIn(c, self.s)
        self.assertRaises(TypeError, self.s.difference_update, [[]])
        self.assertRaises(TypeError, self.s.symmetric_difference_update, [[]])

    def test_isub(self):
        self.s -= set(self.items2)
        for c in (self.items + self.items2):
            if c in self.items and c not in self.items2:
                self.assertIn(c, self.s)
            else:
                self.assertNotIn(c, self.s)

    def test_symmetric_difference_update(self):
        retval = self.s.symmetric_difference_update(self.items2)
        self.assertEqual(retval, None)
        for c in (self.items + self.items2):
            if (c in self.items) ^ (c in self.items2):
                self.assertIn(c, self.s)
            else:
                self.assertNotIn(c, self.s)
        self.assertRaises(TypeError, self.s.symmetric_difference_update, [[]])

    def test_ixor(self):
        self.s ^= set(self.items2)
        for c in (self.items + self.items2):
            if (c in self.items) ^ (c in self.items2):
                self.assertIn(c, self.s)
            else:
                self.assertNotIn(c, self.s)

    def test_inplace_on_self(self):
        t = self.s.copy()
        t |= t
        self.assertEqual(t, self.s)
        t &= t
        self.assertEqual(t, self.s)
        t -= t
        self.assertEqual(t, WeakSet())
        t = self.s.copy()
        t ^= t
        self.assertEqual(t, WeakSet())

    def test_eq(self):
        # issue 5964
        self.assertTrue(self.s == self.s)
        self.assertTrue(self.s == WeakSet(self.items))
        self.assertFalse(self.s == set(self.items))
        self.assertFalse(self.s == list(self.items))
        self.assertFalse(self.s == tuple(self.items))
        self.assertFalse(self.s == WeakSet([Foo]))
        self.assertFalse(self.s == 1)

    def test_ne(self):
        self.assertTrue(self.s != set(self.items))
        s1 = WeakSet()
        s2 = WeakSet()
        self.assertFalse(s1 != s2)

    def test_weak_destroy_while_iterating(self):
        # Issue #7105: iterators shouldn't crash when a key is implicitly removed
        # Create new items to be sure no-one else holds a reference
        items = [ustr(c) for c in ('a', 'b', 'c')]
        s = WeakSet(items)
        it = iter(s)
        next(it)             # Trigger internal iteration
        # Destroy an item
        del items[-1]
        gc.collect()    # just in case
        # We have removed either the first consumed item, or another one
        self.assertIn(len(list(it)), [len(items), len(items) - 1])
        del it
        # The removal has been committed
        self.assertEqual(len(s), len(items))

    def test_weak_destroy_and_mutate_while_iterating(self):
        # Issue #7105: iterators shouldn't crash when a key is implicitly removed
        items = [ustr(c) for c in string.ascii_letters]
        s = WeakSet(items)
        @contextlib.contextmanager
        def testcontext():
            try:
                it = iter(s)
                # Start iterator
                yielded = ustr(str(next(it)))
                # Schedule an item for removal and recreate it
                u = ustr(str(items.pop()))
                if yielded == u:
                    # The iterator still has a reference to the removed item,
                    # advance it (issue #20006).
                    next(it)
                gc.collect()      # just in case
                yield u
            finally:
                it = None           # should commit all removals

        with testcontext() as u:
            self.assertNotIn(u, s)
        with testcontext() as u:
            self.assertRaises(KeyError, s.remove, u)
        self.assertNotIn(u, s)
        with testcontext() as u:
            s.add(u)
        self.assertIn(u, s)
        t = s.copy()
        with testcontext() as u:
            s.update(t)
        self.assertEqual(len(s), len(t))
        with testcontext() as u:
            s.clear()
        self.assertEqual(len(s), 0)

    def test_len_cycles(self):
        N = 20
        items = [RefCycle() for i in range(N)]
        s = WeakSet(items)
        del items
        it = iter(s)
        try:
            next(it)
        except StopIteration:
            pass
        gc.collect()
        n1 = len(s)
        del it
        gc.collect()
        gc.collect()
        n2 = len(s)
        # one item may be kept alive inside the iterator
        self.assertIn(n1, (0, 1))
        self.assertEqual(n2, 0)

    @support.impl_detail("PyPy has no cyclic collection", pypy=False)
    def test_len_race(self):
        # Extended sanity checks for len() in the face of cyclic collection
        self.addCleanup(gc.set_threshold, *gc.get_threshold())
        for th in range(1, 100):
            N = 20
            gc.collect(0)
            gc.set_threshold(th, th, th)
            items = [RefCycle() for i in range(N)]
            s = WeakSet(items)
            del items
            # All items will be collected at next garbage collection pass
            it = iter(s)
            try:
                next(it)
            except StopIteration:
                pass
            n1 = len(s)
            del it
            n2 = len(s)
            self.assertGreaterEqual(n1, 0)
            self.assertLessEqual(n1, N)
            self.assertGreaterEqual(n2, 0)
            self.assertLessEqual(n2, n1)
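The suite above depends on helpers defined elsewhere in the test module (ustr, Foo, RefCycle, support.gc_collect). The core behaviour it verifies, that elements vanish from a WeakSet once their last strong reference is gone, can be reproduced in a few lines; Item below is a placeholder class.

import gc
from weakref import WeakSet

class Item:
    """Weak-referenceable placeholder (plain str cannot go into a WeakSet)."""
    def __init__(self, value):
        self.value = value

ws = WeakSet()
item = Item('a')
ws.add(item)
assert len(ws) == 1

del item       # drop the last strong reference
gc.collect()   # explicit collection, as support.gc_collect() does for non-refcounting VMs
assert len(ws) == 0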
Exemplo n.º 22
class TestWeakSet(unittest.TestCase):

    def setUp(self):
        # need to keep references to them
        self.items = [ustr(c) for c in ('a', 'b', 'c')]
        self.items2 = [ustr(c) for c in ('x', 'y', 'z')]
        self.letters = [ustr(c) for c in string.ascii_letters]
        self.s = WeakSet(self.items)
        self.d = dict.fromkeys(self.items)
        self.obj = ustr('F')
        self.fs = WeakSet([self.obj])

    def test_methods(self):
        weaksetmethods = dir(WeakSet)
        for method in dir(set):
            if method == 'test_c_api' or method.startswith('_'):
                continue
            self.assert_(method in weaksetmethods,
                         "WeakSet missing method " + method)

    def test_new_or_init(self):
        self.assertRaises(TypeError, WeakSet, [], 2)

    def test_len(self):
        self.assertEqual(len(self.s), len(self.d))
        self.assertEqual(len(self.fs), 1)
        del self.obj
        self.assertEqual(len(self.fs), 0)

    def test_contains(self):
        for c in self.letters:
            self.assertEqual(c in self.s, c in self.d)
        self.assertRaises(TypeError, self.s.__contains__, [[]])
        self.assert_(self.obj in self.fs)
        del self.obj
        self.assert_(ustr('F') not in self.fs)

    def test_union(self):
        u = self.s.union(self.items2)
        for c in self.letters:
            self.assertEqual(c in u, c in self.d or c in self.items2)
        self.assertEqual(self.s, WeakSet(self.items))
        self.assertEqual(type(u), WeakSet)
        self.assertRaises(TypeError, self.s.union, [[]])
        for C in set, frozenset, dict.fromkeys, list, tuple:
            x = WeakSet(self.items + self.items2)
            c = C(self.items2)
            self.assertEqual(self.s.union(c), x)

    def test_or(self):
        i = self.s.union(self.items2)
        self.assertEqual(self.s | set(self.items2), i)
        self.assertEqual(self.s | frozenset(self.items2), i)

    def test_intersection(self):
        i = self.s.intersection(self.items2)
        for c in self.letters:
            self.assertEqual(c in i, c in self.d and c in self.items2)
        self.assertEqual(self.s, WeakSet(self.items))
        self.assertEqual(type(i), WeakSet)
        for C in set, frozenset, dict.fromkeys, list, tuple:
            x = WeakSet([])
            self.assertEqual(self.s.intersection(C(self.items2)), x)

    def test_isdisjoint(self):
        self.assert_(self.s.isdisjoint(WeakSet(self.items2)))
        self.assert_(not self.s.isdisjoint(WeakSet(self.letters)))

    def test_and(self):
        i = self.s.intersection(self.items2)
        self.assertEqual(self.s & set(self.items2), i)
        self.assertEqual(self.s & frozenset(self.items2), i)

    def test_difference(self):
        i = self.s.difference(self.items2)
        for c in self.letters:
            self.assertEqual(c in i, c in self.d and c not in self.items2)
        self.assertEqual(self.s, WeakSet(self.items))
        self.assertEqual(type(i), WeakSet)
        self.assertRaises(TypeError, self.s.difference, [[]])

    def test_sub(self):
        i = self.s.difference(self.items2)
        self.assertEqual(self.s - set(self.items2), i)
        self.assertEqual(self.s - frozenset(self.items2), i)

    def test_symmetric_difference(self):
        i = self.s.symmetric_difference(self.items2)
        for c in self.letters:
            self.assertEqual(c in i, (c in self.d) ^ (c in self.items2))
        self.assertEqual(self.s, WeakSet(self.items))
        self.assertEqual(type(i), WeakSet)
        self.assertRaises(TypeError, self.s.symmetric_difference, [[]])

    def test_xor(self):
        i = self.s.symmetric_difference(self.items2)
        self.assertEqual(self.s ^ set(self.items2), i)
        self.assertEqual(self.s ^ frozenset(self.items2), i)

    def test_sub_and_super(self):
        pl, ql, rl = map(lambda s: [ustr(c) for c in s], ['ab', 'abcde', 'def'])
        p, q, r = map(WeakSet, (pl, ql, rl))
        self.assert_(p < q)
        self.assert_(p <= q)
        self.assert_(q <= q)
        self.assert_(q > p)
        self.assert_(q >= p)
        self.failIf(q < r)
        self.failIf(q <= r)
        self.failIf(q > r)
        self.failIf(q >= r)
        self.assert_(set('a').issubset('abc'))
        self.assert_(set('abc').issuperset('a'))
        self.failIf(set('a').issubset('cbs'))
        self.failIf(set('cbs').issuperset('a'))

    def test_gc(self):
        # Create a nest of cycles to exercise overall ref count check
        class A:
            pass
        s = set(A() for i in range(1000))
        for elem in s:
            elem.cycle = s
            elem.sub = elem
            elem.set = set([elem])

    def test_subclass_with_custom_hash(self):
        # Bug #1257731
        class H(WeakSet):
            def __hash__(self):
                return int(id(self) & 0x7fffffff)
        s = H()
        f = set()
        f.add(s)
        self.assert_(s in f)
        f.remove(s)
        f.add(s)
        f.discard(s)

    def test_init(self):
        s = WeakSet()
        s.__init__(self.items)
        self.assertEqual(s, self.s)
        s.__init__(self.items2)
        self.assertEqual(s, WeakSet(self.items2))
        self.assertRaises(TypeError, s.__init__, s, 2)
        self.assertRaises(TypeError, s.__init__, 1)

    def test_constructor_identity(self):
        s = WeakSet(self.items)
        t = WeakSet(s)
        self.assertNotEqual(id(s), id(t))

    def test_set_literal(self):
        s = set([1,2,3])
        t = {1,2,3}
        self.assertEqual(s, t)

    def test_hash(self):
        self.assertRaises(TypeError, hash, self.s)

    def test_clear(self):
        self.s.clear()
        self.assertEqual(self.s, set())
        self.assertEqual(len(self.s), 0)

    def test_copy(self):
        dup = self.s.copy()
        self.assertEqual(self.s, dup)
        self.assertNotEqual(id(self.s), id(dup))

    def test_add(self):
        x = ustr('Q')
        self.s.add(x)
        self.assert_(x in self.s)
        dup = self.s.copy()
        self.s.add(x)
        self.assertEqual(self.s, dup)
        self.assertRaises(TypeError, self.s.add, [])
        self.fs.add(Foo())
        self.assert_(len(self.fs) == 1)
        self.fs.add(self.obj)
        self.assert_(len(self.fs) == 1)

    def test_remove(self):
        x = ustr('a')
        self.s.remove(x)
        self.assert_(x not in self.s)
        self.assertRaises(KeyError, self.s.remove, x)
        self.assertRaises(TypeError, self.s.remove, [])

    def test_discard(self):
        a, q = ustr('a'), ustr('Q')
        self.s.discard(a)
        self.assert_(a not in self.s)
        self.s.discard(q)
        self.assertRaises(TypeError, self.s.discard, [])

    def test_pop(self):
        for i in range(len(self.s)):
            elem = self.s.pop()
            self.assert_(elem not in self.s)
        self.assertRaises(KeyError, self.s.pop)

    def test_update(self):
        retval = self.s.update(self.items2)
        self.assertEqual(retval, None)
        for c in (self.items + self.items2):
            self.assert_(c in self.s)
        self.assertRaises(TypeError, self.s.update, [[]])

    def test_update_set(self):
        self.s.update(set(self.items2))
        for c in (self.items + self.items2):
            self.assert_(c in self.s)

    def test_ior(self):
        self.s |= set(self.items2)
        for c in (self.items + self.items2):
            self.assert_(c in self.s)

    def test_intersection_update(self):
        retval = self.s.intersection_update(self.items2)
        self.assertEqual(retval, None)
        for c in (self.items + self.items2):
            if c in self.items2 and c in self.items:
                self.assert_(c in self.s)
            else:
                self.assert_(c not in self.s)
        self.assertRaises(TypeError, self.s.intersection_update, [[]])

    def test_iand(self):
        self.s &= set(self.items2)
        for c in (self.items + self.items2):
            if c in self.items2 and c in self.items:
                self.assert_(c in self.s)
            else:
                self.assert_(c not in self.s)

    def test_difference_update(self):
        retval = self.s.difference_update(self.items2)
        self.assertEqual(retval, None)
        for c in (self.items + self.items2):
            if c in self.items and c not in self.items2:
                self.assert_(c in self.s)
            else:
                self.assert_(c not in self.s)
        self.assertRaises(TypeError, self.s.difference_update, [[]])
        self.assertRaises(TypeError, self.s.symmetric_difference_update, [[]])

    def test_isub(self):
        self.s -= set(self.items2)
        for c in (self.items + self.items2):
            if c in self.items and c not in self.items2:
                self.assert_(c in self.s)
            else:
                self.assert_(c not in self.s)

    def test_symmetric_difference_update(self):
        retval = self.s.symmetric_difference_update(self.items2)
        self.assertEqual(retval, None)
        for c in (self.items + self.items2):
            if (c in self.items) ^ (c in self.items2):
                self.assert_(c in self.s)
            else:
                self.assert_(c not in self.s)
        self.assertRaises(TypeError, self.s.symmetric_difference_update, [[]])

    def test_ixor(self):
        self.s ^= set(self.items2)
        for c in (self.items + self.items2):
            if (c in self.items) ^ (c in self.items2):
                self.assert_(c in self.s)
            else:
                self.assert_(c not in self.s)

    def test_inplace_on_self(self):
        t = self.s.copy()
        t |= t
        self.assertEqual(t, self.s)
        t &= t
        self.assertEqual(t, self.s)
        t -= t
        self.assertEqual(t, WeakSet())
        t = self.s.copy()
        t ^= t
        self.assertEqual(t, WeakSet())
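This older variant of the suite wraps every character in ustr for the same reason as above: built-in str instances cannot be weakly referenced, so a trivial subclass is used instead. The real helper lives elsewhere in the test module; a minimal stand-in might look like this.

from weakref import WeakSet

class ustr(str):
    """str subclass whose instances support weak references, unlike str itself."""

WeakSet([ustr('a')])    # fine: ustr instances can be weakly referenced
try:
    WeakSet(['a'])      # plain str raises TypeError: cannot create weak reference
except TypeError as exc:
    print(exc)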
Exemplo n.º 23
class TestWeakSet(unittest.TestCase):

    def setUp(self):
        # need to keep references to them
        self.items = [ustr(c) for c in ('a', 'b', 'c')]
        self.items2 = [ustr(c) for c in ('x', 'y', 'z')]
        self.ab_items = [ustr(c) for c in 'ab']
        self.abcde_items = [ustr(c) for c in 'abcde']
        self.def_items = [ustr(c) for c in 'def']
        self.ab_weakset = WeakSet(self.ab_items)
        self.abcde_weakset = WeakSet(self.abcde_items)
        self.def_weakset = WeakSet(self.def_items)
        self.letters = [ustr(c) for c in string.ascii_letters]
        self.s = WeakSet(self.items)
        self.d = dict.fromkeys(self.items)
        self.obj = ustr('F')
        self.fs = WeakSet([self.obj])

    def test_methods(self):
        weaksetmethods = dir(WeakSet)
        for method in dir(set):
            if method == 'test_c_api' or method.startswith('_'):
                continue
            self.assertIn(method, weaksetmethods,
                         "WeakSet missing method " + method)

    def test_new_or_init(self):
        self.assertRaises(TypeError, WeakSet, [], 2)

    @support.impl_detail("finalization", graalvm=False)
    def test_len(self):
        self.assertEqual(len(self.s), len(self.d))
        self.assertEqual(len(self.fs), 1)
        del self.obj
        self.assertEqual(len(self.fs), 0)

    @support.impl_detail("finalization", graalvm=False)
    def test_contains(self):
        for c in self.letters:
            self.assertEqual(c in self.s, c in self.d)
        # 1 is not weakref'able, but that TypeError is caught by __contains__
        self.assertNotIn(1, self.s)
        self.assertIn(self.obj, self.fs)
        del self.obj
        self.assertNotIn(ustr('F'), self.fs)

    @support.impl_detail("finalization", graalvm=False)
    def test_union(self):
        u = self.s.union(self.items2)
        for c in self.letters:
            self.assertEqual(c in u, c in self.d or c in self.items2)
        self.assertEqual(self.s, WeakSet(self.items))
        self.assertEqual(type(u), WeakSet)
        self.assertRaises(TypeError, self.s.union, [[]])
        for C in set, frozenset, dict.fromkeys, list, tuple:
            x = WeakSet(self.items + self.items2)
            c = C(self.items2)
            self.assertEqual(self.s.union(c), x)
            del c
        self.assertEqual(len(u), len(self.items) + len(self.items2))
        self.items2.pop()
        gc.collect()
        self.assertEqual(len(u), len(self.items) + len(self.items2))

    def test_or(self):
        i = self.s.union(self.items2)
        self.assertEqual(self.s | set(self.items2), i)
        self.assertEqual(self.s | frozenset(self.items2), i)

    @support.impl_detail("finalization", graalvm=False)
    def test_intersection(self):
        s = WeakSet(self.letters)
        i = s.intersection(self.items2)
        for c in self.letters:
            self.assertEqual(c in i, c in self.items2 and c in self.letters)
        self.assertEqual(s, WeakSet(self.letters))
        self.assertEqual(type(i), WeakSet)
        for C in set, frozenset, dict.fromkeys, list, tuple:
            x = WeakSet([])
            self.assertEqual(i.intersection(C(self.items)), x)
        self.assertEqual(len(i), len(self.items2))
        self.items2.pop()
        gc.collect()
        self.assertEqual(len(i), len(self.items2))

    def test_isdisjoint(self):
        self.assertTrue(self.s.isdisjoint(WeakSet(self.items2)))
        self.assertTrue(not self.s.isdisjoint(WeakSet(self.letters)))

    def test_and(self):
        i = self.s.intersection(self.items2)
        self.assertEqual(self.s & set(self.items2), i)
        self.assertEqual(self.s & frozenset(self.items2), i)

    def test_difference(self):
        i = self.s.difference(self.items2)
        for c in self.letters:
            self.assertEqual(c in i, c in self.d and c not in self.items2)
        self.assertEqual(self.s, WeakSet(self.items))
        self.assertEqual(type(i), WeakSet)
        self.assertRaises(TypeError, self.s.difference, [[]])

    def test_sub(self):
        i = self.s.difference(self.items2)
        self.assertEqual(self.s - set(self.items2), i)
        self.assertEqual(self.s - frozenset(self.items2), i)

    @support.impl_detail("finalization", graalvm=False)
    def test_symmetric_difference(self):
        i = self.s.symmetric_difference(self.items2)
        for c in self.letters:
            self.assertEqual(c in i, (c in self.d) ^ (c in self.items2))
        self.assertEqual(self.s, WeakSet(self.items))
        self.assertEqual(type(i), WeakSet)
        self.assertRaises(TypeError, self.s.symmetric_difference, [[]])
        self.assertEqual(len(i), len(self.items) + len(self.items2))
        self.items2.pop()
        gc.collect()
        self.assertEqual(len(i), len(self.items) + len(self.items2))

    def test_xor(self):
        i = self.s.symmetric_difference(self.items2)
        self.assertEqual(self.s ^ set(self.items2), i)
        self.assertEqual(self.s ^ frozenset(self.items2), i)

    def test_sub_and_super(self):
        self.assertTrue(self.ab_weakset <= self.abcde_weakset)
        self.assertTrue(self.abcde_weakset <= self.abcde_weakset)
        self.assertTrue(self.abcde_weakset >= self.ab_weakset)
        self.assertFalse(self.abcde_weakset <= self.def_weakset)
        self.assertFalse(self.abcde_weakset >= self.def_weakset)
        self.assertTrue(set('a').issubset('abc'))
        self.assertTrue(set('abc').issuperset('a'))
        self.assertFalse(set('a').issubset('cbs'))
        self.assertFalse(set('cbs').issuperset('a'))

    def test_lt(self):
        self.assertTrue(self.ab_weakset < self.abcde_weakset)
        self.assertFalse(self.abcde_weakset < self.def_weakset)
        self.assertFalse(self.ab_weakset < self.ab_weakset)
        self.assertFalse(WeakSet() < WeakSet())

    def test_gt(self):
        self.assertTrue(self.abcde_weakset > self.ab_weakset)
        self.assertFalse(self.abcde_weakset > self.def_weakset)
        self.assertFalse(self.ab_weakset > self.ab_weakset)
        self.assertFalse(WeakSet() > WeakSet())

    def test_gc(self):
        # Create a nest of cycles to exercise overall ref count check
        s = WeakSet(Foo() for i in range(1000))
        for elem in s:
            elem.cycle = s
            elem.sub = elem
            elem.set = WeakSet([elem])

    def test_subclass_with_custom_hash(self):
        # Bug #1257731
        class H(WeakSet):
            def __hash__(self):
                return int(id(self) & 0x7fffffff)
        s = H()
        f = set()
        f.add(s)
        self.assertIn(s, f)
        f.remove(s)
        f.add(s)
        f.discard(s)

    def test_init(self):
        s = WeakSet()
        s.__init__(self.items)
        self.assertEqual(s, self.s)
        s.__init__(self.items2)
        self.assertEqual(s, WeakSet(self.items2))
        self.assertRaises(TypeError, s.__init__, s, 2)
        self.assertRaises(TypeError, s.__init__, 1)

    def test_constructor_identity(self):
        s = WeakSet(self.items)
        t = WeakSet(s)
        self.assertNotEqual(id(s), id(t))

    def test_hash(self):
        self.assertRaises(TypeError, hash, self.s)

    def test_clear(self):
        self.s.clear()
        self.assertEqual(self.s, WeakSet([]))
        self.assertEqual(len(self.s), 0)

    def test_copy(self):
        dup = self.s.copy()
        self.assertEqual(self.s, dup)
        self.assertNotEqual(id(self.s), id(dup))

    @support.impl_detail("refcounting", graalvm=False)
    def test_add(self):
        x = ustr('Q')
        self.s.add(x)
        self.assertIn(x, self.s)
        dup = self.s.copy()
        self.s.add(x)
        self.assertEqual(self.s, dup)
        self.assertRaises(TypeError, self.s.add, [])
        self.fs.add(Foo())
        self.assertTrue(len(self.fs) == 1)
        self.fs.add(self.obj)
        self.assertTrue(len(self.fs) == 1)

    def test_remove(self):
        x = ustr('a')
        self.s.remove(x)
        self.assertNotIn(x, self.s)
        self.assertRaises(KeyError, self.s.remove, x)
        self.assertRaises(TypeError, self.s.remove, [])

    def test_discard(self):
        a, q = ustr('a'), ustr('Q')
        self.s.discard(a)
        self.assertNotIn(a, self.s)
        self.s.discard(q)
        self.assertRaises(TypeError, self.s.discard, [])

    def test_pop(self):
        for i in range(len(self.s)):
            elem = self.s.pop()
            self.assertNotIn(elem, self.s)
        self.assertRaises(KeyError, self.s.pop)

    def test_update(self):
        retval = self.s.update(self.items2)
        self.assertEqual(retval, None)
        for c in (self.items + self.items2):
            self.assertIn(c, self.s)
        self.assertRaises(TypeError, self.s.update, [[]])

    def test_update_set(self):
        self.s.update(set(self.items2))
        for c in (self.items + self.items2):
            self.assertIn(c, self.s)

    def test_ior(self):
        self.s |= set(self.items2)
        for c in (self.items + self.items2):
            self.assertIn(c, self.s)

    def test_intersection_update(self):
        retval = self.s.intersection_update(self.items2)
        self.assertEqual(retval, None)
        for c in (self.items + self.items2):
            if c in self.items2 and c in self.items:
                self.assertIn(c, self.s)
            else:
                self.assertNotIn(c, self.s)
        self.assertRaises(TypeError, self.s.intersection_update, [[]])

    def test_iand(self):
        self.s &= set(self.items2)
        for c in (self.items + self.items2):
            if c in self.items2 and c in self.items:
                self.assertIn(c, self.s)
            else:
                self.assertNotIn(c, self.s)

    def test_difference_update(self):
        retval = self.s.difference_update(self.items2)
        self.assertEqual(retval, None)
        for c in (self.items + self.items2):
            if c in self.items and c not in self.items2:
                self.assertIn(c, self.s)
            else:
                self.assertNotIn(c, self.s)
        self.assertRaises(TypeError, self.s.difference_update, [[]])
        self.assertRaises(TypeError, self.s.symmetric_difference_update, [[]])

    def test_isub(self):
        self.s -= set(self.items2)
        for c in (self.items + self.items2):
            if c in self.items and c not in self.items2:
                self.assertIn(c, self.s)
            else:
                self.assertNotIn(c, self.s)

    def test_symmetric_difference_update(self):
        retval = self.s.symmetric_difference_update(self.items2)
        self.assertEqual(retval, None)
        for c in (self.items + self.items2):
            if (c in self.items) ^ (c in self.items2):
                self.assertIn(c, self.s)
            else:
                self.assertNotIn(c, self.s)
        self.assertRaises(TypeError, self.s.symmetric_difference_update, [[]])

    def test_ixor(self):
        self.s ^= set(self.items2)
        for c in (self.items + self.items2):
            if (c in self.items) ^ (c in self.items2):
                self.assertIn(c, self.s)
            else:
                self.assertNotIn(c, self.s)

    def test_inplace_on_self(self):
        t = self.s.copy()
        t |= t
        self.assertEqual(t, self.s)
        t &= t
        self.assertEqual(t, self.s)
        t -= t
        self.assertEqual(t, WeakSet())
        t = self.s.copy()
        t ^= t
        self.assertEqual(t, WeakSet())

    def test_eq(self):
        # issue 5964
        self.assertTrue(self.s == self.s)
        self.assertTrue(self.s == WeakSet(self.items))
        self.assertFalse(self.s == set(self.items))
        self.assertFalse(self.s == list(self.items))
        self.assertFalse(self.s == tuple(self.items))
        self.assertFalse(self.s == WeakSet([Foo]))
        self.assertFalse(self.s == 1)

    def test_ne(self):
        self.assertTrue(self.s != set(self.items))
        s1 = WeakSet()
        s2 = WeakSet()
        self.assertFalse(s1 != s2)

    @support.impl_detail("finalization", graalvm=False)
    def test_weak_destroy_while_iterating(self):
        # Issue #7105: iterators shouldn't crash when a key is implicitly removed
        # Create new items to be sure no-one else holds a reference
        items = [ustr(c) for c in ('a', 'b', 'c')]
        s = WeakSet(items)
        it = iter(s)
        next(it)             # Trigger internal iteration
        # Destroy an item
        del items[-1]
        gc.collect()    # just in case
        # We have removed either the first consumed item, or another one
        self.assertIn(len(list(it)), [len(items), len(items) - 1])
        del it
        # The removal has been committed
        self.assertEqual(len(s), len(items))

    @support.impl_detail("finalization", graalvm=False)
    def test_weak_destroy_and_mutate_while_iterating(self):
        # Issue #7105: iterators shouldn't crash when a key is implicitly removed
        items = [ustr(c) for c in string.ascii_letters]
        s = WeakSet(items)
        @contextlib.contextmanager
        def testcontext():
            try:
                it = iter(s)
                # Start iterator
                yielded = ustr(str(next(it)))
                # Schedule an item for removal and recreate it
                u = ustr(str(items.pop()))
                if yielded == u:
                    # The iterator still has a reference to the removed item,
                    # advance it (issue #20006).
                    next(it)
                gc.collect()      # just in case
                yield u
            finally:
                it = None           # should commit all removals

        with testcontext() as u:
            self.assertNotIn(u, s)
        with testcontext() as u:
            self.assertRaises(KeyError, s.remove, u)
        self.assertNotIn(u, s)
        with testcontext() as u:
            s.add(u)
        self.assertIn(u, s)
        t = s.copy()
        with testcontext() as u:
            s.update(t)
        self.assertEqual(len(s), len(t))
        with testcontext() as u:
            s.clear()
        self.assertEqual(len(s), 0)

    @support.impl_detail("finalization", graalvm=False)
    def test_len_cycles(self):
        N = 20
        items = [RefCycle() for i in range(N)]
        s = WeakSet(items)
        del items
        it = iter(s)
        try:
            next(it)
        except StopIteration:
            pass
        gc.collect()
        n1 = len(s)
        del it
        gc.collect()
        n2 = len(s)
        # one item may be kept alive inside the iterator
        self.assertIn(n1, (0, 1))
        self.assertEqual(n2, 0)

    @support.impl_detail("finalization", graalvm=False)
    def test_len_race(self):
        # Extended sanity checks for len() in the face of cyclic collection
        self.addCleanup(gc.set_threshold, *gc.get_threshold())
        for th in range(1, 100):
            N = 20
            gc.collect(0)
            gc.set_threshold(th, th, th)
            items = [RefCycle() for i in range(N)]
            s = WeakSet(items)
            del items
            # All items will be collected at next garbage collection pass
            it = iter(s)
            try:
                next(it)
            except StopIteration:
                pass
            n1 = len(s)
            del it
            n2 = len(s)
            self.assertGreaterEqual(n1, 0)
            self.assertLessEqual(n1, N)
            self.assertGreaterEqual(n2, 0)
            self.assertLessEqual(n2, n1)

    def test_repr(self):
        self.assertEqual(repr(self.s), repr(self.s.data))
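test_weak_destroy_while_iterating above asserts that a live WeakSet iterator survives members being garbage-collected underneath it. A condensed illustration of the same guarantee, where Node is a placeholder class:

import gc
from weakref import WeakSet

class Node:
    """Placeholder weak-referenceable element."""

items = [Node() for _ in range(3)]
ws = WeakSet(items)

it = iter(ws)
next(it)              # start iterating
del items[-1]         # drop a strong reference while the iterator is live
gc.collect()
survivors = list(it)  # must not crash even though an element died mid-iteration
del it                # releasing the iterator commits the pending removal
assert len(ws) == len(items)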
Exemplo n.º 24
class TestWeakSet(unittest.TestCase):

    def setUp(self):
        # need to keep references to them
        self.items = [SomeClass(c) for c in ('a', 'b', 'c')]
        self.items2 = [SomeClass(c) for c in ('x', 'y', 'z')]
        self.letters = [SomeClass(c) for c in string.ascii_letters]
        self.s = WeakSet(self.items)
        self.d = dict.fromkeys(self.items)
        self.obj = SomeClass('F')
        self.fs = WeakSet([self.obj])

    def test_methods(self):
        weaksetmethods = dir(WeakSet)
        for method in dir(set):
            if method == 'test_c_api' or method.startswith('_'):
                continue
            self.assertIn(method, weaksetmethods,
                         "WeakSet missing method " + method)

    def test_new_or_init(self):
        self.assertRaises(TypeError, WeakSet, [], 2)

    def test_len(self):
        self.assertEqual(len(self.s), len(self.d))
        self.assertEqual(len(self.fs), 1)
        del self.obj
        self.assertEqual(len(self.fs), 0)

    def test_contains(self):
        for c in self.letters:
            self.assertEqual(c in self.s, c in self.d)
        # 1 is not weakref'able, but that TypeError is caught by __contains__
        self.assertNotIn(1, self.s)
        self.assertIn(self.obj, self.fs)
        del self.obj
        self.assertNotIn(SomeClass('F'), self.fs)

    def test_union(self):
        u = self.s.union(self.items2)
        for c in self.letters:
            self.assertEqual(c in u, c in self.d or c in self.items2)
        self.assertEqual(self.s, WeakSet(self.items))
        self.assertEqual(type(u), WeakSet)
        self.assertRaises(TypeError, self.s.union, [[]])
        for C in set, frozenset, dict.fromkeys, list, tuple:
            x = WeakSet(self.items + self.items2)
            c = C(self.items2)
            self.assertEqual(self.s.union(c), x)

    def test_or(self):
        i = self.s.union(self.items2)
        self.assertEqual(self.s | set(self.items2), i)
        self.assertEqual(self.s | frozenset(self.items2), i)

    def test_intersection(self):
        i = self.s.intersection(self.items2)
        for c in self.letters:
            self.assertEqual(c in i, c in self.d and c in self.items2)
        self.assertEqual(self.s, WeakSet(self.items))
        self.assertEqual(type(i), WeakSet)
        for C in set, frozenset, dict.fromkeys, list, tuple:
            x = WeakSet([])
            self.assertEqual(self.s.intersection(C(self.items2)), x)

    def test_isdisjoint(self):
        self.assertTrue(self.s.isdisjoint(WeakSet(self.items2)))
        self.assertTrue(not self.s.isdisjoint(WeakSet(self.letters)))

    def test_and(self):
        i = self.s.intersection(self.items2)
        self.assertEqual(self.s & set(self.items2), i)
        self.assertEqual(self.s & frozenset(self.items2), i)

    def test_difference(self):
        i = self.s.difference(self.items2)
        for c in self.letters:
            self.assertEqual(c in i, c in self.d and c not in self.items2)
        self.assertEqual(self.s, WeakSet(self.items))
        self.assertEqual(type(i), WeakSet)
        self.assertRaises(TypeError, self.s.difference, [[]])

    def test_sub(self):
        i = self.s.difference(self.items2)
        self.assertEqual(self.s - set(self.items2), i)
        self.assertEqual(self.s - frozenset(self.items2), i)

    def test_symmetric_difference(self):
        i = self.s.symmetric_difference(self.items2)
        for c in self.letters:
            self.assertEqual(c in i, (c in self.d) ^ (c in self.items2))
        self.assertEqual(self.s, WeakSet(self.items))
        self.assertEqual(type(i), WeakSet)
        self.assertRaises(TypeError, self.s.symmetric_difference, [[]])

    def test_xor(self):
        i = self.s.symmetric_difference(self.items2)
        self.assertEqual(self.s ^ set(self.items2), i)
        self.assertEqual(self.s ^ frozenset(self.items2), i)

    def test_sub_and_super(self):
        pl, ql, rl = map(lambda s: [SomeClass(c) for c in s], ['ab', 'abcde', 'def'])
        p, q, r = map(WeakSet, (pl, ql, rl))
        self.assertTrue(p < q)
        self.assertTrue(p <= q)
        self.assertTrue(q <= q)
        self.assertTrue(q > p)
        self.assertTrue(q >= p)
        self.assertFalse(q < r)
        self.assertFalse(q <= r)
        self.assertFalse(q > r)
        self.assertFalse(q >= r)
        self.assertTrue(set('a').issubset('abc'))
        self.assertTrue(set('abc').issuperset('a'))
        self.assertFalse(set('a').issubset('cbs'))
        self.assertFalse(set('cbs').issuperset('a'))

    def test_gc(self):
        # Create a nest of cycles to exercise overall ref count check
        s = WeakSet(Foo() for i in range(1000))
        for elem in s:
            elem.cycle = s
            elem.sub = elem
            elem.set = WeakSet([elem])

    def test_subclass_with_custom_hash(self):
        # Bug #1257731
        class H(WeakSet):
            def __hash__(self):
                return int(id(self) & 0x7fffffff)
        s = H()
        f = set()
        f.add(s)
        self.assertIn(s, f)
        f.remove(s)
        f.add(s)
        f.discard(s)

    def test_init(self):
        s = WeakSet()
        s.__init__(self.items)
        self.assertEqual(s, self.s)
        s.__init__(self.items2)
        self.assertEqual(s, WeakSet(self.items2))
        self.assertRaises(TypeError, s.__init__, s, 2)
        self.assertRaises(TypeError, s.__init__, 1)

    def test_constructor_identity(self):
        s = WeakSet(self.items)
        t = WeakSet(s)
        self.assertNotEqual(id(s), id(t))

    def test_hash(self):
        self.assertRaises(TypeError, hash, self.s)

    def test_clear(self):
        self.s.clear()
        self.assertEqual(self.s, WeakSet([]))
        self.assertEqual(len(self.s), 0)

    def test_copy(self):
        dup = self.s.copy()
        self.assertEqual(self.s, dup)
        self.assertNotEqual(id(self.s), id(dup))

    def test_add(self):
        x = SomeClass('Q')
        self.s.add(x)
        self.assertIn(x, self.s)
        dup = self.s.copy()
        self.s.add(x)
        self.assertEqual(self.s, dup)
        self.assertRaises(TypeError, self.s.add, [])
        self.fs.add(Foo())
        self.assertTrue(len(self.fs) == 1)
        self.fs.add(self.obj)
        self.assertTrue(len(self.fs) == 1)

    def test_remove(self):
        x = SomeClass('a')
        self.s.remove(x)
        self.assertNotIn(x, self.s)
        self.assertRaises(KeyError, self.s.remove, x)
        self.assertRaises(TypeError, self.s.remove, [])

    def test_discard(self):
        a, q = SomeClass('a'), SomeClass('Q')
        self.s.discard(a)
        self.assertNotIn(a, self.s)
        self.s.discard(q)
        self.assertRaises(TypeError, self.s.discard, [])

    def test_pop(self):
        for i in range(len(self.s)):
            elem = self.s.pop()
            self.assertNotIn(elem, self.s)
        self.assertRaises(KeyError, self.s.pop)

    def test_update(self):
        retval = self.s.update(self.items2)
        self.assertEqual(retval, None)
        for c in (self.items + self.items2):
            self.assertIn(c, self.s)
        self.assertRaises(TypeError, self.s.update, [[]])

    def test_update_set(self):
        self.s.update(set(self.items2))
        for c in (self.items + self.items2):
            self.assertIn(c, self.s)

    def test_ior(self):
        self.s |= set(self.items2)
        for c in (self.items + self.items2):
            self.assertIn(c, self.s)

    def test_intersection_update(self):
        retval = self.s.intersection_update(self.items2)
        self.assertEqual(retval, None)
        for c in (self.items + self.items2):
            if c in self.items2 and c in self.items:
                self.assertIn(c, self.s)
            else:
                self.assertNotIn(c, self.s)
        self.assertRaises(TypeError, self.s.intersection_update, [[]])

    def test_iand(self):
        self.s &= set(self.items2)
        for c in (self.items + self.items2):
            if c in self.items2 and c in self.items:
                self.assertIn(c, self.s)
            else:
                self.assertNotIn(c, self.s)

    def test_difference_update(self):
        retval = self.s.difference_update(self.items2)
        self.assertEqual(retval, None)
        for c in (self.items + self.items2):
            if c in self.items and c not in self.items2:
                self.assertIn(c, self.s)
            else:
                self.assertNotIn(c, self.s)
        self.assertRaises(TypeError, self.s.difference_update, [[]])
        self.assertRaises(TypeError, self.s.symmetric_difference_update, [[]])

    def test_isub(self):
        self.s -= set(self.items2)
        for c in (self.items + self.items2):
            if c in self.items and c not in self.items2:
                self.assertIn(c, self.s)
            else:
                self.assertNotIn(c, self.s)

    def test_symmetric_difference_update(self):
        retval = self.s.symmetric_difference_update(self.items2)
        self.assertEqual(retval, None)
        for c in (self.items + self.items2):
            if (c in self.items) ^ (c in self.items2):
                self.assertIn(c, self.s)
            else:
                self.assertNotIn(c, self.s)
        self.assertRaises(TypeError, self.s.symmetric_difference_update, [[]])

    def test_ixor(self):
        self.s ^= set(self.items2)
        for c in (self.items + self.items2):
            if (c in self.items) ^ (c in self.items2):
                self.assertIn(c, self.s)
            else:
                self.assertNotIn(c, self.s)

    def test_inplace_on_self(self):
        t = self.s.copy()
        t |= t
        self.assertEqual(t, self.s)
        t &= t
        self.assertEqual(t, self.s)
        t -= t
        self.assertEqual(t, WeakSet())
        t = self.s.copy()
        t ^= t
        self.assertEqual(t, WeakSet())

    def test_eq(self):
        # issue 5964
        self.assertTrue(self.s == self.s)
        self.assertTrue(self.s == WeakSet(self.items))
        self.assertFalse(self.s == set(self.items))
        self.assertFalse(self.s == list(self.items))
        self.assertFalse(self.s == tuple(self.items))
        self.assertFalse(self.s == 1)

    def test_weak_destroy_while_iterating(self):
        # Issue #7105: iterators shouldn't crash when a key is implicitly removed
        # Create new items to be sure no-one else holds a reference
        items = [SomeClass(c) for c in ('a', 'b', 'c')]
        s = WeakSet(items)
        it = iter(s)
        next(it)             # Trigger internal iteration
        # Destroy an item
        del items[-1]
        gc.collect()    # just in case
        # We have removed either the first consumed item, or another one
        self.assertIn(len(list(it)), [len(items), len(items) - 1])
        del it
        # The removal has been committed
        self.assertEqual(len(s), len(items))

    def test_weak_destroy_and_mutate_while_iterating(self):
        # Issue #7105: iterators shouldn't crash when a key is implicitly removed
        items = [SomeClass(c) for c in string.ascii_letters]
        s = WeakSet(items)
        @contextlib.contextmanager
        def testcontext():
            try:
                it = iter(s)
                next(it)
                # Schedule an item for removal and recreate it
                u = SomeClass(str(items.pop()))
                gc.collect()      # just in case
                yield u
            finally:
                it = None           # should commit all removals

        with testcontext() as u:
            self.assertNotIn(u, s)
        with testcontext() as u:
            self.assertRaises(KeyError, s.remove, u)
        self.assertNotIn(u, s)
        with testcontext() as u:
            s.add(u)
        self.assertIn(u, s)
        t = s.copy()
        with testcontext() as u:
            s.update(t)
        self.assertEqual(len(s), len(t))
        with testcontext() as u:
            s.clear()
        self.assertEqual(len(s), 0)
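The tests above lean on one property of WeakSet that is easy to demonstrate in isolation: entries vanish once their last strong reference dies, and iterating while that happens is safe. Below is a minimal, self-contained sketch of that behaviour (the Item class is purely illustrative); on CPython the removal is immediate, on other interpreters it may need a collection pass.

import gc
from weakref import WeakSet

class Item:
    def __init__(self, name):
        self.name = name

items = [Item(c) for c in 'abc']
s = WeakSet(items)
it = iter(s)
next(it)                 # start iterating
del items[-1]            # drop the last strong reference to one element
gc.collect()             # make the collection deterministic on non-refcounting VMs
remaining = list(it)     # iteration continues safely; already-dead entries are skipped
del it                   # pending removals are committed once iteration ends
gc.collect()             # commit any removals that were deferred during iteration
assert len(s) == len(items) == 2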
Exemplo n.º 25
0
class ObjectRoutingComponent(FootprintToggleMixin, Component, HasTunableFactory, AutoFactoryInit, component_name=types.OBJECT_ROUTING_COMPONENT):
    FACTORY_TUNABLES = {
        'routing_behavior_map': TunableMapping(
            description='\n            A mapping of states to behavior. When the object enters a state, its\n            corresponding routing behavior is started.\n            ',
            key_type=TunableReference(manager=services.get_instance_manager(sims4.resources.Types.OBJECT_STATE), class_restrictions='ObjectStateValue'),
            value_type=OptionalTunable(tunable=ObjectRoutingBehavior.TunableReference(), enabled_by_default=True, enabled_name='Start_Behavior', disabled_name='Stop_All_Behavior', disabled_value=UNSET)),
        'privacy_rules': OptionalTunable(
            description='\n            If enabled, this object will care about privacy regions.\n            ',
            tunable=TunableTuple(
                description='\n                Privacy rules for this object.\n                ',
                on_enter=TunableTuple(
                    description='\n                    Tuning for when this object is considered a violator of\n                    privacy.\n                    ',
                    loot_list=TunableList(
                        description='\n                        A list of loot operations to apply when the object\n                        enters a privacy region.\n                        ',
                        tunable=LootActions.TunableReference(pack_safe=True))))),
        'tracking_category': TunableEnumEntry(
            description='\n            Used to classify routing objects for the purpose of putting them\n            into buckets for the object routing service to restrict the number\n            of simultaneously-active objects.\n            ',
            tunable_type=ObjectRoutingBehaviorTrackingCategory,
            default=ObjectRoutingBehaviorTrackingCategory.NONE)}

    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)
        self._running_behavior = None
        self._idle_element = None
        self._previous_parent_ref = None
        self._pending_running_behavior = None
        self._privacy_violations = WeakSet()

    @property
    def previous_parent(self):
        if self._previous_parent_ref is not None:
            return self._previous_parent_ref()

    def _setup(self):
        master_controller = services.get_master_controller()
        master_controller.add_sim(self.owner)
        if self.privacy_rules:
            privacy_service = services.privacy_service()
            privacy_service.add_vehicle_to_monitor(self.owner)
        self.owner.routing_component.on_sim_added()
        self.add_callbacks()

    def on_add(self, *_, **__):
        zone = services.current_zone()
        if not zone.is_zone_loading:
            self._setup()

    def on_finalize_load(self):
        self._setup()

    def on_remove(self):
        self.remove_callbacks()
        self.owner.routing_component.on_sim_removed()
        master_controller = services.get_master_controller()
        master_controller.remove_sim(self.owner)
        if self.privacy_rules:
            privacy_service = services.privacy_service()
            privacy_service.remove_vehicle_to_monitor(self.owner)

    def add_callbacks(self):
        if self.privacy_rules:
            self.owner.register_on_location_changed(self._check_privacy)
        self.register_routing_event_callbacks()

    def remove_callbacks(self):
        if self.owner.is_on_location_changed_callback_registered(self._check_privacy):
            self.owner.unregister_on_location_changed(self._check_privacy)
        self.unregister_routing_event_callbacks()

    def handle_privacy_violation(self, privacy):
        if not self.privacy_rules:
            return
        resolver = SingleObjectResolver(self.owner)
        loots = LootOperationList(resolver, self.privacy_rules.on_enter.loot_list)
        loots.apply_operations()
        if privacy not in self._privacy_violations:
            self._privacy_violations.add(privacy)

    def violates_privacy(self, privacy):
        if not self.privacy_rules:
            return False
        elif not privacy.vehicle_violates_privacy(self.owner):
            return False
        return True

    def _check_privacy(self, _, old_location, new_location):
        if not self.privacy_rules:
            return
        for privacy in services.privacy_service().privacy_instances:
            if not privacy.privacy_violators & PrivacyViolators.VEHICLES:
                continue
            new_violation = privacy not in self._privacy_violations
            violates_privacy = self.violates_privacy(privacy)
            if new_violation:
                if violates_privacy:
                    self.handle_privacy_violation(privacy)
            elif not violates_privacy:
                self._privacy_violations.discard(privacy)

    def on_state_changed(self, state, old_value, new_value, from_init):
        if new_value is old_value:
            return
        if new_value not in self.routing_behavior_map:
            return
        self._stop_running_behavior()
        routing_behavior_type = self.routing_behavior_map[new_value]
        if routing_behavior_type is UNSET:
            return
        routing_behavior = routing_behavior_type(self.owner)
        self._set_running_behavior(routing_behavior)
        self._cancel_idle_behavior()

    def on_location_changed(self, old_location):
        parent = self.owner.parent
        if parent is not None:
            self._previous_parent_ref = parent.ref()

    def component_reset(self, reset_reason):
        if self._running_behavior is not None:
            self._pending_running_behavior = type(self._running_behavior)
            self._running_behavior.trigger_hard_stop()
            self._set_running_behavior(None)
        services.get_master_controller().on_reset_sim(self.owner, reset_reason)

    def post_component_reset(self):
        if self._pending_running_behavior is not None:
            routing_behavior = self._pending_running_behavior(self.owner)
            self._set_running_behavior(routing_behavior)
            self._pending_running_behavior = None
            self._cancel_idle_behavior()

    def _cancel_idle_behavior(self):
        if self._idle_element is not None:
            self._idle_element.trigger_soft_stop()
            self._idle_element = None

    def _set_running_behavior(self, new_behavior):
        if new_behavior == self._running_behavior:
            return
        self._running_behavior = new_behavior
        if self.tracking_category and self.tracking_category is not ObjectRoutingBehaviorTrackingCategory.NONE:
            routing_service = services.get_object_routing_service()
            if routing_service:
                if new_behavior:
                    routing_service.on_routing_start(self.owner, self.tracking_category, new_behavior)
                else:
                    routing_service.on_routing_stop(self.owner, self.tracking_category)

    @componentmethod
    def get_idle_element(self):
        self._idle_element = soft_sleep_forever()
        return (self._idle_element, self._cancel_idle_behavior)

    @componentmethod
    def get_next_work(self):
        if self._running_behavior is None or self.owner.has_work_locks:
            return WorkRequest()
        work_request = WorkRequest(work_element=self._running_behavior, required_sims=(self.owner,))
        return work_request

    @componentmethod
    def get_next_work_priority(self):
        return PriorityExtended.SubLow

    @componentmethod
    def on_requested_as_resource(self, other_work):
        if not any(resource.is_sim for resource in other_work.resources):
            return
        self.restart_running_behavior()

    def restart_running_behavior(self):
        routing_behavior_type = type(self._running_behavior) if self._running_behavior is not None else None
        self._stop_running_behavior()
        if routing_behavior_type is not None:
            routing_behavior = routing_behavior_type(self.owner)
            self._set_running_behavior(routing_behavior)

    def _stop_running_behavior(self):
        if self._running_behavior is not None:
            self._running_behavior.trigger_soft_stop()
            self._set_running_behavior(None)

    @componentmethod
    def get_participant(self, participant_type=ParticipantType.Actor, **kwargs):
        participants = self.get_participants(participant_type=participant_type, **kwargs)
        if not participants:
            return
        if len(participants) > 1:
            raise ValueError('Too many participants returned for {}!'.format(participant_type))
        return next(iter(participants))

    @componentmethod
    def get_participants(self, participant_type, **kwargs):
        if participant_type is ParticipantType.Actor:
            obj = self._running_behavior._obj if self._running_behavior else None
            return (obj,)
        elif participant_type is ParticipantType.Object:
            target = self._running_behavior.get_target() if self._running_behavior else None
            return (target,)
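Worth noting in the component above: privacy regions are tracked in a WeakSet, so a region that gets torn down elsewhere silently drops out of _privacy_violations instead of leaking. A minimal sketch of that pattern, with purely illustrative names (Region, Vehicle) rather than the game's real API, might look like this:

import gc
from weakref import WeakSet

class Region:
    def contains(self, position):
        return position == 'inside'

class Vehicle:
    def __init__(self):
        self._violations = WeakSet()

    def check(self, region, position):
        if region.contains(position):
            self._violations.add(region)       # entered the region
        else:
            self._violations.discard(region)   # left the region

v = Vehicle()
r = Region()
v.check(r, 'inside')
assert r in v._violations
del r                      # region torn down elsewhere: the entry disappears
gc.collect()               # needed on interpreters without prompt refcounting
assert len(v._violations) == 0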
Exemplo n.º 26
0
class AsyncoreLoop(object):

    def __init__(self):
        self._pid = os.getpid()
        self._loop_lock = Lock()
        self._started = False
        self._shutdown = False

        self._conns_lock = Lock()
        self._conns = WeakSet()
        self._thread = None
        atexit.register(partial(_cleanup, weakref.ref(self)))

    def maybe_start(self):
        should_start = False
        did_acquire = False
        try:
            did_acquire = self._loop_lock.acquire(False)
            if did_acquire and not self._started:
                self._started = True
                should_start = True
        finally:
            if did_acquire:
                self._loop_lock.release()

        if should_start:
            self._thread = Thread(target=self._run_loop, name="cassandra_driver_event_loop")
            self._thread.daemon = True
            self._thread.start()

    def _run_loop(self):
        log.debug("Starting asyncore event loop")
        with self._loop_lock:
            while True:
                try:
                    asyncore.loop(timeout=0.001, use_poll=True, count=1000)
                except Exception:
                    log.debug("Asyncore event loop stopped unexepectedly", exc_info=True)
                    break

                if self._shutdown:
                    break

                with self._conns_lock:
                    if len(self._conns) == 0:
                        break

            self._started = False

        log.debug("Asyncore event loop ended")

    def _cleanup(self):
        self._shutdown = True
        if not self._thread:
            return

        log.debug("Waiting for event loop thread to join...")
        self._thread.join(timeout=1.0)
        if self._thread.is_alive():
            log.warning(
                "Event loop thread could not be joined, so shutdown may not be clean. "
                "Please call Cluster.shutdown() to avoid this.")

        log.debug("Event loop thread was joined")

    def connection_created(self, connection):
        with self._conns_lock:
            self._conns.add(connection)

    def connection_destroyed(self, connection):
        with self._conns_lock:
            self._conns.discard(connection)
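The loop above stays alive only while at least one connection is still strongly referenced somewhere: _run_loop breaks out as soon as the WeakSet of connections empties. A minimal sketch of that shutdown pattern, using stand-in names (MiniLoop, Conn) rather than the driver's real API, might look like this:

import gc
import threading
import time
from weakref import WeakSet

class MiniLoop:
    def __init__(self):
        self._conns = WeakSet()
        self._lock = threading.Lock()
        self._thread = None

    def add(self, conn):
        with self._lock:
            self._conns.add(conn)
        if self._thread is None or not self._thread.is_alive():
            self._thread = threading.Thread(target=self._run, daemon=True)
            self._thread.start()

    def _run(self):
        while True:
            time.sleep(0.01)               # stand-in for asyncore.loop(...)
            with self._lock:
                if len(self._conns) == 0:  # no live connections left: stop looping
                    break

class Conn:
    pass

loop = MiniLoop()
conn = Conn()
loop.add(conn)
del conn                                   # last strong reference gone
gc.collect()                               # help non-refcounting interpreters along
loop._thread.join(timeout=1.0)             # the loop thread exits once the WeakSet empties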
Exemplo n.º 27
0
class TileableData(EntityData, Tileable, _ExecutableMixin):
    __slots__ = '_cix', '_entities', '_executed_sessions'
    _no_copy_attrs_ = SerializableWithKey._no_copy_attrs_ | {'_cix'}

    # optional fields
    # `nsplits` means the sizes of chunks for each dimension
    _nsplits = TupleField('nsplits',
                          ValueType.tuple(ValueType.uint64),
                          on_serialize=on_serialize_nsplits)

    def __init__(self, *args, **kwargs):
        if kwargs.get('_nsplits', None) is not None:
            kwargs['_nsplits'] = tuple(tuple(s) for s in kwargs['_nsplits'])

        super().__init__(*args, **kwargs)

        if hasattr(self, '_chunks') and self._chunks:
            self._chunks = sorted(self._chunks, key=attrgetter('index'))

        self._entities = WeakSet()
        self._executed_sessions = []

    @property
    def chunk_shape(self):
        if hasattr(self, '_nsplits') and self._nsplits is not None:
            return tuple(map(len, self._nsplits))

    @property
    def chunks(self) -> List["Chunk"]:
        return getattr(self, '_chunks', None)

    @property
    def nsplits(self):
        return getattr(self, '_nsplits', None)

    @nsplits.setter
    def nsplits(self, new_nsplits):
        self._nsplits = new_nsplits

    @property
    def params(self) -> dict:
        # params returns the properties that are useful to rebuild a new tileable object
        return dict()

    @property
    def cix(self):
        if self.ndim == 0:
            return ChunksIndexer(self)

        try:
            if getattr(self, '_cix', None) is None:
                self._cix = ChunksIndexer(self)
            return self._cix
        except (TypeError, ValueError):
            return ChunksIndexer(self)

    @property
    def entities(self):
        return self._entities

    def is_coarse(self):
        return not hasattr(self, '_chunks') or self._chunks is None or len(
            self._chunks) == 0

    @enter_mode(build=True)
    def attach(self, entity):
        self._entities.add(entity)

    @enter_mode(build=True)
    def detach(self, entity):
        self._entities.discard(entity)
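The attach/detach pair above is the usual way to let lightweight wrapper entities register themselves with shared data without keeping each other alive: the data holds only weak references, so a wrapper that goes away is forgotten automatically. A minimal sketch with illustrative names (Data, Entity), not the project's real classes:

import gc
from weakref import WeakSet

class Data:
    def __init__(self):
        self._entities = WeakSet()

    def attach(self, entity):
        self._entities.add(entity)

    def detach(self, entity):
        self._entities.discard(entity)

class Entity:
    def __init__(self, data):
        self.data = data
        data.attach(self)

d = Data()
e = Entity(d)
assert len(d._entities) == 1
del e                    # dropping the wrapper also drops it from the WeakSet
gc.collect()             # needed on interpreters without prompt refcounting
assert len(d._entities) == 0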
Exemplo n.º 28
0
class TestWeakSet(unittest.TestCase):
    def setUp(self):
        # need to keep references to them
        self.items = [ustr(c) for c in ("a", "b", "c")]
        self.items2 = [ustr(c) for c in ("x", "y", "z")]
        self.letters = [ustr(c) for c in string.ascii_letters]
        self.s = WeakSet(self.items)
        self.d = dict.fromkeys(self.items)
        self.obj = ustr("F")
        self.fs = WeakSet([self.obj])

    def test_methods(self):
        weaksetmethods = dir(WeakSet)
        for method in dir(set):
            if method == "test_c_api" or method.startswith("_"):
                continue
            self.assert_(method in weaksetmethods, "WeakSet missing method " + method)

    def test_new_or_init(self):
        self.assertRaises(TypeError, WeakSet, [], 2)

    def test_len(self):
        self.assertEqual(len(self.s), len(self.d))
        self.assertEqual(len(self.fs), 1)
        del self.obj
        self.assertEqual(len(self.fs), 0)

    def test_contains(self):
        for c in self.letters:
            self.assertEqual(c in self.s, c in self.d)
        self.assertRaises(TypeError, self.s.__contains__, [[]])
        self.assert_(self.obj in self.fs)
        del self.obj
        self.assert_(ustr("F") not in self.fs)

    def test_union(self):
        u = self.s.union(self.items2)
        for c in self.letters:
            self.assertEqual(c in u, c in self.d or c in self.items2)
        self.assertEqual(self.s, WeakSet(self.items))
        self.assertEqual(type(u), WeakSet)
        self.assertRaises(TypeError, self.s.union, [[]])
        for C in set, frozenset, dict.fromkeys, list, tuple:
            x = WeakSet(self.items + self.items2)
            c = C(self.items2)
            self.assertEqual(self.s.union(c), x)

    def test_or(self):
        i = self.s.union(self.items2)
        self.assertEqual(self.s | set(self.items2), i)
        self.assertEqual(self.s | frozenset(self.items2), i)

    def test_intersection(self):
        i = self.s.intersection(self.items2)
        for c in self.letters:
            self.assertEqual(c in i, c in self.d and c in self.items2)
        self.assertEqual(self.s, WeakSet(self.items))
        self.assertEqual(type(i), WeakSet)
        for C in set, frozenset, dict.fromkeys, list, tuple:
            x = WeakSet([])
            self.assertEqual(self.s.intersection(C(self.items2)), x)

    def test_isdisjoint(self):
        self.assert_(self.s.isdisjoint(WeakSet(self.items2)))
        self.assert_(not self.s.isdisjoint(WeakSet(self.letters)))

    def test_and(self):
        i = self.s.intersection(self.items2)
        self.assertEqual(self.s & set(self.items2), i)
        self.assertEqual(self.s & frozenset(self.items2), i)

    def test_difference(self):
        i = self.s.difference(self.items2)
        for c in self.letters:
            self.assertEqual(c in i, c in self.d and c not in self.items2)
        self.assertEqual(self.s, WeakSet(self.items))
        self.assertEqual(type(i), WeakSet)
        self.assertRaises(TypeError, self.s.difference, [[]])

    def test_sub(self):
        i = self.s.difference(self.items2)
        self.assertEqual(self.s - set(self.items2), i)
        self.assertEqual(self.s - frozenset(self.items2), i)

    def test_symmetric_difference(self):
        i = self.s.symmetric_difference(self.items2)
        for c in self.letters:
            self.assertEqual(c in i, (c in self.d) ^ (c in self.items2))
        self.assertEqual(self.s, WeakSet(self.items))
        self.assertEqual(type(i), WeakSet)
        self.assertRaises(TypeError, self.s.symmetric_difference, [[]])

    def test_xor(self):
        i = self.s.symmetric_difference(self.items2)
        self.assertEqual(self.s ^ set(self.items2), i)
        self.assertEqual(self.s ^ frozenset(self.items2), i)

    def test_sub_and_super(self):
        pl, ql, rl = map(lambda s: [ustr(c) for c in s], ["ab", "abcde", "def"])
        p, q, r = map(WeakSet, (pl, ql, rl))
        self.assert_(p < q)
        self.assert_(p <= q)
        self.assert_(q <= q)
        self.assert_(q > p)
        self.assert_(q >= p)
        self.failIf(q < r)
        self.failIf(q <= r)
        self.failIf(q > r)
        self.failIf(q >= r)
        self.assert_(set("a").issubset("abc"))
        self.assert_(set("abc").issuperset("a"))
        self.failIf(set("a").issubset("cbs"))
        self.failIf(set("cbs").issuperset("a"))

    def test_gc(self):
        # Create a nest of cycles to exercise overall ref count check
        s = WeakSet(Foo() for i in range(1000))
        for elem in s:
            elem.cycle = s
            elem.sub = elem
            elem.set = WeakSet([elem])

    def test_subclass_with_custom_hash(self):
        # Bug #1257731
        class H(WeakSet):
            def __hash__(self):
                return int(id(self) & 0x7FFFFFFF)

        s = H()
        f = set()
        f.add(s)
        self.assert_(s in f)
        f.remove(s)
        f.add(s)
        f.discard(s)

    def test_init(self):
        s = WeakSet()
        s.__init__(self.items)
        self.assertEqual(s, self.s)
        s.__init__(self.items2)
        self.assertEqual(s, WeakSet(self.items2))
        self.assertRaises(TypeError, s.__init__, s, 2)
        self.assertRaises(TypeError, s.__init__, 1)

    def test_constructor_identity(self):
        s = WeakSet(self.items)
        t = WeakSet(s)
        self.assertNotEqual(id(s), id(t))

    def test_hash(self):
        self.assertRaises(TypeError, hash, self.s)

    def test_clear(self):
        self.s.clear()
        self.assertEqual(self.s, WeakSet([]))
        self.assertEqual(len(self.s), 0)

    def test_copy(self):
        dup = self.s.copy()
        self.assertEqual(self.s, dup)
        self.assertNotEqual(id(self.s), id(dup))

    def test_add(self):
        x = ustr("Q")
        self.s.add(x)
        self.assert_(x in self.s)
        dup = self.s.copy()
        self.s.add(x)
        self.assertEqual(self.s, dup)
        self.assertRaises(TypeError, self.s.add, [])
        self.fs.add(Foo())
        self.assert_(len(self.fs) == 1)
        self.fs.add(self.obj)
        self.assert_(len(self.fs) == 1)

    def test_remove(self):
        x = ustr("a")
        self.s.remove(x)
        self.assert_(x not in self.s)
        self.assertRaises(KeyError, self.s.remove, x)
        self.assertRaises(TypeError, self.s.remove, [])

    def test_discard(self):
        a, q = ustr("a"), ustr("Q")
        self.s.discard(a)
        self.assert_(a not in self.s)
        self.s.discard(q)
        self.assertRaises(TypeError, self.s.discard, [])

    def test_pop(self):
        for i in range(len(self.s)):
            elem = self.s.pop()
            self.assert_(elem not in self.s)
        self.assertRaises(KeyError, self.s.pop)

    def test_update(self):
        retval = self.s.update(self.items2)
        self.assertEqual(retval, None)
        for c in self.items + self.items2:
            self.assert_(c in self.s)
        self.assertRaises(TypeError, self.s.update, [[]])

    def test_update_set(self):
        self.s.update(set(self.items2))
        for c in self.items + self.items2:
            self.assert_(c in self.s)

    def test_ior(self):
        self.s |= set(self.items2)
        for c in self.items + self.items2:
            self.assert_(c in self.s)

    def test_intersection_update(self):
        retval = self.s.intersection_update(self.items2)
        self.assertEqual(retval, None)
        for c in self.items + self.items2:
            if c in self.items2 and c in self.items:
                self.assert_(c in self.s)
            else:
                self.assert_(c not in self.s)
        self.assertRaises(TypeError, self.s.intersection_update, [[]])

    def test_iand(self):
        self.s &= set(self.items2)
        for c in self.items + self.items2:
            if c in self.items2 and c in self.items:
                self.assert_(c in self.s)
            else:
                self.assert_(c not in self.s)

    def test_difference_update(self):
        retval = self.s.difference_update(self.items2)
        self.assertEqual(retval, None)
        for c in self.items + self.items2:
            if c in self.items and c not in self.items2:
                self.assert_(c in self.s)
            else:
                self.assert_(c not in self.s)
        self.assertRaises(TypeError, self.s.difference_update, [[]])
        self.assertRaises(TypeError, self.s.symmetric_difference_update, [[]])

    def test_isub(self):
        self.s -= set(self.items2)
        for c in self.items + self.items2:
            if c in self.items and c not in self.items2:
                self.assert_(c in self.s)
            else:
                self.assert_(c not in self.s)

    def test_symmetric_difference_update(self):
        retval = self.s.symmetric_difference_update(self.items2)
        self.assertEqual(retval, None)
        for c in self.items + self.items2:
            if (c in self.items) ^ (c in self.items2):
                self.assert_(c in self.s)
            else:
                self.assert_(c not in self.s)
        self.assertRaises(TypeError, self.s.symmetric_difference_update, [[]])

    def test_ixor(self):
        self.s ^= set(self.items2)
        for c in self.items + self.items2:
            if (c in self.items) ^ (c in self.items2):
                self.assert_(c in self.s)
            else:
                self.assert_(c not in self.s)

    def test_inplace_on_self(self):
        t = self.s.copy()
        t |= t
        self.assertEqual(t, self.s)
        t &= t
        self.assertEqual(t, self.s)
        t -= t
        self.assertEqual(t, WeakSet())
        t = self.s.copy()
        t ^= t
        self.assertEqual(t, WeakSet())

    def test_eq(self):
        # issue 5964
        self.assertTrue(self.s == self.s)
        self.assertTrue(self.s == WeakSet(self.items))
        self.assertFalse(self.s == set(self.items))
        self.assertFalse(self.s == list(self.items))
        self.assertFalse(self.s == tuple(self.items))
        self.assertFalse(self.s == WeakSet([Foo]))
        self.assertFalse(self.s == 1)
Exemplo n.º 29
0
class TestWeakSet(unittest.TestCase):

    def setUp(self):
        # need to keep references to them
        self.items = [SomeClass(c) for c in ('a', 'b', 'c')]
        self.items2 = [SomeClass(c) for c in ('x', 'y', 'z')]
        self.letters = [SomeClass(c) for c in string.ascii_letters]
        self.ab_items = [SomeClass(c) for c in 'ab']
        self.abcde_items = [SomeClass(c) for c in 'abcde']
        self.def_items = [SomeClass(c) for c in 'def']
        self.ab_weakset = WeakSet(self.ab_items)
        self.abcde_weakset = WeakSet(self.abcde_items)
        self.def_weakset = WeakSet(self.def_items)
        self.s = WeakSet(self.items)
        self.d = dict.fromkeys(self.items)
        self.obj = SomeClass('F')
        self.fs = WeakSet([self.obj])

    def test_methods(self):
        weaksetmethods = dir(WeakSet)
        for method in dir(set):
            if method == 'test_c_api' or method.startswith('_'):
                continue
            self.assertIn(method, weaksetmethods,
                         "WeakSet missing method " + method)

    def test_new_or_init(self):
        self.assertRaises(TypeError, WeakSet, [], 2)

    def test_len(self):
        self.assertEqual(len(self.s), len(self.d))
        self.assertEqual(len(self.fs), 1)
        del self.obj
        test_support.gc_collect()
        # len of weak collections is eventually consistent on
        # Jython. In practice this does not matter because of the
        # nature of weaksets - we cannot rely on what happens in the
        # reaper thread and how it interacts with gc
        self.assertIn(len(self.fs), (0, 1))

    def test_contains(self):
        for c in self.letters:
            self.assertEqual(c in self.s, c in self.d)
        # 1 is not weakref'able, but that TypeError is caught by __contains__
        self.assertNotIn(1, self.s)
        self.assertIn(self.obj, self.fs)
        del self.obj
        test_support.gc_collect()
        self.assertNotIn(SomeClass('F'), self.fs)

    def test_union(self):
        u = self.s.union(self.items2)
        for c in self.letters:
            self.assertEqual(c in u, c in self.d or c in self.items2)
        self.assertEqual(self.s, WeakSet(self.items))
        self.assertEqual(type(u), WeakSet)
        self.assertRaises(TypeError, self.s.union, [[]])
        for C in set, frozenset, dict.fromkeys, list, tuple:
            x = WeakSet(self.items + self.items2)
            c = C(self.items2)
            self.assertEqual(self.s.union(c), x)
            del c
            test_support.gc_collect()
        self.assertEqual(len(list(u)), len(list(self.items)) + len(list(self.items2)))
        self.items2.pop()
        test_support.gc_collect()
        self.assertEqual(len(list(u)), len(list(self.items)) + len(list(self.items2)))

    def test_or(self):
        i = self.s.union(self.items2)
        self.assertEqual(self.s | set(self.items2), i)
        self.assertEqual(self.s | frozenset(self.items2), i)

    def test_intersection(self):
        s = WeakSet(self.letters)
        i = s.intersection(self.items2)
        for c in self.letters:
            self.assertEqual(c in i, c in self.items2 and c in self.letters)
        self.assertEqual(s, WeakSet(self.letters))
        self.assertEqual(type(i), WeakSet)
        for C in set, frozenset, dict.fromkeys, list, tuple:
            x = WeakSet([])
            self.assertEqual(i.intersection(C(self.items)), x)
        self.assertEqual(len(i), len(self.items2))
        self.items2.pop()
        test_support.gc_collect()
        self.assertEqual(len(list(i)), len(list(self.items2)))

    def test_isdisjoint(self):
        self.assertTrue(self.s.isdisjoint(WeakSet(self.items2)))
        self.assertTrue(not self.s.isdisjoint(WeakSet(self.letters)))

    def test_and(self):
        i = self.s.intersection(self.items2)
        self.assertEqual(self.s & set(self.items2), i)
        self.assertEqual(self.s & frozenset(self.items2), i)

    def test_difference(self):
        i = self.s.difference(self.items2)
        for c in self.letters:
            self.assertEqual(c in i, c in self.d and c not in self.items2)
        self.assertEqual(self.s, WeakSet(self.items))
        self.assertEqual(type(i), WeakSet)
        self.assertRaises(TypeError, self.s.difference, [[]])

    def test_sub(self):
        i = self.s.difference(self.items2)
        self.assertEqual(self.s - set(self.items2), i)
        self.assertEqual(self.s - frozenset(self.items2), i)

    def test_symmetric_difference(self):
        i = self.s.symmetric_difference(self.items2)
        for c in self.letters:
            self.assertEqual(c in i, (c in self.d) ^ (c in self.items2))
        self.assertEqual(self.s, WeakSet(self.items))
        self.assertEqual(type(i), WeakSet)
        self.assertRaises(TypeError, self.s.symmetric_difference, [[]])
        self.assertEqual(len(i), len(self.items) + len(self.items2))
        self.items2.pop()
        test_support.gc_collect()
        self.assertEqual(len(list(i)), len(list(self.items)) + len(list(self.items2)))

    def test_xor(self):
        i = self.s.symmetric_difference(self.items2)
        self.assertEqual(self.s ^ set(self.items2), i)
        self.assertEqual(self.s ^ frozenset(self.items2), i)

    def test_sub_and_super(self):
        self.assertTrue(self.ab_weakset <= self.abcde_weakset)
        self.assertTrue(self.abcde_weakset <= self.abcde_weakset)
        self.assertTrue(self.abcde_weakset >= self.ab_weakset)
        self.assertFalse(self.abcde_weakset <= self.def_weakset)
        self.assertFalse(self.abcde_weakset >= self.def_weakset)
        self.assertTrue(set('a').issubset('abc'))
        self.assertTrue(set('abc').issuperset('a'))
        self.assertFalse(set('a').issubset('cbs'))
        self.assertFalse(set('cbs').issuperset('a'))

    def test_lt(self):
        self.assertTrue(self.ab_weakset < self.abcde_weakset)
        self.assertFalse(self.abcde_weakset < self.def_weakset)
        self.assertFalse(self.ab_weakset < self.ab_weakset)
        self.assertFalse(WeakSet() < WeakSet())

    def test_gt(self):
        self.assertTrue(self.abcde_weakset > self.ab_weakset)
        self.assertFalse(self.abcde_weakset > self.def_weakset)
        self.assertFalse(self.ab_weakset > self.ab_weakset)
        self.assertFalse(WeakSet() > WeakSet())

    def test_gc(self):
        # Create a nest of cycles to exercise overall ref count check
        s = WeakSet(Foo() for i in range(1000))
        for elem in s:
            elem.cycle = s
            elem.sub = elem
            elem.set = WeakSet([elem])

    def test_subclass_with_custom_hash(self):
        # Bug #1257731
        class H(WeakSet):
            def __hash__(self):
                return int(id(self) & 0x7fffffff)
        s = H()
        f = set()
        f.add(s)
        self.assertIn(s, f)
        f.remove(s)
        f.add(s)
        f.discard(s)

    def test_init(self):
        s = WeakSet()
        s.__init__(self.items)
        self.assertEqual(s, self.s)
        s.__init__(self.items2)
        self.assertEqual(s, WeakSet(self.items2))
        self.assertRaises(TypeError, s.__init__, s, 2)
        self.assertRaises(TypeError, s.__init__, 1)

    def test_constructor_identity(self):
        s = WeakSet(self.items)
        t = WeakSet(s)
        self.assertNotEqual(id(s), id(t))

    def test_hash(self):
        self.assertRaises(TypeError, hash, self.s)

    def test_clear(self):
        self.s.clear()
        self.assertEqual(self.s, WeakSet([]))
        self.assertEqual(len(self.s), 0)

    def test_copy(self):
        dup = self.s.copy()
        self.assertEqual(self.s, dup)
        self.assertNotEqual(id(self.s), id(dup))

    def test_add(self):
        x = SomeClass('Q')
        self.s.add(x)
        self.assertIn(x, self.s)
        dup = self.s.copy()
        self.s.add(x)
        self.assertEqual(self.s, dup)
        if not test_support.is_jython:  # Jython/JVM can weakly reference list and other objects
            self.assertRaises(TypeError, self.s.add, [])
        self.fs.add(Foo())
        test_support.gc_collect()  # CPython assumes Foo() went out of scope and was collected, so ensure the same
        self.assertEqual(len(list(self.fs)), 1)
        self.fs.add(self.obj)
        self.assertEqual(len(list(self.fs)), 1)

    def test_remove(self):
        x = SomeClass('a')
        self.s.remove(x)
        self.assertNotIn(x, self.s)
        self.assertRaises(KeyError, self.s.remove, x)
        if not test_support.is_jython:  # Jython/JVM can weakly reference list and other objects
            self.assertRaises(TypeError, self.s.remove, [])

    def test_discard(self):
        a, q = SomeClass('a'), SomeClass('Q')
        self.s.discard(a)
        self.assertNotIn(a, self.s)
        self.s.discard(q)
        if not test_support.is_jython:  # Jython/JVM can weakly reference list and other objects
            self.assertRaises(TypeError, self.s.discard, [])

    def test_pop(self):
        for i in range(len(self.s)):
            elem = self.s.pop()
            self.assertNotIn(elem, self.s)
        self.assertRaises(KeyError, self.s.pop)

    def test_update(self):
        retval = self.s.update(self.items2)
        self.assertEqual(retval, None)
        for c in (self.items + self.items2):
            self.assertIn(c, self.s)
        self.assertRaises(TypeError, self.s.update, [[]])

    def test_update_set(self):
        self.s.update(set(self.items2))
        for c in (self.items + self.items2):
            self.assertIn(c, self.s)

    def test_ior(self):
        self.s |= set(self.items2)
        for c in (self.items + self.items2):
            self.assertIn(c, self.s)

    def test_intersection_update(self):
        retval = self.s.intersection_update(self.items2)
        self.assertEqual(retval, None)
        for c in (self.items + self.items2):
            if c in self.items2 and c in self.items:
                self.assertIn(c, self.s)
            else:
                self.assertNotIn(c, self.s)
        self.assertRaises(TypeError, self.s.intersection_update, [[]])

    def test_iand(self):
        self.s &= set(self.items2)
        for c in (self.items + self.items2):
            if c in self.items2 and c in self.items:
                self.assertIn(c, self.s)
            else:
                self.assertNotIn(c, self.s)

    def test_difference_update(self):
        retval = self.s.difference_update(self.items2)
        self.assertEqual(retval, None)
        for c in (self.items + self.items2):
            if c in self.items and c not in self.items2:
                self.assertIn(c, self.s)
            else:
                self.assertNotIn(c, self.s)
        if not test_support.is_jython:  # Jython/JVM can weakly reference list and other objects
            self.assertRaises(TypeError, self.s.difference_update, [[]])
            self.assertRaises(TypeError, self.s.symmetric_difference_update, [[]])

    def test_isub(self):
        self.s -= set(self.items2)
        for c in (self.items + self.items2):
            if c in self.items and c not in self.items2:
                self.assertIn(c, self.s)
            else:
                self.assertNotIn(c, self.s)

    def test_symmetric_difference_update(self):
        retval = self.s.symmetric_difference_update(self.items2)
        self.assertEqual(retval, None)
        for c in (self.items + self.items2):
            if (c in self.items) ^ (c in self.items2):
                self.assertIn(c, self.s)
            else:
                self.assertNotIn(c, self.s)
        self.assertRaises(TypeError, self.s.symmetric_difference_update, [[]])

    def test_ixor(self):
        self.s ^= set(self.items2)
        for c in (self.items + self.items2):
            if (c in self.items) ^ (c in self.items2):
                self.assertIn(c, self.s)
            else:
                self.assertNotIn(c, self.s)

    def test_inplace_on_self(self):
        t = self.s.copy()
        t |= t
        self.assertEqual(t, self.s)
        t &= t
        self.assertEqual(t, self.s)
        t -= t
        self.assertEqual(t, WeakSet())
        t = self.s.copy()
        t ^= t
        self.assertEqual(t, WeakSet())

    def test_eq(self):
        # issue 5964 (http://bugs.python.org/issue5964)
        self.assertEqual(self.s, self.s)
        self.assertEqual(self.s, WeakSet(self.items))
        # Jython diverges here in the next test because it constructs
        # WeakSet as a subclass of set; this seems to be the proper
        # thing to do given how such comparisons are typically used
        self.assertEqual(self.s, set(self.items))
        self.assertNotEqual(self.s, list(self.items))
        self.assertNotEqual(self.s, tuple(self.items))
        self.assertNotEqual(self.s, 1)

    def test_weak_destroy_while_iterating(self):
        # Issue #7105: iterators shouldn't crash when a key is implicitly removed
        # Create new items to be sure no-one else holds a reference
        items = [SomeClass(c) for c in ('a', 'b', 'c')]
        s = WeakSet(items)
        it = iter(s)
        next(it)             # Trigger internal iteration
        # Destroy an item
        del items[-1]
        test_support.gc_collect()    # just in case
        # We have removed either the first consumed item, or another one
        self.assertIn(len(list(it)), [len(items), len(items) - 1])
        del it
        extra_collect()
        # The removal has been committed
        self.assertEqual(len(s), len(items))

    def test_weak_destroy_and_mutate_while_iterating(self):
        # Issue #7105: iterators shouldn't crash when a key is implicitly removed
        items = [SomeClass(c) for c in string.ascii_letters]
        s = WeakSet(items)
        @contextlib.contextmanager
        def testcontext():
            try:
                it = iter(s)
                next(it)
                # Schedule an item for removal and recreate it
                u = SomeClass(str(items.pop()))
                test_support.gc_collect()      # just in case
                yield u
            finally:
                it = None           # should commit all removals

        test_support.gc_collect()

        with testcontext() as u:
            self.assertNotIn(u, s)
        with testcontext() as u:
            self.assertRaises(KeyError, s.remove, u)
        self.assertNotIn(u, s)
        with testcontext() as u:
            s.add(u)
        self.assertIn(u, s)
        t = s.copy()
        with testcontext() as u:
            s.update(t)
        self.assertEqual(len(s), len(t))
        with testcontext() as u:
            s.clear()
        self.assertEqual(len(s), 0)

    def test_len_cycles(self):
        N = 20
        items = [RefCycle() for i in range(N)]
        s = WeakSet(items)
        del items
        # do some gc
        test_support.gc_collect()
        it = iter(s)
        try:
            next(it)
        except StopIteration:
            pass

        # do some gc
        test_support.gc_collect()

        n1 = len(s)
        del it
        # do some gc
        test_support.gc_collect()

        n2 = len(s)
        # one item may be kept alive inside the iterator
        self.assertIn(n1, (0, 1))
        self.assertEqual(n2, 0)

    @unittest.skipIf(test_support.is_jython, "GarbageCollection not deterministic in Jython")
    def test_len_race(self):
        # Extended sanity checks for len() in the face of cyclic collection
        self.addCleanup(gc.set_threshold, *gc.get_threshold())
        for th in range(1, 100):
            N = 20
            gc.collect(0)
            gc.set_threshold(th, th, th)
            items = [RefCycle() for i in range(N)]
            s = WeakSet(items)
            del items
            # All items will be collected at next garbage collection pass
            it = iter(s)
            try:
                next(it)
            except StopIteration:
                pass
            n1 = len(s)
            del it
            n2 = len(s)
            self.assertGreaterEqual(n1, 0)
            self.assertLessEqual(n1, N)
            self.assertGreaterEqual(n2, 0)
            self.assertLessEqual(n2, n1)
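The two tests above treat len() on a WeakSet as only eventually consistent because objects caught in reference cycles linger until the cyclic collector runs. A minimal, self-contained sketch of that effect (Node is an illustrative stand-in for RefCycle):

import gc
from weakref import WeakSet

class Node:
    def __init__(self):
        self.self_ref = self   # a reference cycle: only the cyclic collector can reclaim it

nodes = [Node() for _ in range(5)]
s = WeakSet(nodes)
del nodes                      # external references gone, but the cycles keep the objects alive
gc.collect()                   # reclaim the cycles; the WeakSet entries disappear with them
assert len(s) == 0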
Exemplo n.º 30
0
class TileableData(EntityData, _ExecutableMixin):
    __slots__ = "_cix", "_entities", "_executed_sessions"
    _no_copy_attrs_ = Base._no_copy_attrs_ | {"_cix"}

    # optional fields
    # `nsplits` means the sizes of chunks for each dimension
    _nsplits = TupleField(
        "nsplits",
        FieldTypes.tuple(FieldTypes.uint64),
        on_serialize=on_serialize_nsplits,
    )

    def __init__(self: TileableType, *args, **kwargs):
        if kwargs.get("_nsplits", None) is not None:
            kwargs["_nsplits"] = tuple(tuple(s) for s in kwargs["_nsplits"])

        super().__init__(*args, **kwargs)

        try:
            chunks = self._chunks
            if chunks:
                self._chunks = sorted(chunks, key=attrgetter("index"))
        except AttributeError:  # pragma: no cover
            pass

        self._entities = WeakSet()
        self._executed_sessions = []

    @property
    def chunk_shape(self):
        if hasattr(self, "_nsplits") and self._nsplits is not None:
            return tuple(map(len, self._nsplits))

    @property
    def chunks(self) -> List[Chunk]:
        return getattr(self, "_chunks", None)

    @property
    def nsplits(self):
        return getattr(self, "_nsplits", None)

    @nsplits.setter
    def nsplits(self, new_nsplits):
        self._nsplits = new_nsplits

    @property
    def params(self) -> dict:
        # params returns the properties that are useful to rebuild a new tileable object
        return dict()

    @property
    def cix(self):
        if self.ndim == 0:
            return _ChunksIndexer(self)

        try:
            if getattr(self, "_cix", None) is None:
                self._cix = _ChunksIndexer(self)
            return self._cix
        except (TypeError, ValueError):
            return _ChunksIndexer(self)

    @property
    def entities(self):
        return self._entities

    def is_coarse(self):
        if not hasattr(self, "_chunks"):
            return True
        if not self._chunks:
            return True
        return False

    def attach(self, entity):
        self._entities.add(entity)

    def detach(self, entity):
        self._entities.discard(entity)
Exemplo n.º 31
0
class WindowManager(Manager):

    DIAGNOSTIC_PHANTOM_KEY = "lsp_diagnostic_phantom"

    def __init__(
        self,
        window: sublime.Window,
        workspace: ProjectFolders,
        configs: WindowConfigManager,
    ) -> None:
        self._window = window
        self._configs = configs
        self._sessions = WeakSet()  # type: WeakSet[Session]
        self._workspace = workspace
        self._pending_listeners = deque()  # type: Deque[AbstractViewListener]
        self._listeners = WeakSet()  # type: WeakSet[AbstractViewListener]
        self._new_listener = None  # type: Optional[AbstractViewListener]
        self._new_session = None  # type: Optional[Session]
        self._diagnostic_phantom_set = None  # type: Optional[sublime.PhantomSet]
        self._panel_code_phantoms = None  # type: Optional[sublime.PhantomSet]
        self.total_error_count = 0
        self.total_warning_count = 0
        sublime.set_timeout(
            functools.partial(self._update_panel_main_thread,
                              _NO_DIAGNOSTICS_PLACEHOLDER, []))

    def get_config_manager(self) -> WindowConfigManager:
        return self._configs

    def on_load_project_async(self) -> None:
        self.update_workspace_folders_async()
        self._configs.update()

    def on_post_save_project_async(self) -> None:
        self.on_load_project_async()

    def update_workspace_folders_async(self) -> None:
        if self._workspace.update():
            workspace_folders = self._workspace.get_workspace_folders()
            for session in self._sessions:
                session.update_folders(workspace_folders)

    def enable_config_async(self, config_name: str) -> None:
        self._configs.enable_config(config_name)

    def disable_config_async(self, config_name: str) -> None:
        self._configs.disable_config(config_name)

    def open_location_async(self,
                            location: Location,
                            session_name: Optional[str],
                            view: sublime.View,
                            flags: int = 0,
                            group: int = -1) -> Promise[bool]:
        for session in self.sessions(view):
            if session_name is None or session_name == session.config.name:
                return session.open_location_async(location, flags, group)
        return Promise.resolve(False)

    def register_listener_async(self, listener: AbstractViewListener) -> None:
        set_diagnostics_count(listener.view, self.total_error_count,
                              self.total_warning_count)
        # Update workspace folders in case the user has changed them since the window was created.
        # There is currently no notification in ST about folder changes.
        self.update_workspace_folders_async()
        self._pending_listeners.appendleft(listener)
        if self._new_listener is None:
            self._dequeue_listener_async()

    def unregister_listener_async(self,
                                  listener: AbstractViewListener) -> None:
        self._listeners.discard(listener)

    def listeners(self) -> Generator[AbstractViewListener, None, None]:
        yield from self._listeners

    def listener_for_view(
            self, view: sublime.View) -> Optional[AbstractViewListener]:
        for listener in self.listeners():
            if listener.view == view:
                return listener
        return None

    def _dequeue_listener_async(self) -> None:
        listener = None  # type: Optional[AbstractViewListener]
        if self._new_listener is not None:
            listener = self._new_listener
            # debug("re-checking listener", listener)
            self._new_listener = None
        else:
            try:
                listener = self._pending_listeners.pop()
                if not listener.view.is_valid():
                    # debug("listener", listener, "is no longer valid")
                    return self._dequeue_listener_async()
                # debug("adding new pending listener", listener)
                self._listeners.add(listener)
            except IndexError:
                # We have handled all pending listeners.
                self._new_session = None
                return
        if self._new_session:
            self._sessions.add(self._new_session)
        self._publish_sessions_to_listener_async(listener)
        if self._new_session:
            if not any(self._new_session.session_views_async()):
                self._sessions.discard(self._new_session)
                self._new_session.end_async()
            self._new_session = None
        config = self._needed_config(listener.view)
        if config:
            # debug("found new config for listener", listener)
            self._new_listener = listener
            self.start_async(config, listener.view)
        else:
            # debug("no new config found for listener", listener)
            self._new_listener = None
            self._dequeue_listener_async()

    def _publish_sessions_to_listener_async(
            self, listener: AbstractViewListener) -> None:
        inside_workspace = self._workspace.contains(listener.view)
        scheme = urllib.parse.urlparse(listener.get_uri()).scheme
        for session in self._sessions:
            if session.can_handle(listener.view,
                                  scheme,
                                  capability=None,
                                  inside_workspace=inside_workspace):
                # debug("registering session", session.config.name, "to listener", listener)
                try:
                    listener.on_session_initialized_async(session)
                except Exception as ex:
                    message = "failed to register session {} to listener {}".format(
                        session.config.name, listener)
                    exception_log(message, ex)

    def window(self) -> sublime.Window:
        return self._window

    def sessions(
            self,
            view: sublime.View,
            capability: Optional[str] = None
    ) -> Generator[Session, None, None]:
        inside_workspace = self._workspace.contains(view)
        sessions = list(self._sessions)
        uri = view.settings().get("lsp_uri")
        if not isinstance(uri, str):
            return
        scheme = urllib.parse.urlparse(uri).scheme
        for session in sessions:
            if session.can_handle(view, scheme, capability, inside_workspace):
                yield session

    def get_session(self, config_name: str,
                    file_path: str) -> Optional[Session]:
        return self._find_session(config_name, file_path)

    def _can_start_config(self, config_name: str, file_path: str) -> bool:
        return not bool(self._find_session(config_name, file_path))

    def _find_session(self, config_name: str,
                      file_path: str) -> Optional[Session]:
        inside = self._workspace.contains(file_path)
        for session in self._sessions:
            if session.config.name == config_name and session.handles_path(
                    file_path, inside):
                return session
        return None

    def _needed_config(self, view: sublime.View) -> Optional[ClientConfig]:
        configs = self._configs.match_view(view)
        handled = False
        file_name = view.file_name()
        inside = self._workspace.contains(view)
        for config in configs:
            handled = False
            for session in self._sessions:
                if config.name == session.config.name and session.handles_path(
                        file_name, inside):
                    handled = True
                    break
            if not handled:
                return config
        return None

    def start_async(self, config: ClientConfig,
                    initiating_view: sublime.View) -> None:
        config = ClientConfig.from_config(config, {})
        file_path = initiating_view.file_name() or ''
        if not self._can_start_config(config.name, file_path):
            # debug('Already starting on this window:', config.name)
            return
        try:
            workspace_folders = sorted_workspace_folders(
                self._workspace.folders, file_path)
            plugin_class = get_plugin(config.name)
            variables = extract_variables(self._window)
            cwd = None  # type: Optional[str]
            if plugin_class is not None:
                if plugin_class.needs_update_or_installation():
                    config.set_view_status(initiating_view, "installing...")
                    plugin_class.install_or_update()
                additional_variables = plugin_class.additional_variables()
                if isinstance(additional_variables, dict):
                    variables.update(additional_variables)
                cannot_start_reason = plugin_class.can_start(
                    self._window, initiating_view, workspace_folders, config)
                if cannot_start_reason:
                    config.erase_view_status(initiating_view)
                    message = "cannot start {}: {}".format(
                        config.name, cannot_start_reason)
                    self._configs.disable_config(config.name,
                                                 only_for_session=True)
                    # Continue with handling pending listeners
                    self._new_session = None
                    sublime.set_timeout_async(self._dequeue_listener_async)
                    return self._window.status_message(message)
                cwd = plugin_class.on_pre_start(self._window, initiating_view,
                                                workspace_folders, config)
            config.set_view_status(initiating_view, "starting...")
            session = Session(self, self._create_logger(config.name),
                              workspace_folders, config, plugin_class)
            if cwd:
                transport_cwd = cwd  # type: Optional[str]
            else:
                transport_cwd = workspace_folders[
                    0].path if workspace_folders else None
            transport_config = config.resolve_transport_config(variables)
            transport = create_transport(transport_config, transport_cwd,
                                         session)
            if plugin_class:
                plugin_class.on_post_start(self._window, initiating_view,
                                           workspace_folders, config)
            config.set_view_status(initiating_view, "initialize")
            session.initialize_async(variables=variables,
                                     transport=transport,
                                     working_directory=cwd,
                                     init_callback=functools.partial(
                                         self._on_post_session_initialize,
                                         initiating_view))
            self._new_session = session
        except Exception as e:
            message = "".join((
                "Failed to start {0} - disabling for this window for the duration of the current session.\n",
                "Re-enable by running \"LSP: Enable Language Server In Project\" from the Command Palette.",
                "\n\n--- Error: ---\n{1}")).format(config.name, str(e))
            exception_log(
                "Unable to start subprocess for {}".format(config.name), e)
            if isinstance(e, CalledProcessError):
                print("Server output:\n{}".format(
                    e.output.decode('utf-8', 'replace')))
            self._configs.disable_config(config.name, only_for_session=True)
            config.erase_view_status(initiating_view)
            sublime.message_dialog(message)
            # Continue with handling pending listeners
            self._new_session = None
            sublime.set_timeout_async(self._dequeue_listener_async)

    def _on_post_session_initialize(self,
                                    initiating_view: sublime.View,
                                    session: Session,
                                    is_error: bool = False) -> None:
        if is_error:
            session.config.erase_view_status(initiating_view)
            self._new_listener = None
            self._new_session = None
        else:
            sublime.set_timeout_async(self._dequeue_listener_async)

    def _create_logger(self, config_name: str) -> Logger:
        logger_map = {
            "panel": PanelLogger,
            "remote": RemoteLogger,
        }
        loggers = []
        for logger_type in userprefs().log_server:
            if logger_type not in logger_map:
                debug(
                    "Invalid logger type ({}) specified for log_server settings"
                    .format(logger_type))
                continue
            loggers.append(logger_map[logger_type])
        if len(loggers) == 0:
            return RouterLogger()  # logs nothing
        elif len(loggers) == 1:
            return loggers[0](self, config_name)
        else:
            router_logger = RouterLogger()
            for logger in loggers:
                router_logger.append(logger(self, config_name))
            return router_logger

    def handle_message_request(self, session: Session, params: Any,
                               request_id: Any) -> None:
        view = self._window.active_view()
        if view:
            MessageRequestHandler(view, session, request_id, params,
                                  session.config.name).show()

    def restart_sessions_async(self) -> None:
        self._end_sessions_async()
        listeners = list(self._listeners)
        self._listeners.clear()
        for listener in listeners:
            self.register_listener_async(listener)

    def _end_sessions_async(self) -> None:
        for session in self._sessions:
            session.end_async()
        self._sessions.clear()

    def end_config_sessions_async(self, config_name: str) -> None:
        sessions = list(self._sessions)
        for session in sessions:
            if session.config.name == config_name:
                session.end_async()
                self._sessions.discard(session)

    def get_project_path(self, file_path: str) -> Optional[str]:
        candidate = None  # type: Optional[str]
        for folder in self._workspace.folders:
            if file_path.startswith(folder):
                if candidate is None or len(folder) > len(candidate):
                    candidate = folder
        return candidate

    def should_present_diagnostics(self, uri: DocumentUri) -> Optional[str]:
        scheme, path = parse_uri(uri)
        if scheme != "file":
            return None
        if not self._workspace.contains(path):
            return "not inside window folders"
        view = self._window.active_view()
        if not view:
            return None
        settings = view.settings()
        if matches_pattern(path, settings.get("binary_file_patterns")):
            return "matches a pattern in binary_file_patterns"
        if matches_pattern(path, settings.get("file_exclude_patterns")):
            return "matches a pattern in file_exclude_patterns"
        if matches_pattern(path, settings.get("folder_exclude_patterns")):
            return "matches a pattern in folder_exclude_patterns"
        return None

    def on_post_exit_async(self, session: Session, exit_code: int,
                           exception: Optional[Exception]) -> None:
        self._sessions.discard(session)
        for listener in self._listeners:
            listener.on_session_shutdown_async(session)
        if exit_code != 0 or exception:
            config = session.config
            msg = "".join((
                "{0} exited with status code {1}. ",
                "Do you want to restart it? If you choose Cancel, it will be disabled for this window for the ",
                "duration of the current session. ",
                "Re-enable by running \"LSP: Enable Language Server In Project\" from the Command Palette."
            )).format(config.name, exit_code)
            if exception:
                msg += "\n\n--- Error: ---\n{}".format(str(exception))
            if sublime.ok_cancel_dialog(msg, "Restart {}".format(config.name)):
                for listener in self._listeners:
                    self.register_listener_async(listener)
            else:
                self._configs.disable_config(config.name,
                                             only_for_session=True)

    def plugin_unloaded(self) -> None:
        """
        This is called **from the main thread** when the plugin unloads. In that case we must destroy all sessions
        from the main thread. That could lead to some dict/list being mutated while iterated over, so be careful.
        """
        self._end_sessions_async()

    def handle_server_message(self, server_name: str, message: str) -> None:
        sublime.set_timeout(
            lambda: log_server_message(self._window, server_name, message))

    def handle_log_message(self, session: Session, params: Any) -> None:
        self.handle_server_message(session.config.name,
                                   extract_message(params))

    def handle_stderr_log(self, session: Session, message: str) -> None:
        self.handle_server_message(session.config.name, message)

    def handle_show_message(self, session: Session, params: Any) -> None:
        sublime.status_message("{}: {}".format(session.config.name,
                                               extract_message(params)))

    def update_diagnostics_panel_async(self) -> None:
        to_render = []  # type: List[str]
        self.total_error_count = 0
        self.total_warning_count = 0
        listeners = list(self._listeners)
        prephantoms = []  # type: List[Tuple[int, int, str, str]]
        row = 0
        contributions = OrderedDict(
        )  # type: OrderedDict[str, List[Tuple[str, Optional[int], Optional[str], Optional[str]]]]
        for session in self._sessions:
            local_errors, local_warnings = session.diagnostics_manager.sum_total_errors_and_warnings_async(
            )
            self.total_error_count += local_errors
            self.total_warning_count += local_warnings
            for path, contribution in session.diagnostics_manager.diagnostics_panel_contributions_async(
            ):
                seen = path in contributions
                contributions.setdefault(path, []).extend(contribution)
                if not seen:
                    contributions.move_to_end(path)
        for path, contribution in contributions.items():
            to_render.append("{}:".format(path))
            row += 1
            for content, offset, code, href in contribution:
                to_render.append(content)
                if offset is not None and code is not None and href is not None:
                    prephantoms.append((row, offset, code, href))
                row += content.count("\n") + 1
            to_render.append("")  # add spacing between filenames
            row += 1
        for listener in listeners:
            set_diagnostics_count(listener.view, self.total_error_count,
                                  self.total_warning_count)
        characters = "\n".join(to_render)
        if not characters:
            characters = _NO_DIAGNOSTICS_PLACEHOLDER
        sublime.set_timeout(
            functools.partial(self._update_panel_main_thread, characters,
                              prephantoms))

    def _update_panel_main_thread(
            self, characters: str, prephantoms: List[Tuple[int, int, str,
                                                           str]]) -> None:
        panel = ensure_diagnostics_panel(self._window)
        if not panel or not panel.is_valid():
            return
        panel.run_command("lsp_update_panel", {"characters": characters})
        if self._panel_code_phantoms is None:
            self._panel_code_phantoms = sublime.PhantomSet(panel, "hrefs")
        phantoms = []  # type: List[sublime.Phantom]
        for row, col, code, href in prephantoms:
            point = panel.text_point(row, col)
            region = sublime.Region(point, point)
            phantoms.append(
                sublime.Phantom(region, make_link(href, code),
                                sublime.LAYOUT_INLINE))
        self._panel_code_phantoms.update(phantoms)

    def show_diagnostics_panel_async(self) -> None:
        if self._window.active_panel() is None:
            self._window.run_command("show_panel",
                                     {"panel": "output.diagnostics"})
Example no. 32
0
class AsyncoreLoop(object):

    def __init__(self):
        self._pid = os.getpid()
        self._loop_lock = Lock()
        self._started = False
        self._shutdown = False

        self._conns_lock = Lock()
        self._conns = WeakSet()
        self._thread = None
        atexit.register(partial(_cleanup, weakref.ref(self)))

    def maybe_start(self):
        should_start = False
        did_acquire = False
        try:
            did_acquire = self._loop_lock.acquire(False)
            if did_acquire and not self._started:
                self._started = True
                should_start = True
        finally:
            if did_acquire:
                self._loop_lock.release()

        if should_start:
            self._thread = Thread(target=self._run_loop, name="cassandra_driver_event_loop")
            self._thread.daemon = True
            self._thread.start()

    def _run_loop(self):
        log.debug("Starting asyncore event loop")
        with self._loop_lock:
            while True:
                try:
                    asyncore.loop(timeout=0.001, use_poll=True, count=1000)
                except Exception:
                    log.debug("Asyncore event loop stopped unexepectedly", exc_info=True)
                    break

                if self._shutdown:
                    break

                with self._conns_lock:
                    if len(self._conns) == 0:
                        break

            self._started = False

        log.debug("Asyncore event loop ended")

    def _cleanup(self):
        self._shutdown = True
        if not self._thread:
            return

        log.debug("Waiting for event loop thread to join...")
        self._thread.join(timeout=1.0)
        if self._thread.is_alive():
            log.warning(
                "Event loop thread could not be joined, so shutdown may not be clean. "
                "Please call Cluster.shutdown() to avoid this.")

        log.debug("Event loop thread was joined")

    def connection_created(self, connection):
        with self._conns_lock:
            self._conns.add(connection)

    def connection_destroyed(self, connection):
        with self._conns_lock:
            self._conns.discard(connection)
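
The constructor above registers an atexit hook through weakref.ref(self), so the hook does not keep the loop object alive and becomes a no-op if the loop has already been garbage collected. A small stand-alone sketch of that pattern, with illustrative names rather than the driver's real API:

import atexit
import weakref
from functools import partial


def _cleanup(loop_ref):
    # Resolve the weak reference at interpreter shutdown; it returns None
    # if the loop was already garbage collected, so there is nothing to do.
    loop = loop_ref()
    if loop is not None:
        loop.shutdown()


class EventLoop:
    def __init__(self):
        self._shutdown = False
        # Pass a weak reference so the atexit registry does not hold a
        # strong reference to self.
        atexit.register(partial(_cleanup, weakref.ref(self)))

    def shutdown(self):
        self._shutdown = True
        print("event loop shut down")


loop = EventLoop()  # "event loop shut down" is printed at interpreter exit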
Example no. 33
0
class Privacy(LineOfSight):
    _PRIVACY_SURFACE_BLOCKING_FOOTPRINT_COST = 100000
    _PRIVACY_DISCOURAGEMENT_COST = routing.get_default_discouragement_cost()
    _SHOO_CONSTRAINT_RADIUS = Tunable(description='\n        The radius of the constraint a Shooed Sim will attempt to route to.\n        ', tunable_type=float, default=2.5)
    _UNAVAILABLE_TOOLTIP = TunableLocalizedStringFactory(description='\n        Tooltip displayed when an object is not accessible due to being inside\n        a privacy region.\n        ')
    _EMBARRASSED_AFFORDANCE = TunableReference(description='\n        The affordance a Sim will play when getting embarrassed by walking in\n        on a privacy situation.\n        ', manager=services.get_instance_manager(Types.INTERACTION))

    def __init__(self, *, interaction=None, tests=None, shoo_exempt_tests=None, max_line_of_sight_radius=None, map_divisions=None, simplification_ratio=None, boundary_epsilon=None, facing_offset=None, routing_surface_only=None, shoo_constraint_radius=None, unavailable_tooltip=None, embarrassed_affordance=None, reserved_surface_space=None, vehicle_tests=None, central_object=None, post_route_affordance=None, add_to_privacy_service=True, privacy_cost_override=None, additional_exit_offsets=None, persistent_instance=False, privacy_violators=None):
        super().__init__(max_line_of_sight_radius, map_divisions, simplification_ratio, boundary_epsilon)
        logger.assert_raise(bool(interaction) != bool(central_object), 'Privacy must define either one of interaction or central object, and never both.')
        self._max_line_of_sight_radius = max_line_of_sight_radius
        self._interaction = interaction
        self._tests = tests
        self._shoo_exempt_tests = shoo_exempt_tests
        self._privacy_constraints = []
        self._allowed_sims = WeakSet()
        self._disallowed_sims = WeakSet()
        self._violators = WeakSet()
        self._late_violators = WeakSet()
        self._exempt_sims = WeakSet()
        self.is_active = False
        self.has_shooed = False
        self.central_object = central_object
        self.additional_exit_offsets = additional_exit_offsets
        self._multi_surface = True
        self.persistent_instance = persistent_instance
        self._routing_surface_only = routing_surface_only
        self._shoo_constraint_radius = shoo_constraint_radius
        self._unavailable_tooltip = unavailable_tooltip
        self._embarrassed_affordance = embarrassed_affordance
        self._reserved_surface_space = reserved_surface_space
        self._post_route_affordance = post_route_affordance
        self._privacy_cost_override = privacy_cost_override
        self.privacy_violators = privacy_violators
        self._vehicle_tests = vehicle_tests
        self._pushed_interactions = []
        if add_to_privacy_service:
            self.add_privacy()

    @property
    def shoo_constraint_radius(self):
        return self._shoo_constraint_radius or self._SHOO_CONSTRAINT_RADIUS

    @property
    def unavailable_tooltip(self):
        return self._unavailable_tooltip or self._UNAVAILABLE_TOOLTIP

    @property
    def embarrassed_affordance(self):
        return self._embarrassed_affordance or self._EMBARRASSED_AFFORDANCE

    @property
    def privacy_discouragement_cost(self):
        return self._privacy_cost_override or self._PRIVACY_DISCOURAGEMENT_COST

    @property
    def interaction(self):
        return self._interaction

    @property
    def is_active(self) -> bool:
        return self._is_active

    @is_active.setter
    def is_active(self, value):
        self._is_active = value

    def _is_sim_allowed(self, sim):
        if self._tests:
            resolver = SingleSimResolver(sim.sim_info) if self._interaction is None else self._interaction.get_resolver(target=sim)
            if self._tests and self._tests.run_tests(resolver):
                return True
            elif self._interaction is not None and self._interaction.can_sim_violate_privacy(sim):
                return True
        if self._interaction is not None and self._interaction.can_sim_violate_privacy(sim):
            return True
        return False

    def evaluate_sim(self, sim):
        if self._is_sim_allowed(sim):
            self._allowed_sims.add(sim)
            return True
        self._disallowed_sims.add(sim)
        return False

    def build_privacy(self, target=None):
        self.is_active = True
        if self.central_object is None:
            target_object = self._interaction.get_participant(ParticipantType.Object)
            target_object = None if target_object.is_sim else target_object
            self.central_object = target_object or (target or self._interaction.sim)
        if self._routing_surface_only:
            allow_object_routing_surface = True
            routing_surface = self.central_object.provided_routing_surface
            if routing_surface is None:
                return False
        else:
            allow_object_routing_surface = False
            routing_surface = self.central_object.routing_surface
        self.generate(self.central_object.position, routing_surface, allow_object_routing_surface=allow_object_routing_surface)
        for poly in self.constraint.geometry.polygon:
            self._privacy_constraints.append(PolygonFootprint(poly, routing_surface=routing_surface, cost=self.privacy_discouragement_cost, footprint_type=FootprintType.FOOTPRINT_TYPE_PATH, enabled=True))
        if self._reserved_surface_space is not None and target is not None:
            reserved_space = self._reserved_surface_space.reserved_space
            try:
                polygon = _generate_single_poly_rectangle_points(target.position, target.part_owner.orientation.transform_vector(sims4.math.Vector3.Z_AXIS()), target.part_owner.orientation.transform_vector(sims4.math.Vector3.X_AXIS()), reserved_space.left, reserved_space.right, reserved_space.front, reserved_space.back)
            except AttributeError as exc:
                raise AttributeError('Interaction: {} is trying to reserve surface space with sim as target. Exception:{}'.format(self._interaction, exc))
            routing_surface = self.central_object.provided_routing_surface
            if routing_surface is None:
                routing_surface = target.routing_surface
            footprint_cost = self.privacy_discouragement_cost if self._reserved_surface_space.allow_routing else self._PRIVACY_SURFACE_BLOCKING_FOOTPRINT_COST
            self._privacy_constraints.append(PolygonFootprint(polygon, routing_surface=routing_surface, cost=footprint_cost, footprint_type=FootprintType.FOOTPRINT_TYPE_PATH, enabled=True))
        if self.privacy_violators & PrivacyViolators.SIM:
            if self._interaction is not None:
                self._allowed_sims.update(self._interaction.get_participants(ParticipantType.AllSims))
            for sim in services.sim_info_manager().instanced_sims_gen():
                if sim not in self._allowed_sims:
                    self.evaluate_sim(sim)
            violating_sims = self.find_violating_sims()
            self._exempt_sims = WeakSet(s for s in violating_sims if self.is_sim_shoo_exempt(s))
            self._cancel_unavailable_interactions(violating_sims)
            self._add_overrides_and_constraints_if_needed(violating_sims)
        if self.privacy_violators & PrivacyViolators.VEHICLES:
            violating_vehicles = self.find_violating_vehicles()
            for vehicle in violating_vehicles:
                vehicle.objectrouting_component.handle_privacy_violation(self)
        return True

    def cleanup_privacy_instance(self):
        if self.is_active:
            self.is_active = False
            for sim in self._allowed_sims:
                self.remove_override_for_sim(sim)
            for sim in self._late_violators:
                self.remove_override_for_sim(sim)
            del self._privacy_constraints[:]
            self._allowed_sims.clear()
            self._disallowed_sims.clear()
            self._violators.clear()
            self._late_violators.clear()
            self._exempt_sims.clear()
            self._cancel_pushed_interactions()

    def add_privacy(self):
        services.privacy_service().add_instance(self)

    def remove_privacy(self):
        self.cleanup_privacy_instance()
        services.privacy_service().remove_instance(self)

    def intersects_with_object(self, obj):
        if obj.routing_surface != self.central_object.routing_surface:
            return False
        delta = obj.position - self.central_object.position
        distance = delta.magnitude_2d_squared()
        if distance > self.max_line_of_sight_radius*self.max_line_of_sight_radius:
            return False
        object_footprint = obj.footprint_polygon
        if object_footprint is None:
            object_footprint = sims4.geometry.CompoundPolygon([sims4.geometry.Polygon([obj.position])])
        return self.constraint.geometry.polygon.intersects(object_footprint)

    def vehicle_violates_privacy(self, vehicle):
        if vehicle.objectrouting_component is None:
            return False
        if self._vehicle_tests is not None:
            resolver = SingleObjectResolver(vehicle)
            if self._vehicle_tests.run_tests(resolver):
                return False
            elif not self.intersects_with_object(vehicle):
                return False
        elif not self.intersects_with_object(vehicle):
            return False
        return True

    def find_violating_vehicles(self):
        violators = []
        privacy_service = services.privacy_service()
        for vehicle in privacy_service.get_potential_vehicle_violators():
            if self.vehicle_violates_privacy(vehicle):
                violators.append(vehicle)
        return violators

    def find_violating_sims(self, consider_exempt=True):
        if not self.is_active:
            return []
        if not self.privacy_violators & PrivacyViolators.SIM:
            return []
        check_all_surfaces_on_level = not self._routing_surface_only
        nearby_sims = placement.get_nearby_sims_gen(self.central_object.position, self._routing_surface, radius=self.max_line_of_sight_radius, exclude=self._allowed_sims, only_sim_position=True, check_all_surfaces_on_level=check_all_surfaces_on_level)
        violators = []
        for sim in nearby_sims:
            if consider_exempt and sim in self._exempt_sims:
                continue
            if any(sim_primitive.is_traversing_portal() for sim_primitive in sim.primitives if isinstance(sim_primitive, FollowPath)):
                continue
            if sim not in self._disallowed_sims and self.evaluate_sim(sim):
                continue
            if sims4.geometry.test_point_in_compound_polygon(sim.position, self.constraint.geometry.polygon):
                violators.append(sim)
        return violators

    def is_sim_shoo_exempt(self, sim):
        if sim in self._exempt_sims:
            return True
        if self.central_object.provided_routing_surface == sim.location.routing_surface:
            return False
        elif self._shoo_exempt_tests:
            resolver = SingleSimResolver(sim.sim_info)
            if self._shoo_exempt_tests.run_tests(resolver):
                return True
        return False

    def add_exempt_sim(self, sim):
        self._exempt_sims.add(sim)

    def _add_overrides_and_constraints_if_needed(self, violating_sims):
        for sim in self._allowed_sims:
            self.add_override_for_sim(sim)
        for sim in violating_sims:
            self._violators.add(sim)
            if sim in self._exempt_sims:
                continue
            liabilities = ((SHOO_LIABILITY, ShooLiability(self, sim)),)
            result = self._route_sim_away(sim, liabilities=liabilities)
            if result:
                self._pushed_interactions.append(result.interaction)

    def _cancel_unavailable_interactions(self, violating_sims):
        for sim in violating_sims:
            if sim in self._exempt_sims:
                continue
            interactions_to_cancel = set()
            if sim.queue.running is not None:
                interactions_to_cancel.add(sim.queue.running)
            for interaction in sim.si_state:
                if interaction.is_super:
                    if interaction.target is not None:
                        if sim.locked_from_obj_by_privacy(interaction.target):
                            interactions_to_cancel.add(interaction)
            for interaction in sim.queue:
                if interaction.target is not None and sim.locked_from_obj_by_privacy(interaction.target):
                    interactions_to_cancel.add(interaction)
                elif interaction.target is not None:
                    break
            for interaction in interactions_to_cancel:
                interaction.cancel(FinishingType.INTERACTION_INCOMPATIBILITY, cancel_reason_msg='Canceled due to incompatibility with privacy instance.')

    def _route_sim_away(self, sim, liabilities=()):
        context = InteractionContext(sim, InteractionContext.SOURCE_SCRIPT, Priority.High, insert_strategy=QueueInsertStrategy.NEXT)
        from interactions.utils.satisfy_constraint_interaction import BuildAndForceSatisfyShooConstraintInteraction
        result = sim.push_super_affordance(BuildAndForceSatisfyShooConstraintInteraction, None, context, liabilities=liabilities, privacy_inst=self, name_override='BuildShooFromPrivacy')
        if result:
            if self._post_route_affordance is not None:

                def route_away_callback(_):
                    post_route_context = context.clone_for_continuation(result.interaction)
                    sim.push_super_affordance(self._post_route_affordance, None, post_route_context)

                result.interaction.register_on_finishing_callback(route_away_callback)
        else:
            logger.debug('Failed to push BuildAndForceSatisfyShooConstraintInteraction on Sim {} to route them out of a privacy area.  Result: {}', sim, result, owner='tastle')
            if self.interaction is not None:
                self.interaction.cancel(FinishingType.TRANSITION_FAILURE, cancel_reason_msg='Failed to shoo Sims away.')
        return result

    def _cancel_pushed_interactions(self):
        for interaction in self._pushed_interactions:
            interaction.cancel(FinishingType.AUTO_EXIT, cancel_reason_msg='Privacy finished and is cleaning up.')
        self._pushed_interactions.clear()

    def handle_late_violator(self, sim):
        self._cancel_unavailable_interactions((sim,))
        self.add_override_for_sim(sim)
        liabilities = ((LATE_SHOO_LIABILITY, LateShooLiability(self, sim)),)
        result = self._route_sim_away(sim, liabilities=liabilities)
        if not result:
            return
        if not self._violators:
            context = InteractionContext(sim, InteractionContext.SOURCE_SCRIPT, Priority.High, insert_strategy=QueueInsertStrategy.NEXT)
            if self.interaction is None:
                result = sim.push_super_affordance(self.embarrassed_affordance, sim, context)
            else:
                result = sim.push_super_affordance(self.embarrassed_affordance, self.interaction.get_participant(ParticipantType.Actor), context)
            if not result and not services.sim_spawner_service().sim_is_leaving(sim):
                logger.warn('Failed to push the embarrassed affordance on Sim {}. Interaction {}. Result {}. Context {} ', sim, self.interaction, result, context, owner='tastle')
                return
        self._late_violators.add(sim)

    def add_override_for_sim(self, sim):
        for footprint in self._privacy_constraints:
            sim.routing_context.ignore_footprint_contour(footprint.footprint_id)

    def remove_override_for_sim(self, sim):
        for footprint in self._privacy_constraints:
            sim.routing_context.remove_footprint_contour_override(footprint.footprint_id)

    @property
    def allowed_sims(self):
        return self._allowed_sims

    @property
    def disallowed_sims(self):
        return self._disallowed_sims

    def remove_sim_from_allowed_disallowed(self, sim):
        if sim in self._allowed_sims:
            self._allowed_sims.remove(sim)
        if sim in self._disallowed_sims:
            self._disallowed_sims.remove(sim)

    @property
    def violators(self):
        return self._violators

    def remove_violator(self, sim):
        self.remove_override_for_sim(sim)
        self._violators.discard(sim)

    @property
    def late_violators(self):
        return self._late_violators

    def remove_late_violator(self, sim):
        self.remove_override_for_sim(sim)
        self._late_violators.discard(sim)
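
The Privacy class keeps all of its Sim bookkeeping (_allowed_sims, _disallowed_sims, _violators, _late_violators, _exempt_sims) in WeakSet instances, so a Sim destroyed elsewhere simply drops out of the privacy bookkeeping without explicit cleanup. A tiny stand-alone demonstration of that behaviour, using illustrative names only:

import gc
from weakref import WeakSet


class Sim:
    def __init__(self, name):
        self.name = name


violators = WeakSet()
sim = Sim("Bella")
violators.add(sim)
print(len(violators))  # 1

del sim        # drop the only strong reference to the Sim
gc.collect()   # CPython frees it right away; collect() just to be safe
print(len(violators))  # 0 -- the WeakSet forgot the Sim automatically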
Example no. 34
0
class Consumer(Service, ConsumerT):
    """Base Consumer."""

    app: AppT

    logger = logger

    #: Tuple of exception types that may be raised when the
    #: underlying consumer driver is stopped.
    consumer_stopped_errors: ClassVar[Tuple[Type[BaseException], ...]] = ()

    # Mapping of TP to list of gaps in offsets.
    _gap: MutableMapping[TP, List[int]]

    # Mapping of TP to list of acked offsets.
    _acked: MutableMapping[TP, List[int]]

    #: Fast lookup to see if tp+offset was acked.
    _acked_index: MutableMapping[TP, Set[int]]

    #: Keeps track of the currently read offset in each TP
    _read_offset: MutableMapping[TP, Optional[int]]

    #: Keeps track of the currently committed offset in each TP.
    _committed_offset: MutableMapping[TP, Optional[int]]

    #: The consumer.wait_empty() method will set this to be notified
    #: when something acks a message.
    _waiting_for_ack: Optional[asyncio.Future] = None

    #: Used by .commit to ensure only one thread is committing at a time.
    #: Any other thread that starts to commit while a commit is already
    #: active will wait for the original request to finish, and do nothing.
    _commit_fut: Optional[asyncio.Future] = None

    #: Set of unacked messages: that is messages that we started processing
    #: and that we MUST attempt to complete processing of, before
    #: shutting down or resuming a rebalance.
    _unacked_messages: MutableSet[Message]

    #: Time of last record batch received.
    #: Set only when not already set, and reset by commit(), so it actually
    #: tracks how long ago we received a record that was never committed.
    _last_batch: Counter[TP]

    #: Time of when the consumer was started.
    _time_start: float

    # How often to poll and track log end offsets.
    _end_offset_monitor_interval: float

    _commit_every: Optional[int]
    _n_acked: int = 0

    _active_partitions: Optional[Set[TP]]
    _paused_partitions: Set[TP]

    flow_active: bool = True
    can_resume_flow: Event

    def __init__(self,
                 transport: TransportT,
                 callback: ConsumerCallback,
                 on_partitions_revoked: PartitionsRevokedCallback,
                 on_partitions_assigned: PartitionsAssignedCallback,
                 *,
                 commit_interval: float = None,
                 commit_livelock_soft_timeout: float = None,
                 loop: asyncio.AbstractEventLoop = None,
                 **kwargs: Any) -> None:
        assert callback is not None
        self.transport = transport
        self.app = self.transport.app
        self.in_transaction = self.app.in_transaction
        self.callback = callback
        self._on_message_in = self.app.sensors.on_message_in
        self._on_partitions_revoked = on_partitions_revoked
        self._on_partitions_assigned = on_partitions_assigned
        self._commit_every = self.app.conf.broker_commit_every
        self.scheduler = self.app.conf.ConsumerScheduler()
        self.commit_interval = (commit_interval
                                or self.app.conf.broker_commit_interval)
        self.commit_livelock_soft_timeout = (
            commit_livelock_soft_timeout
            or self.app.conf.broker_commit_livelock_soft_timeout)
        self._gap = defaultdict(list)
        self._acked = defaultdict(list)
        self._acked_index = defaultdict(set)
        self._read_offset = defaultdict(lambda: None)
        self._committed_offset = defaultdict(lambda: None)
        self._unacked_messages = WeakSet()
        self._waiting_for_ack = None
        self._time_start = monotonic()
        self._last_batch = Counter()
        self._end_offset_monitor_interval = self.commit_interval * 2
        self.randomly_assigned_topics = set()
        self.can_resume_flow = Event()
        self._reset_state()
        super().__init__(loop=loop or self.transport.loop, **kwargs)
        self.transactions = self.transport.create_transaction_manager(
            consumer=self,
            producer=self.app.producer,
            beacon=self.beacon,
            loop=self.loop,
        )

    def on_init_dependencies(self) -> Iterable[ServiceT]:
        """Return list of services this consumer depends on."""
        # We start the TransactionManager only if
        # processing_guarantee='exactly_once'
        if self.in_transaction:
            return [self.transactions]
        return []

    def _reset_state(self) -> None:
        self._active_partitions = None
        self._paused_partitions = set()
        self.can_resume_flow.clear()
        self.flow_active = True
        self._last_batch.clear()
        self._time_start = monotonic()

    async def on_restart(self) -> None:
        """Call when the consumer is restarted."""
        self._reset_state()
        self.on_init()

    def _get_active_partitions(self) -> Set[TP]:
        tps = self._active_partitions
        if tps is None:
            return self._set_active_tps(self.assignment())
        assert all(isinstance(x, TP) for x in tps)
        return tps

    def _set_active_tps(self, tps: Set[TP]) -> Set[TP]:
        xtps = self._active_partitions = ensure_TPset(tps)  # copy
        xtps.difference_update(self._paused_partitions)
        return xtps

    @abc.abstractmethod
    async def _commit(self, offsets: Mapping[TP,
                                             int]) -> bool:  # pragma: no cover
        ...

    async def perform_seek(self) -> None:
        """Seek all partitions to their current committed position."""
        read_offset = self._read_offset
        _committed_offsets = await self.seek_to_committed()
        read_offset.update({
            tp: offset if offset is not None and offset >= 0 else None
            for tp, offset in _committed_offsets.items()
        })
        committed_offsets = {
            ensure_TP(tp): offset if offset else None
            for tp, offset in _committed_offsets.items() if offset is not None
        }
        self._committed_offset.update(committed_offsets)

    @abc.abstractmethod
    async def seek_to_committed(self) -> Mapping[TP, int]:
        """Seek all partitions to their committed offsets."""
        ...

    async def seek(self, partition: TP, offset: int) -> None:
        """Seek partition to specific offset."""
        self.log.dev('SEEK %r -> %r', partition, offset)
        # reset livelock detection
        self._last_batch.pop(partition, None)
        await self._seek(partition, offset)
        # set new read offset so we will reread messages
        self._read_offset[ensure_TP(partition)] = offset if offset else None

    @abc.abstractmethod
    async def _seek(self, partition: TP, offset: int) -> None:
        ...

    def stop_flow(self) -> None:
        """Block consumer from processing any more messages."""
        self.flow_active = False
        self.can_resume_flow.clear()

    def resume_flow(self) -> None:
        """Allow consumer to process messages."""
        self.flow_active = True
        self.can_resume_flow.set()

    def pause_partitions(self, tps: Iterable[TP]) -> None:
        """Pause fetching from partitions."""
        tpset = ensure_TPset(tps)
        self._get_active_partitions().difference_update(tpset)
        self._paused_partitions.update(tpset)

    def resume_partitions(self, tps: Iterable[TP]) -> None:
        """Resume fetching from partitions."""
        tpset = ensure_TPset(tps)
        self._get_active_partitions().update(tpset)
        self._paused_partitions.difference_update(tpset)

    @abc.abstractmethod
    def _new_topicpartition(self, topic: str,
                            partition: int) -> TP:  # pragma: no cover
        ...

    def _is_changelog_tp(self, tp: TP) -> bool:
        return tp.topic in self.app.tables.changelog_topics

    @Service.transitions_to(CONSUMER_PARTITIONS_REVOKED)
    async def on_partitions_revoked(self, revoked: Set[TP]) -> None:
        """Call during rebalancing when partitions are being revoked."""
        # NOTE:
        # The ConsumerRebalanceListener is responsible for calling
        # app.on_rebalance_start(), and this must have happened
        # before we get to this point (see aiokafka implementation).
        span = self.app._start_span_from_rebalancing('on_partitions_revoked')
        T = traced_from_parent_span(span)
        with span:
            # see comment in on_partitions_assigned
            # remove revoked partitions from active + paused tps.
            if self._active_partitions is not None:
                self._active_partitions.difference_update(revoked)
            self._paused_partitions.difference_update(revoked)
            await T(self._on_partitions_revoked, partitions=revoked)(revoked)

    @Service.transitions_to(CONSUMER_PARTITIONS_ASSIGNED)
    async def on_partitions_assigned(self, assigned: Set[TP]) -> None:
        """Call during rebalancing when partitions are being assigned."""
        span = self.app._start_span_from_rebalancing('on_partitions_assigned')
        T = traced_from_parent_span(span)
        with span:
            # remove recently revoked tps from set of paused tps.
            self._paused_partitions.intersection_update(assigned)
            # cache set of assigned partitions
            self._set_active_tps(assigned)
            # start callback chain of assigned callbacks.
            #   need to copy set at this point, since we cannot have
            #   the callbacks mutate our active list.
            self._last_batch.clear()
            await T(self._on_partitions_assigned,
                    partitions=assigned)(assigned)
        self.app.on_rebalance_return()

    @abc.abstractmethod
    async def _getmany(self, active_partitions: Optional[Set[TP]],
                       timeout: float) -> RecordMap:
        ...

    async def getmany(self,
                      timeout: float) -> AsyncIterator[Tuple[TP, Message]]:
        """Fetch batch of messages from server."""
        # records' contain mapping from TP to list of messages.
        # if there are two agents, consuming from topics t1 and t2,
        # normal order of iteration would be to process each
        # tp in the dict:
        #    for tp, messages in records.items():
        #        for message in messages:
        #           yield tp, message
        #
        # The problem with this, is if we have prefetched 16k records
        # for one partition, the other partitions won't even start processing
        # before those 16k records are completed.
        #
        # So we try round-robin between the tps instead:
        #
        #    iterators: Dict[TP, Iterator] = {
        #        tp: iter(messages)
        #        for tp, messages in records.items()
        #    }
        #    while iterators:
        #        for tp, messages in iterators.items():
        #            yield tp, next(messages)
        #            # remove from iterators if empty.
        #
        # The problem with this implementation is that
        # the records mapping is ordered by TP, so records.keys()
        # will look like this:
        #
        #  TP(topic='bar', partition=0)
        #  TP(topic='bar', partition=1)
        #  TP(topic='bar', partition=2)
        #  TP(topic='bar', partition=3)
        #  TP(topic='foo', partition=0)
        #  TP(topic='foo', partition=1)
        #  TP(topic='foo', partition=2)
        #  TP(topic='foo', partition=3)
        #
        # If there are 100 partitions for each topic,
        # it will process 100 items in the first topic, then 100 items
        # in the other topic, but even worse if partition counts
        # vary greatly, t1 has 1000 partitions and t2
        # has 1 partition, then t2 will end up being starved most of the time.
        #
        # We solve this by going round-robin through each topic.
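        #
        # A rough stand-alone sketch of that idea (an assumption for
        # illustration only; the real work is delegated to self.scheduler
        # below, and the sketch assumes collections.defaultdict and
        # itertools.chain are imported):
        #
        #   def topic_roundrobin(records):
        #       per_topic = defaultdict(list)   # topic -> [(tp, msg), ...]
        #       for tp, msgs in records.items():
        #           per_topic[tp.topic].append([(tp, m) for m in msgs])
        #       topic_iters = [chain.from_iterable(g)
        #                      for g in per_topic.values()]
        #       while topic_iters:
        #           for it in list(topic_iters):
        #               try:
        #                   yield next(it)  # one message per topic per pass
        #               except StopIteration:
        #                   topic_iters.remove(it)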
        records, active_partitions = await self._wait_next_records(timeout)
        if records is None or self.should_stop:
            return

        records_it = self.scheduler.iterate(records)
        to_message = self._to_message  # localize
        if self.flow_active:
            for tp, record in records_it:
                if not self.flow_active:
                    break
                if active_partitions is None or tp in active_partitions:
                    highwater_mark = self.highwater(tp)
                    self.app.monitor.track_tp_end_offset(tp, highwater_mark)
                    # convert timestamp to seconds from int milliseconds.
                    yield tp, to_message(tp, record)

    async def _wait_next_records(
            self,
            timeout: float) -> Tuple[Optional[RecordMap], Optional[Set[TP]]]:
        if not self.flow_active:
            await self.wait(self.can_resume_flow)
        # Implementation for the Fetcher service.

        is_client_only = self.app.client_only

        active_partitions: Optional[Set[TP]]
        if is_client_only:
            active_partitions = None
        else:
            active_partitions = self._get_active_partitions()

        records: RecordMap = {}
        if is_client_only or active_partitions:
            # Fetch records only if active partitions to avoid the risk of
            # fetching all partitions in the beginning when none of the
            # partitions is paused/resumed.
            records = await self._getmany(
                active_partitions=active_partitions,
                timeout=timeout,
            )
        else:
            # We should still release to the event loop
            await self.sleep(1)
        return records, active_partitions

    @abc.abstractmethod
    def _to_message(self, tp: TP, record: Any) -> ConsumerMessage:
        ...

    def track_message(self, message: Message) -> None:
        """Track message and mark it as pending ack."""
        # add to set of pending messages that must be acked for graceful
        # shutdown.  This is called by transport.Conductor,
        # before delivering messages to streams.
        self._unacked_messages.add(message)
        # call sensors
        self._on_message_in(message.tp, message.offset, message)

    def ack(self, message: Message) -> bool:
        """Mark message as being acknowledged by stream."""
        if not message.acked:
            message.acked = True
            tp = message.tp
            offset = message.offset
            if self.app.topics.acks_enabled_for(message.topic):
                committed = self._committed_offset[tp]
                try:
                    if committed is None or offset > committed:
                        acked_index = self._acked_index[tp]
                        if offset not in acked_index:
                            self._unacked_messages.discard(message)
                            acked_index.add(offset)
                            acked_for_tp = self._acked[tp]
                            acked_for_tp.append(offset)
                            self._n_acked += 1
                            return True
                finally:
                    notify(self._waiting_for_ack)
        return False

    async def _wait_for_ack(self, timeout: float) -> None:
        # arm future so that `ack()` can wake us up
        self._waiting_for_ack = asyncio.Future(loop=self.loop)
        try:
            # wait for `ack()` to wake us up
            await asyncio.wait_for(self._waiting_for_ack,
                                   loop=self.loop,
                                   timeout=1)
        except (asyncio.TimeoutError,
                asyncio.CancelledError):  # pragma: no cover
            pass
        finally:
            self._waiting_for_ack = None

    @Service.transitions_to(CONSUMER_WAIT_EMPTY)
    async def wait_empty(self) -> None:
        """Wait for all messages that started processing to be acked."""
        wait_count = 0
        T = traced_from_parent_span()
        while not self.should_stop and self._unacked_messages:
            wait_count += 1
            if not wait_count % 10:  # pragma: no cover
                remaining = [(m.refcount, m) for m in self._unacked_messages]
                self.log.warning('wait_empty: Waiting for %r tasks', remaining)
            self.log.dev('STILL WAITING FOR ALL STREAMS TO FINISH')
            self.log.dev('WAITING FOR %r EVENTS', len(self._unacked_messages))
            gc.collect()
            await T(self.commit)()
            if not self._unacked_messages:
                break
            await T(self._wait_for_ack)(timeout=1)

        self.log.dev('COMMITTING AGAIN AFTER STREAMS DONE')
        await T(self.commit_and_end_transactions)()

    async def commit_and_end_transactions(self) -> None:
        """Commit all safe offsets and end transaction."""
        await self.commit(start_new_transaction=False)

    async def on_stop(self) -> None:
        """Call when consumer is stopping."""
        if self.app.conf.stream_wait_empty:
            await self.wait_empty()
        else:
            await self.commit_and_end_transactions()

        self._last_batch.clear()

    @Service.task
    async def _commit_handler(self) -> None:
        interval = self.commit_interval

        await self.sleep(interval)
        async for sleep_time in self.itertimer(interval, name='commit'):
            await self.commit()

    @Service.task
    async def _commit_livelock_detector(self) -> None:  # pragma: no cover
        soft_timeout = self.commit_livelock_soft_timeout
        interval: float = self.commit_interval * 2.5
        acks_enabled_for = self.app.topics.acks_enabled_for
        await self.sleep(interval)
        async for sleep_time in self.itertimer(interval, name='livelock'):
            for tp, last_batch_time in self._last_batch.items():
                if last_batch_time and acks_enabled_for(tp.topic):
                    s_since_batch = monotonic() - last_batch_time
                    if s_since_batch > soft_timeout:
                        self.log.warning(
                            'Possible livelock: '
                            'COMMIT OFFSET NOT ADVANCING FOR %r', tp)

    async def commit(self,
                     topics: TPorTopicSet = None,
                     start_new_transaction: bool = True) -> bool:
        """Maybe commit the offset for all or specific topics.

        Arguments:
            topics: Set containing topics and/or TopicPartitions to commit.
        """
        if self.app.client_only:
            # client only cannot commit as consumer does not have group_id
            return False
        if await self.maybe_wait_for_commit_to_finish():
            # original commit finished, return False as we did not commit
            return False

        self._commit_fut = asyncio.Future(loop=self.loop)
        try:
            return await self.force_commit(
                topics,
                start_new_transaction=start_new_transaction,
            )
        finally:
            # set commit_fut to None so that next call will commit.
            fut, self._commit_fut = self._commit_fut, None
            # notify followers that the commit is done.
            notify(fut)

    async def maybe_wait_for_commit_to_finish(self) -> bool:
        """Wait for any existing commit operation to finish."""
        # Only one coroutine allowed to commit at a time,
        # and other coroutines should wait for the original commit to finish
        # then do nothing.
        if self._commit_fut is not None:
            # something is already committing so wait for that future.
            try:
                await self._commit_fut
            except asyncio.CancelledError:
                # if future is cancelled we have to start new commit
                pass
            else:
                return True
        return False
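
    # A stand-alone sketch (an assumption, not Consumer's real code) of the
    # single-flight pattern used by commit() and
    # maybe_wait_for_commit_to_finish() above: the first caller creates a
    # future and performs the commit, and every concurrent caller simply
    # awaits that future and skips committing.
    #
    #   async def commit_once(self):
    #       if self._commit_fut is not None:
    #           await self._commit_fut          # someone else is committing
    #           return False                    # we did not commit ourselves
    #       self._commit_fut = asyncio.get_event_loop().create_future()
    #       try:
    #           return await self._do_commit()
    #       finally:
    #           fut, self._commit_fut = self._commit_fut, None
    #           fut.set_result(None)            # wake up all waiters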

    @Service.transitions_to(CONSUMER_COMMITTING)
    async def force_commit(self,
                           topics: TPorTopicSet = None,
                           start_new_transaction: bool = True) -> bool:
        """Force offset commit."""
        sensor_state = self.app.sensors.on_commit_initiated(self)

        # Go over the ack list in each topic/partition
        commit_tps = list(self._filter_tps_with_pending_acks(topics))
        did_commit = await self._commit_tps(
            commit_tps, start_new_transaction=start_new_transaction)

        self.app.sensors.on_commit_completed(self, sensor_state)
        return did_commit

    async def _commit_tps(self, tps: Iterable[TP],
                          start_new_transaction: bool) -> bool:
        commit_offsets = self._filter_committable_offsets(tps)
        if commit_offsets:
            try:
                # send all messages attached to the new offset
                await self._handle_attached(commit_offsets)
            except ProducerSendError as exc:
                await self.crash(exc)
            else:
                return await self._commit_offsets(
                    commit_offsets,
                    start_new_transaction=start_new_transaction)
        return False

    def _filter_committable_offsets(self, tps: Iterable[TP]) -> Dict[TP, int]:
        commit_offsets = {}
        for tp in tps:
            # Find the latest offset we can commit in this tp
            offset = self._new_offset(tp)
            # check if we can commit to this offset
            if offset is not None and self._should_commit(tp, offset):
                commit_offsets[tp] = offset
        return commit_offsets

    async def _handle_attached(self, commit_offsets: Mapping[TP, int]) -> None:
        for tp, offset in commit_offsets.items():
            app = cast(_App, self.app)
            attachments = app._attachments
            producer = app.producer
            # Start publishing the messages and return a list of pending
            # futures.
            pending = await attachments.publish_for_tp_offset(tp, offset)
            # then we wait for either
            #  1) all the attached messages to be published, or
            #  2) the producer crashing
            #
            # If the producer crashes we will not be able to send any messages
            # and it only crashes when there's an irrecoverable error.
            #
            # If we cannot commit it means the events will be processed again,
            # so conforms to at-least-once semantics.
            if pending:
                await producer.wait_many(pending)

    async def _commit_offsets(self,
                              offsets: Mapping[TP, int],
                              start_new_transaction: bool = True) -> bool:
        table = terminal.logtable(
            [(str(tp), str(offset)) for tp, offset in offsets.items()],
            title='Commit Offsets',
            headers=['TP', 'Offset'],
        )
        self.log.dev('COMMITTING OFFSETS:\n%s', table)
        assignment = self.assignment()
        committable_offsets: Dict[TP, int] = {}
        revoked: Dict[TP, int] = {}
        for tp, offset in offsets.items():
            if tp in assignment:
                committable_offsets[tp] = offset
            else:
                revoked[tp] = offset
        if revoked:
            self.log.info(
                'Discarded commit for revoked partitions that '
                'will be eventually processed again: %r',
                revoked,
            )
        if not committable_offsets:
            return False
        with flight_recorder(self.log, timeout=300.0) as on_timeout:
            did_commit = False
            on_timeout.info('+consumer.commit()')
            if self.in_transaction:
                did_commit = await self.transactions.commit(
                    committable_offsets,
                    start_new_transaction=start_new_transaction,
                )
            else:
                did_commit = await self._commit(committable_offsets)
            on_timeout.info('-consumer.commit()')
            if did_commit:
                on_timeout.info('+tables.on_commit')
                self.app.tables.on_commit(committable_offsets)
                on_timeout.info('-tables.on_commit')
        self._committed_offset.update(committable_offsets)
        self.app.monitor.on_tp_commit(committable_offsets)
        for tp in offsets:
            self._last_batch.pop(tp, None)
        return did_commit

    def _filter_tps_with_pending_acks(self,
                                      topics: TPorTopicSet = None
                                      ) -> Iterator[TP]:
        return (tp for tp in self._acked
                if topics is None or tp in topics or tp.topic in topics)

    def _should_commit(self, tp: TP, offset: int) -> bool:
        committed = self._committed_offset[tp]
        return committed is None or bool(offset) and offset > committed

    def _new_offset(self, tp: TP) -> Optional[int]:
        # get the new offset for this tp, by going through
        # its list of acked messages.
        acked = self._acked[tp]

        # We iterate over it until we find a gap
        # then return the offset before that.
        # For example if acked[tp] is:
        #   1 2 3 4 5 6 7 8 9
        # the return value will be: 9
        # If acked[tp] is:
        #  34 35 36 40 41 42 43 44
        #          ^--- gap
        # the return value will be: 36
        if acked:
            max_offset = max(acked)
            gap_for_tp = self._gap[tp]
            if gap_for_tp:
                gap_index = next(
                    (i for i, x in enumerate(gap_for_tp) if x > max_offset),
                    len(gap_for_tp))
                gaps = gap_for_tp[:gap_index]
                acked.extend(gaps)
                gap_for_tp[:gap_index] = []
            acked.sort()
            # Note: acked is always kept sorted.
            # find first list of consecutive numbers
            batch = next(consecutive_numbers(acked))
            # remove them from the list to clean up.
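            # (the `- 1` below keeps the batch's highest offset in `acked`)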
            acked[:len(batch) - 1] = []
            self._acked_index[tp].difference_update(batch)
            # return the highest commit offset
            return batch[-1]
        return None

    async def on_task_error(self, exc: BaseException) -> None:
        """Call when processing a message failed."""
        await self.commit()

    def _add_gap(self, tp: TP, offset_from: int, offset_to: int) -> None:
        committed = self._committed_offset[tp]
        gap_for_tp = self._gap[tp]
        for offset in range(offset_from, offset_to):
            if committed is None or offset > committed:
                gap_for_tp.append(offset)

    async def _drain_messages(self,
                              fetcher: ServiceT) -> None:  # pragma: no cover
        # This is the background thread started by Fetcher, used to
        # constantly read messages using Consumer.getmany.
        # It takes Fetcher as argument, because we must be able to
        # stop it using `await Fetcher.stop()`.
        callback = self.callback
        getmany = self.getmany
        consumer_should_stop = self._stopped.is_set
        fetcher_should_stop = fetcher._stopped.is_set

        get_read_offset = self._read_offset.__getitem__
        set_read_offset = self._read_offset.__setitem__
        get_commit_offset = self._committed_offset.__getitem__
        flag_consumer_fetching = CONSUMER_FETCHING
        set_flag = self.diag.set_flag
        unset_flag = self.diag.unset_flag
        commit_every = self._commit_every
        acks_enabled_for = self.app.topics.acks_enabled_for

        try:
            while not (consumer_should_stop() or fetcher_should_stop()):
                set_flag(flag_consumer_fetching)
                ait = cast(AsyncIterator, getmany(timeout=5.0))
                last_batch = self._last_batch

                # Sleeping because sometimes getmany is called in a loop
                # never releasing to the event loop
                await self.sleep(0)
                if not self.should_stop:
                    async for tp, message in ait:
                        offset = message.offset
                        r_offset = get_read_offset(tp)
                        committed_offset = get_commit_offset(tp)
                        if committed_offset != r_offset:
                            last_batch[tp] = monotonic()
                        if r_offset is None or offset > r_offset:
                            gap = offset - (r_offset or 0)
                            # We have a gap in incoming messages
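                            # Offsets r_offset+1 .. offset-1 were never
                            # received; record them as a gap so the commit
                            # offset can still advance past them (see
                            # _add_gap and _new_offset).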
                            if gap > 1 and r_offset:
                                acks_enabled = acks_enabled_for(message.topic)
                                if acks_enabled:
                                    self._add_gap(tp, r_offset + 1, offset)
                            if commit_every is not None:
                                if self._n_acked >= commit_every:
                                    self._n_acked = 0
                                    await self.commit()
                            await callback(message)
                            set_read_offset(tp, offset)
                        else:
                            self.log.dev('DROPPED MESSAGE ROFF %r: k=%r v=%r',
                                         offset, message.key, message.value)
                    unset_flag(flag_consumer_fetching)

        except self.consumer_stopped_errors:
            if self.transport.app.should_stop:
                # we're already stopping so ignore
                self.log.info('Broker stopped consumer, shutting down...')
                return
            raise
        except asyncio.CancelledError:
            if self.transport.app.should_stop:
                # we're already stopping so ignore
                self.log.info('Consumer shutting down for user cancel.')
                return
            raise
        except Exception as exc:
            self.log.exception('Drain messages raised: %r', exc)
            raise
        finally:
            unset_flag(flag_consumer_fetching)

    def close(self) -> None:
        """Close consumer for graceful shutdown."""
        ...

    @property
    def unacked(self) -> Set[Message]:
        """Return the set of currently unacknowledged messages."""
        return cast(Set[Message], self._unacked_messages)
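
A small standalone sketch of the offset calculation described in _new_offset()
above (the helper below is hypothetical and only mirrors what
consecutive_numbers appears to do; it is not the library's implementation):

from itertools import count
from typing import List, Optional


def first_consecutive_run(acked: List[int]) -> List[int]:
    # First run of consecutive numbers in a sorted list,
    # e.g. [34, 35, 36, 40, 41] -> [34, 35, 36].
    run: List[int] = []
    for offset, expected in zip(acked, count(acked[0])):
        if offset != expected:
            break
        run.append(offset)
    return run


def new_offset(acked: List[int]) -> Optional[int]:
    # Highest offset that is safe to commit: the end of the first
    # consecutive run of acked offsets.
    if not acked:
        return None
    return first_consecutive_run(sorted(acked))[-1]


print(new_offset([1, 2, 3, 4, 5, 6, 7, 8, 9]))       # 9
print(new_offset([34, 35, 36, 40, 41, 42, 43, 44]))  # 36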
Exemplo n.º 35
0
class Consumer(Service, ConsumerT):
    """Base Consumer."""

    app: AppT

    logger = logger

    #: Tuple of exception types that may be raised when the
    #: underlying consumer driver is stopped.
    consumer_stopped_errors: ClassVar[Tuple[Type[BaseException], ...]] = ()

    # Mapping of TP to list of acked offsets.
    _acked: MutableMapping[TP, List[int]]

    #: Fast lookup to see if tp+offset was acked.
    _acked_index: MutableMapping[TP, Set[int]]

    #: Keeps track of the currently read offset in each TP
    _read_offset: MutableMapping[TP, Optional[int]]

    #: Keeps track of the currently committed offset in each TP.
    _committed_offset: MutableMapping[TP, Optional[int]]

    #: The consumer.wait_empty() method will set this to be notified
    #: when something acks a message.
    _waiting_for_ack: Optional[asyncio.Future] = None

    #: Used by .commit to ensure only one thread is committing at a time.
    #: Any other thread that starts to commit while a commit is already
    #: active will wait for the original request to finish, and do nothing.
    _commit_fut: Optional[asyncio.Future] = None

    #: Set of unacked messages: that is, messages that we started processing
    #: and that we MUST attempt to complete processing of, before
    #: shutting down or resuming a rebalance.
    _unacked_messages: MutableSet[Message]

    #: Time of last record batch received.
    #: Set only when not already set, and reset by commit(), so it actually
    #: tracks how long ago it was since we received a record that
    #: was never committed.
    _last_batch: Optional[float]

    #: Time of when the consumer was started.
    _time_start: float

    # How often to poll and track log end offsets.
    _end_offset_monitor_interval: float

    _commit_every: Optional[int]
    _n_acked: int = 0

    def __init__(self,
                 transport: TransportT,
                 callback: ConsumerCallback,
                 on_partitions_revoked: PartitionsRevokedCallback,
                 on_partitions_assigned: PartitionsAssignedCallback,
                 *,
                 commit_interval: float = None,
                 commit_livelock_soft_timeout: float = None,
                 loop: asyncio.AbstractEventLoop = None,
                 **kwargs: Any) -> None:
        assert callback is not None
        self.transport = transport
        self.app = self.transport.app
        self.callback = callback
        self._on_message_in = self.app.sensors.on_message_in
        self._on_partitions_revoked = on_partitions_revoked
        self._on_partitions_assigned = on_partitions_assigned
        self._commit_every = self.app.conf.broker_commit_every
        self.commit_interval = (commit_interval
                                or self.app.conf.broker_commit_interval)
        self.commit_livelock_soft_timeout = (
            commit_livelock_soft_timeout
            or self.app.conf.broker_commit_livelock_soft_timeout)
        self._acked = defaultdict(list)
        self._acked_index = defaultdict(set)
        self._read_offset = defaultdict(lambda: None)
        self._committed_offset = defaultdict(lambda: None)
        self._unacked_messages = WeakSet()
        self._waiting_for_ack = None
        self._time_start = monotonic()
        self._last_batch = None
        self._end_offset_monitor_interval = self.commit_interval * 2
        self.randomly_assigned_topics = set()
        super().__init__(loop=loop or self.transport.loop, **kwargs)

    @abc.abstractmethod
    async def _commit(
            self,
            offsets: Mapping[TP, Tuple[int, str]]) -> bool:  # pragma: no cover
        ...

    @abc.abstractmethod
    def _new_topicpartition(self, topic: str,
                            partition: int) -> TP:  # pragma: no cover
        ...

    def _is_changelog_tp(self, tp: TP) -> bool:
        return tp.topic in self.app.tables.changelog_topics

    @Service.transitions_to(CONSUMER_PARTITIONS_ASSIGNED)
    async def on_partitions_assigned(self, assigned: Set[TP]) -> None:
        await self._on_partitions_assigned(assigned)

    @Service.transitions_to(CONSUMER_PARTITIONS_REVOKED)
    async def on_partitions_revoked(self, revoked: Set[TP]) -> None:
        await self._on_partitions_revoked(revoked)

    def track_message(self, message: Message) -> None:
        # add to set of pending messages that must be acked for graceful
        # shutdown.  This is called by transport.Conductor,
        # before delivering messages to streams.
        self._unacked_messages.add(message)
        # call sensors
        self._on_message_in(message.tp, message.offset, message)

    def ack(self, message: Message) -> bool:
        if not message.acked:
            message.acked = True
            tp = message.tp
            offset = message.offset
            if self.app.topics.acks_enabled_for(message.topic):
                committed = self._committed_offset[tp]
                try:
                    if committed is None or offset > committed:
                        acked_index = self._acked_index[tp]
                        if offset not in acked_index:
                            self._unacked_messages.discard(message)
                            acked_index.add(offset)
                            acked_for_tp = self._acked[tp]
                            acked_for_tp.append(offset)
                            self._n_acked += 1
                            return True
                finally:
                    notify(self._waiting_for_ack)
        return False

    @Service.transitions_to(CONSUMER_WAIT_EMPTY)
    async def wait_empty(self) -> None:
        """Wait for all messages that started processing to be acked."""
        wait_count = 0
        while not self.should_stop and self._unacked_messages:
            wait_count += 1
            if not wait_count % 100_000:  # pragma: no cover
                remaining = [(m.refcount, m) for m in self._unacked_messages]
                self.log.warn(f'Waiting for {remaining}')
            self.log.dev('STILL WAITING FOR ALL STREAMS TO FINISH')
            self.log.dev('WAITING FOR %r EVENTS', len(self._unacked_messages))
            gc.collect()
            await self.commit()
            if not self._unacked_messages:
                break

            # arm future so that `ack()` can wake us up
            self._waiting_for_ack = asyncio.Future(loop=self.loop)
            try:
                # wait for `ack()` to wake us up
                await asyncio.wait_for(self._waiting_for_ack,
                                       loop=self.loop,
                                       timeout=1)
            except (asyncio.TimeoutError,
                    asyncio.CancelledError):  # pragma: no cover
                pass
            finally:
                self._waiting_for_ack = None
        self.log.dev('COMMITTING AGAIN AFTER STREAMS DONE')
        await self.commit()

    async def on_stop(self) -> None:
        if self.app.conf.stream_wait_empty:
            await self.wait_empty()
        self._last_batch = None

    @Service.task
    async def _commit_handler(self) -> None:
        await self.sleep(self.commit_interval)
        while not self.should_stop:
            await self.commit()
            await self.sleep(self.commit_interval)

    @Service.task
    async def _commit_livelock_detector(self) -> None:  # pragma: no cover
        soft_timeout = self.commit_livelock_soft_timeout
        interval: float = self.commit_interval * 2.5
        await self.sleep(interval)
        while not self.should_stop:
            if self._last_batch is not None:
                s_since_batch = monotonic() - self._last_batch
                if s_since_batch > soft_timeout:
                    self.log.warn(
                        'Possible livelock: COMMIT OFFSET NOT ADVANCING')
            await self.sleep(interval)

    async def commit(self,
                     topics: TPorTopicSet = None) -> bool:  # pragma: no cover
        """Maybe commit the offset for all or specific topics.

        Arguments:
            topics: Set containing topics and/or TopicPartitions to commit.
        """
        if await self.maybe_wait_for_commit_to_finish():
            # original commit finished, return False as we did not commit
            return False

        self._commit_fut = asyncio.Future(loop=self.loop)
        try:
            return await self.force_commit(topics)
        finally:
            # set commit_fut to None so that next call will commit.
            fut, self._commit_fut = self._commit_fut, None
            # notify followers that the commit is done.
            if fut is not None and not fut.done():
                fut.set_result(None)

    async def maybe_wait_for_commit_to_finish(self) -> bool:
        # Only one coroutine allowed to commit at a time,
        # and other coroutines should wait for the original commit to finish
        # then do nothing.
        if self._commit_fut is not None:
            # something is already committing so wait for that future.
            try:
                await self._commit_fut
            except asyncio.CancelledError:
                # if future is cancelled we have to start new commit
                pass
            else:
                return True
        return False

    @Service.transitions_to(CONSUMER_COMMITTING)
    async def force_commit(self, topics: TPorTopicSet = None) -> bool:
        sensor_state = self.app.sensors.on_commit_initiated(self)

        # Go over the ack list in each topic/partition
        commit_tps = list(self._filter_tps_with_pending_acks(topics))
        did_commit = await self._commit_tps(commit_tps)

        self.app.sensors.on_commit_completed(self, sensor_state)
        return did_commit

    async def _commit_tps(self, tps: Iterable[TP]) -> bool:
        commit_offsets = self._filter_committable_offsets(tps)
        if commit_offsets:
            try:
                # send all messages attached to the new offset
                await self._handle_attached(commit_offsets)
            except ProducerSendError as exc:
                await self.crash(exc)
            else:
                return await self._commit_offsets(commit_offsets)
        return False

    def _filter_committable_offsets(self, tps: Iterable[TP]) -> Dict[TP, int]:
        commit_offsets = {}
        for tp in tps:
            # Find the latest offset we can commit in this tp
            offset = self._new_offset(tp)
            # check if we can commit to this offset
            if offset is not None and self._should_commit(tp, offset):
                commit_offsets[tp] = offset
        return commit_offsets

    async def _handle_attached(self, commit_offsets: Mapping[TP, int]) -> None:
        for tp, offset in commit_offsets.items():
            app = cast(App, self.app)
            attachments = app._attachments
            producer = app.producer
            # Start publishing the messages and return a list of pending
            # futures.
            pending = await attachments.publish_for_tp_offset(tp, offset)
            # then we wait for either
            #  1) all the attached messages to be published, or
            #  2) the producer crashing
            #
            # If the producer crashes we will not be able to send any messages
            # and it only crashes when there's an irrecoverable error.
            #
            # If we cannot commit it means the events will be processed again,
            # so this conforms to at-least-once semantics.
            if pending:
                await producer.wait_many(pending)

    async def _commit_offsets(self, commit_offsets: Mapping[TP, int]) -> bool:
        meta = ''
        return await self._commit(
            {tp: (offset, meta)
             for tp, offset in commit_offsets.items()})

    def _filter_tps_with_pending_acks(self,
                                      topics: TPorTopicSet = None
                                      ) -> Iterator[TP]:
        return (tp for tp in self._acked
                if topics is None or tp in topics or tp.topic in topics)

    def _should_commit(self, tp: TP, offset: int) -> bool:
        committed = self._committed_offset[tp]
        return committed is None or bool(offset) and offset > committed

    def _new_offset(self, tp: TP) -> Optional[int]:
        # get the new offset for this tp, by going through
        # its list of acked messages.
        acked = self._acked[tp]

        # We iterate over it until we find a gap
        # then return the offset before that.
        # For example if acked[tp] is:
        #   1 2 3 4 5 6 7 8 9
        # the return value will be: 9
        # If acked[tp] is:
        #  34 35 36 40 41 42 43 44
        #          ^--- gap
        # the return value will be: 36
        if acked:
            acked.sort()
            # Note: acked is always kept sorted.
            # find first list of consecutive numbers
            batch = next(consecutive_numbers(acked))
            # remove them from the list to clean up.
            acked[:len(batch)] = []
            self._acked_index[tp].difference_update(batch)
            # return the highest commit offset
            return batch[-1]
        return None

    async def on_task_error(self, exc: BaseException) -> None:
        await self.commit()

    async def _drain_messages(self,
                              fetcher: ServiceT) -> None:  # pragma: no cover
        # This is the background thread started by Fetcher, used to
        # constantly read messages using Consumer.getmany.
        # It takes Fetcher as argument, because we must be able to
        # stop it using `await Fetcher.stop()`.
        callback = self.callback
        getmany = self.getmany
        consumer_should_stop = self._stopped.is_set
        fetcher_should_stop = fetcher._stopped.is_set

        get_read_offset = self._read_offset.__getitem__
        set_read_offset = self._read_offset.__setitem__
        flag_consumer_fetching = CONSUMER_FETCHING
        set_flag = self.diag.set_flag
        unset_flag = self.diag.unset_flag
        commit_every = self._commit_every

        try:
            while not (consumer_should_stop() or fetcher_should_stop()):
                set_flag(flag_consumer_fetching)
                ait = cast(AsyncIterator, getmany(timeout=5.0))
                # Sleeping because sometimes getmany is called in a loop
                # never releasing to the event loop
                await self.sleep(0)
                if not self.should_stop:
                    async for tp, message in ait:
                        offset = message.offset
                        r_offset = get_read_offset(tp)
                        if r_offset is None or offset > r_offset:
                            if commit_every is not None:
                                if self._n_acked >= commit_every:
                                    self._n_acked = 0
                                    await self.commit()
                            await callback(message)
                            set_read_offset(tp, offset)
                        else:
                            self.log.dev('DROPPED MESSAGE ROFF %r: k=%r v=%r',
                                         offset, message.key, message.value)
                    unset_flag(flag_consumer_fetching)

        except self.consumer_stopped_errors:
            if self.transport.app.should_stop:
                # we're already stopping so ignore
                self.log.info('Broker stopped consumer, shutting down...')
                return
            raise
        except asyncio.CancelledError:
            if self.transport.app.should_stop:
                # we're already stopping so ignore
                self.log.info('Consumer shutting down for user cancel.')
                return
            raise
        except Exception as exc:
            self.log.exception('Drain messages raised: %r', exc)
            raise
        finally:
            unset_flag(flag_consumer_fetching)

    def close(self) -> None:
        ...

    @property
    def unacked(self) -> Set[Message]:
        return cast(Set[Message], self._unacked_messages)
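
The commit() / maybe_wait_for_commit_to_finish() pair above implements a
"single in-flight commit": the first caller creates a Future and does the
work, while concurrent callers just await that Future and skip committing. A
minimal standalone sketch of that pattern (names are illustrative, not part
of the example):

import asyncio
from typing import Optional


class SingleFlightCommitter:
    def __init__(self) -> None:
        self._commit_fut: Optional[asyncio.Future] = None
        self.commit_count = 0

    async def commit(self) -> bool:
        if self._commit_fut is not None:
            # Someone else is already committing: wait for it and do nothing.
            try:
                await self._commit_fut
            except asyncio.CancelledError:
                pass  # original commit was cancelled; fall through and commit
            else:
                return False
        self._commit_fut = asyncio.get_running_loop().create_future()
        try:
            await asyncio.sleep(0.01)  # stand-in for the real commit work
            self.commit_count += 1
            return True
        finally:
            # Wake up any waiters and allow the next commit to proceed.
            fut, self._commit_fut = self._commit_fut, None
            if not fut.done():
                fut.set_result(None)


async def demo() -> None:
    committer = SingleFlightCommitter()
    results = await asyncio.gather(*(committer.commit() for _ in range(5)))
    print(results, committer.commit_count)  # exactly one True; one real commit


if __name__ == '__main__':
    asyncio.run(demo())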
Exemplo n.º 36
0
class ConnectionPool(object):
    """A pool of :class:`psycopg2:connection` objects.

    .. attribute:: minconn

        The minimum number of connections to keep in the pool. By default one
        connection is opened when the pool is created.

    .. attribute:: maxconn

        The maximum number of connections in the pool. By default the pool will
        attempt to open as many connections as requested.

    .. attribute:: idle_timeout

        How many seconds to keep an idle connection before closing it. The
        default value causes idle connections to be closed after 10 minutes
        (approximately, it depends on :meth:`putconn` being called).

    .. attribute:: connect_kwargs

        The keyword arguments to pass to :func:`psycopg2.connect`. If the `dsn`
        argument isn't specified, then it's set to an empty string by default.

    The following attributes are internal; they're documented here to provide
    insight into how the pool works.

    .. attribute:: connections_in_use

        The set of connections that have been checked out of the pool through
        :meth:`getconn`. Type: :class:`weakref.WeakSet`.

    .. attribute:: idle_connections

        The pool of unused connections, last in first out.
        Type: :class:`collections.deque`.

    .. attribute:: return_times

        A timestamp is stored in this dict when a connection is added to
        :attr:`.idle_connections`. That timestamp is used in :meth:`getconn` to
        compute how long the connection stayed idle in the pool.
        Type: :class:`dict`.

    This class provides two main methods (:meth:`getconn` and :meth:`putconn`),
    plus another one that you probably don't need (:meth:`clear`).

    """

    __slots__ = ('minconn', 'maxconn', 'idle_timeout', 'connect_kwargs',
                 'idle_connections', 'connections_in_use', 'return_times',
                 '__dict__')

    def __init__(self,
                 minconn=1,
                 maxconn=float('inf'),
                 idle_timeout=600,
                 **connect_kwargs):
        self.minconn = minconn
        self.maxconn = maxconn
        self.idle_timeout = idle_timeout
        connect_kwargs.setdefault('dsn', '')
        self.connect_kwargs = connect_kwargs

        self.connections_in_use = WeakSet()
        self.idle_connections = deque()
        self.return_times = {}

        for i in range(self.minconn):
            self._connect()

    def _connect(self, for_immediate_use=False):
        """Open a new connection.
        """
        conn = psycopg2._connect(**self.connect_kwargs)
        if for_immediate_use:
            self.connections_in_use.add(conn)
        else:
            self.return_times[conn] = uptime()
            self.idle_connections.append(conn)
        return conn

    def getconn(self):
        """Get a connection from the pool.

        If there is no idle connection available, then a new one is opened,
        unless there are already :attr:`.maxconn` connections open, in which
        case a :class:`PoolError` exception is raised.

        Any connection that is broken, or has been idle for more than
        :attr:`.idle_timeout` seconds, is closed and discarded.
        """
        while True:
            try:
                # Attempt to take an idle connection from the pool.
                conn = self.idle_connections.pop()
            except IndexError:
                # We don't have any idle connection available, open a new one.
                if len(self.connections_in_use) >= self.maxconn:
                    raise PoolError("connection pool exhausted")
                conn = self._connect(for_immediate_use=True)
            else:
                # Close and discard the connection if it's broken or too old.
                idle_since = self.return_times.pop(conn, 0)
                close = (conn.info.transaction_status !=
                         _ext.TRANSACTION_STATUS_IDLE
                         or self.idle_timeout and idle_since <
                         (uptime() - self.idle_timeout))
                if close:
                    conn.close()
                    continue
            break
        return conn

    def putconn(self, conn):
        """Return a connection to the pool.

        You should always return a connection to the pool, even if you've closed
        it. That being said, the pool only holds weak references to connections
        returned by :meth:`getconn`, so they should be garbage collected even if
        you fail to return them.
        """
        self.connections_in_use.discard(conn)

        # Determine if the connection should be kept or discarded.
        current_time = uptime()
        if self.idle_timeout == 0 and len(
                self.idle_connections) >= self.minconn:
            conn.close()
        else:
            status = conn.info.transaction_status
            if status == _ext.TRANSACTION_STATUS_UNKNOWN:
                # The connection is broken, discard it.
                conn.close()
            else:
                if status != _ext.TRANSACTION_STATUS_IDLE:
                    # The connection is still in a transaction, roll it back.
                    conn.rollback()
                self.return_times[conn] = current_time
                self.idle_connections.append(conn)

        # Clean up the idle connections.
        if self.idle_timeout:
            # We cap the number of iterations to ensure that we don't end up in
            # an infinite loop.
            for i in range(len(self.idle_connections)):
                try:
                    conn = self.idle_connections[0]
                except IndexError:
                    break
                return_time = self.return_times.get(conn)
                if return_time is None:
                    # The connection's return time is missing, give up.
                    break
                if return_time < (current_time - self.idle_timeout):
                    # This connection has been idle too long, attempt to drop it.
                    try:
                        popped_conn = self.idle_connections.popleft()
                    except IndexError:
                        # Another thread removed this connection from the queue.
                        continue
                    if popped_conn == conn:
                        # Okay, we can close and discard this connection.
                        self.return_times.pop(conn, None)
                        conn.close()
                    else:
                        # We got a different connection, put it back.
                        self.idle_connections.appendleft(popped_conn)
                    continue
                else:
                    # The leftmost connection isn't too old, so we can assume
                    # that the other ones aren't either.
                    break

        # Open new connections if we've dropped below minconn.
        while (len(self.idle_connections) +
               len(self.connections_in_use)) < self.minconn:
            self._connect()

    def clear(self):
        """Close and discard all idle connections in the pool (regardless of the
        values of :attr:`.minconn` and :attr:`.idle_timeout`).

        This method could be useful if you have periods of high activity that
        result in many connections being opened, followed by prolonged periods
        with zero activity (no calls to :meth:`getconn` or :meth:`putconn`),
        *and* you care about closing those extraneous connections during the
        inactivity period. It's up to you to call this method in that case.

        Alternatively you may want to run a cron task to `close idle connections
        from the server <https://stackoverflow.com/a/30769511/>`_.
        """
        for conn in list(self.idle_connections):
            try:
                self.idle_connections.remove(conn)
            except ValueError:
                continue
            self.return_times.pop(conn, None)
            conn.close()
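
A minimal usage sketch for the pool above (the DSN is a placeholder and a
reachable PostgreSQL server is assumed):

pool = ConnectionPool(minconn=1, maxconn=10, idle_timeout=600,
                      dsn='dbname=example user=example')  # placeholder DSN
conn = pool.getconn()
try:
    with conn.cursor() as cur:
        cur.execute('SELECT 1')
        print(cur.fetchone())
    conn.commit()
finally:
    # Always hand the connection back; while it is checked out the pool only
    # holds a weak reference to it.
    pool.putconn(conn)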
Exemplo n.º 37
0
class SwitchboardManager(gobject.GObject):
    """Switchboard management

        @undocumented: do_get_property, do_set_property
        @group Handlers: _handle_*, _default_handler, _error_handler"""
    __gsignals__ = {
            "handler-created": (gobject.SIGNAL_RUN_FIRST,
                gobject.TYPE_NONE,
                (object, object))
            }

    def __init__(self, client):
        """Initializer

            @param client: the main Client instance"""
        gobject.GObject.__init__(self)
        self._client = weakref.proxy(client)

        self._reset()
        self._handlers_class = set()

        self._client._protocol.connect("switchboard-invitation-received",
                self._ns_switchboard_invite)

    def _reset(self):
        self._switchboards = {}
        self._orphaned_switchboards = set()
        self._requested_switchboards = {}
        self._pending_switchboards = {}
        self._orphaned_handlers = WeakSet()

    def close(self):
        for switchboard in self._orphaned_switchboards:
            switchboard.leave()
        for switchboard in self._pending_switchboards:
            switchboard.leave()
        for switchboard in self._switchboards:
            switchboard.leave()
        self._reset()

    def register_handler_class(self, handler_class, *extra_arguments):
        self._handlers_class.add((handler_class, extra_arguments))

    def register_handler(self, handler):
        self._orphaned_handlers.add(handler)

    def request_switchboard(self, handler, priority=99):
        handler_participants = handler.total_participants
        participants = ", ".join(map(lambda c: c.account, handler_participants))
        logger.info("Requesting switchboard for participant(s) %s" % participants)

        # If the handler was an orphan, it is no longer one
        self._orphaned_handlers.discard(handler)

        # Check already open switchboards
        for switchboard in self._switchboards.keys():
            switchboard_participants = set(switchboard.participants.values())
            if handler_participants == switchboard_participants and \
               (switchboard.state == msnp.ProtocolState.OPEN or \
                switchboard.state == msnp.ProtocolState.OPENING):
                logger.info("Using already opened switchboard %s" %
                        switchboard.session_id)
                self._switchboards[switchboard].add(handler)
                handler._switchboard = switchboard
                return

        # Check Orphaned switchboards
        for switchboard in list(self._orphaned_switchboards):
            switchboard_participants = set(switchboard.participants.values())
            if handler_participants == switchboard_participants and \
               (switchboard.state == msnp.ProtocolState.OPEN or \
                switchboard.state == msnp.ProtocolState.OPENING):
                logger.info("Using orphaned switchboard %s" %
                        switchboard.session_id)
                self._switchboards[switchboard] = set([handler]) #FIXME: WeakSet ?
                self._orphaned_switchboards.discard(switchboard)
                handler._switchboard = switchboard
                return

        # Check pending switchboards
        for switchboard, handlers in self._pending_switchboards.iteritems():
            pending_handler = handlers.pop()
            handlers.add(pending_handler)
            switchboard_participants = pending_handler.total_participants
            if handler_participants == switchboard_participants:
                self._pending_switchboards[switchboard].add(handler)
                logger.info("Using pending switchboard")
                return

        # Check switchboards being requested for same participants
        if participants in self._requested_switchboards:
            self._requested_switchboards[participants].add(handler)
            logger.info("Using already requested switchboard for same contacts")
            return

        logger.info("Requesting new switchboard")
        self._requested_switchboards[participants] = set([handler])
        self._client._protocol.request_switchboard(priority,
                (self._ns_switchboard_request_response, participants))

    def close_handler(self, handler):
        logger.info("Closing switchboard handler %s" % repr(handler))
        self._orphaned_handlers.discard(handler)
        handler._on_closed()
        for switchboard in self._switchboards.keys():
            handlers = self._switchboards[switchboard]
            handlers.discard(handler)
            if len(handlers) == 0:
                switchboard.leave()
                del self._switchboards[switchboard]
                self._orphaned_switchboards.add(switchboard)

        for switchboard in self._pending_switchboards.keys():
            handlers = self._pending_switchboards[switchboard]
            handlers.discard(handler)
            if len(handlers) == 0:
                del self._pending_switchboards[switchboard]
                self._orphaned_switchboards.add(switchboard)

    def _ns_switchboard_request_response(self, session, participants):
        switchboard = self._build_switchboard(session)
        handlers = self._requested_switchboards.pop(participants, set())
        self._pending_switchboards[switchboard] = handlers

    def _ns_switchboard_invite(self, protocol, session, inviter):
        switchboard = self._build_switchboard(session)
        self._orphaned_switchboards.add(switchboard)

    def _build_switchboard(self, session):
        server, session_id, key = session
        client = self._client
        proxies = client._proxies

        transport_class = client._transport_class
        transport = transport_class(server, ServerType.SWITCHBOARD, proxies)
        switchboard = msnp.SwitchboardProtocol(client, transport,
                session_id, key, proxies)
        switchboard.connect("notify::state", self._sb_state_changed)
        switchboard.connect("message-received", self._sb_message_received)
        transport.establish_connection()
        return switchboard

    def _sb_state_changed(self, switchboard, param_spec):
        state = switchboard.state
        if state == msnp.ProtocolState.OPEN:
            self._switchboards[switchboard] = set() #FIXME: WeakSet ?

            # Requested switchboards
            if switchboard in self._pending_switchboards:
                handlers = self._pending_switchboards[switchboard]
                while True:
                    try:
                        handler = handlers.pop()
                        self._switchboards[switchboard].add(handler)
                        handler._switchboard = switchboard
                    except KeyError:
                        break
                del self._pending_switchboards[switchboard]

            # Orphaned Handlers
            for handler in list(self._orphaned_handlers):
                switchboard_participants = set(switchboard.participants.values())
                handler_participants = handler.total_participants
                if handler_participants == switchboard_participants:
                    self._switchboards[switchboard].add(handler)
                    self._orphaned_handlers.discard(handler)
                    self._orphaned_switchboards.discard(switchboard)
                    handler._switchboard = switchboard

            # no one wants it, it is an orphan
            if len(self._switchboards[switchboard]) == 0:
                del self._switchboards[switchboard]
                self._orphaned_switchboards.add(switchboard)

        elif state == msnp.ProtocolState.CLOSED:
            if switchboard in self._switchboards.keys():
                for handler in self._switchboards[switchboard]:
                    self._orphaned_handlers.add(handler)
                    handler._on_switchboard_closed()
                del self._switchboards[switchboard]
            self._orphaned_switchboards.discard(switchboard)

    def _sb_message_received(self, switchboard, message):
        switchboard_participants = set(switchboard.participants.values())

        # Get current handlers for this switchboard
        if switchboard in self._switchboards.keys():
            handlers = self._switchboards[switchboard]
            handlers_class = [type(handler) for handler in handlers]
        elif switchboard in list(self._orphaned_switchboards):
            handlers = set() #FIXME: WeakSet ?
            handlers_class = []
            self._switchboards[switchboard] = handlers
        else:
            logger.warning("Message received on unknown switchboard")
            return

        # Signal message to existing handlers
        for handler in list(handlers):
            if not handler._can_handle_message(message, handler):
                continue
            handler._on_message_received(message)
            return
        # Create first handler that could handle this message
        for handler_class, extra_args in self._handlers_class:
            if not handler_class._can_handle_message(message):
                continue
            handler = handler_class.handle_message(self._client,
                    message, *extra_args)
            if handler is None:
                continue
            self._orphaned_handlers.discard(handler)
            self._orphaned_switchboards.discard(switchboard)
            handlers.add(handler)
            handler._switchboard = switchboard
            self.emit("handler-created", handler_class, handler)
            handler._on_message_received(message)
            return
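
The classes above all keep their bookkeeping (unacked messages, checked-out
connections, orphaned handlers) in a WeakSet. A short standalone sketch of why
that matters: entries disappear on their own once nothing else references
them, so the registry cannot keep dead objects alive or grow without bound.

from weakref import WeakSet


class Handler:
    """Stands in for a message, connection or switchboard handler."""


registry = WeakSet()
handler = Handler()
registry.add(handler)
print(len(registry))  # 1

del handler  # drop the last strong reference
print(len(registry))  # 0 on CPython, where refcounting reclaims it right away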