def test_weak_destroy_and_mutate_while_iterating(self): items = [ustr(c) for c in string.ascii_letters] s = WeakSet(items) @contextlib.contextmanager def testcontext(): try: it = iter(s) yielded = ustr(str(next(it))) u = ustr(str(items.pop())) if yielded == u: next(it) gc.collect() yield u finally: it = None with testcontext() as u: self.assertNotIn(u, s) with testcontext() as u: self.assertRaises(KeyError, s.remove, u) self.assertNotIn(u, s) with testcontext() as u: s.add(u) self.assertIn(u, s) t = s.copy() with testcontext() as u: s.update(t) self.assertEqual(len(s), len(t)) with testcontext() as u: s.clear() self.assertEqual(len(s), 0)
def test_weak_destroy_and_mutate_while_iterating(self): # Issue #7105: iterators shouldn't crash when a key is implicitly removed items = [SomeClass(c) for c in string.ascii_letters] s = WeakSet(items) @contextlib.contextmanager def testcontext(): try: it = iter(s) next(it) # Schedule an item for removal and recreate it u = SomeClass(str(items.pop())) test_support.gc_collect() # just in case yield u finally: it = None # should commit all removals test_support.gc_collect() with testcontext() as u: self.assertNotIn(u, s) with testcontext() as u: self.assertRaises(KeyError, s.remove, u) self.assertNotIn(u, s) with testcontext() as u: s.add(u) self.assertIn(u, s) t = s.copy() with testcontext() as u: s.update(t) self.assertEqual(len(s), len(t)) with testcontext() as u: s.clear() self.assertEqual(len(s), 0)
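For readers skimming these tests, the following standalone sketch (not part of either test module above; the Token class is illustrative) shows the WeakSet behaviour they exercise: an element silently drops out of the set once its last strong reference is gone.

import gc
import weakref

class Token:
    """Weak-referenceable stand-in for the ustr/SomeClass helpers used in the tests."""
    def __init__(self, value):
        self.value = value

s = weakref.WeakSet()
t = Token('a')
s.add(t)
assert t in s and len(s) == 1

del t         # drop the only strong reference
gc.collect()  # needed on GC implementations without immediate refcounting (e.g. PyPy)
assert len(s) == 0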
class ObjectsCleaner(object): def __init__(self, attribute_name): self.attribute_name = attribute_name self.objects = WeakSet() def add(self, obj): self.objects.add(obj) def clear(self, instance=None): if instance is None: self._clear_all_objects() else: self._clear_obj(instance) try: self.objects.remove(instance) except KeyError: pass def _clear_all_objects(self): for obj in self.objects: self._clear_obj(obj) self.objects.clear() def _clear_obj(self, obj): try: delattr(obj, self.attribute_name) except AttributeError: pass
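A brief usage sketch for the ObjectsCleaner above; the Widget class and the '_cache' attribute name are invented for illustration.

class Widget:
    pass

cleaner = ObjectsCleaner('_cache')

a, b = Widget(), Widget()
a._cache = 'expensive'
b._cache = 'expensive'
cleaner.add(a)
cleaner.add(b)

# Clear the tracked attribute on a single instance...
cleaner.clear(a)
assert not hasattr(a, '_cache')

# ...or on every tracked instance that is still alive; instances that have
# already been garbage collected simply drop out of the internal WeakSet.
cleaner.clear()
assert not hasattr(b, '_cache')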
class SurfaceUpdaters(State): ''' Keep track of the surface auto update routines so they can be saved in sessions. ''' def __init__(self, updaters=[]): from weakref import WeakSet self._updaters = WeakSet(updaters) def add(self, updater): ''' An updater is a callable object taking no arguments that updates a surface. ''' self._updaters.add(updater) def take_snapshot(self, session, flags): updaters = tuple(u for u in self._updaters if not _updater_closed(u)) data = {'updaters': updaters, 'version': 1} return data @classmethod def restore_snapshot(cls, session, data): return SurfaceUpdaters(data['updaters']) def clear(self): self._updaters.clear()
class Signal(object): def __init__(self): self._functions = WeakSet() self._methods = WeakKeyDictionary() def __call__(self, *args, **kargs): # Call handler functions to_be_removed = [] for func in self._functions.copy(): try: func(*args, **kargs) except RuntimeError: warnings.warn( 'Signals func->RuntimeError: func "{}" will be removed.'. format(func)) to_be_removed.append(func) for remove in to_be_removed: self._functions.discard(remove) # Call handler methods to_be_removed = [] emitters = self._methods.copy() for obj, funcs in emitters.items(): msg_debug('obj is type "{}"'.format(type(obj))) for func in funcs.copy(): try: func(obj, *args, **kargs) except RuntimeError: warnings.warn( 'Signals methods->RuntimeError, obj.func "{}.{}" will be removed' .format(obj, func)) to_be_removed.append((obj, func)) for obj, func in to_be_removed: self._methods[obj].discard(func) def connect(self, slot): if inspect.ismethod(slot): if slot.__self__ not in self._methods: self._methods[slot.__self__] = set() self._methods[slot.__self__].add(slot.__func__) else: self._functions.add(slot) def disconnect(self, slot): if inspect.ismethod(slot): if slot.__self__ in self._methods: self._methods[slot.__self__].remove(slot.__func__) else: if slot in self._functions: self._functions.remove(slot) def clear(self): self._functions.clear() self._methods.clear()
class Signal(object): """ Simple class to emit signals to connected callable receivers. """ def __init__(self): """ Instantiate a new object """ self.funcs = WeakSet() self.meths = WeakKeyDictionary() def connect(self, c): """ Connect a callable as receiver for the signal @param c: signal receiver @type c: Callable """ if inspect.ismethod(c): if c.__self__ not in self.meths: self.meths[c.__self__] = set() self.meths[c.__self__].add(c.__func__) else: if c not in self.funcs: self.funcs.add(c) def disconnect(self, c): """ Disconnect the callable from receiving the signal @param c: signal receiver @type c: Callable """ if inspect.ismethod(c): if c.__self__ in self.meths: self.meths[c.__self__].remove(c.__func__) else: if c in self.funcs: self.funcs.remove(c) def disconnectAll(self): """ Disconnects all signal receivers """ self.funcs.clear() self.meths.clear() def emit(self, *args, **kwargs): """ Fires the signal to all connected receivers """ for c in self.funcs: c(*args, **kwargs) for obj, funcs in self.meths.items(): for func in funcs: func(obj, *args, **kwargs)
class PrivacyService(Service): def __init__(self): self._privacy_instances = WeakSet() self._potential_vehicles_to_check = WeakSet() @property def privacy_instances(self): return self._privacy_instances def check_for_late_violators(self, sim): for privacy in self.privacy_instances: if not privacy.privacy_violators & PrivacyViolators.SIM: continue if not sim in privacy.violators: if sim in privacy.late_violators: continue if privacy.is_sim_shoo_exempt(sim): if not privacy.persistent_instance: privacy.add_exempt_sim(sim) if privacy.persistent_instance: privacy.remove_sim_from_allowed_disallowed(sim) if sim not in privacy.find_violating_sims(): continue privacy.handle_late_violator(sim) return True else: if privacy.persistent_instance: privacy.remove_sim_from_allowed_disallowed(sim) if sim not in privacy.find_violating_sims(): continue privacy.handle_late_violator(sim) return True return False def add_instance(self, instance): self._privacy_instances.add(instance) def remove_instance(self, instance): self.privacy_instances.discard(instance) def stop(self): while self.privacy_instances: instance = self.privacy_instances.pop() instance.cleanup_privacy_instance() self._potential_vehicles_to_check.clear() def get_potential_vehicle_violators(self): return self._potential_vehicles_to_check def add_vehicle_to_monitor(self, vehicle): self._potential_vehicles_to_check.add(vehicle) def remove_vehicle_to_monitor(self, vehicle): self._potential_vehicles_to_check.discard(vehicle)
class Signal(object): def __init__(self): self._functions = WeakSet() self._methods = WeakKeyDictionary() def __call__(self, *args, **kargs): # Call handler functions to_be_removed = [] for func in self._functions.copy(): try: func(*args, **kargs) except RuntimeError: warnings.warn('Signals func->RuntimeError: func "{}" will be removed.'.format(func)) to_be_removed.append(func) for remove in to_be_removed: self._functions.discard(remove) # Call handler methods to_be_removed = [] emitters = self._methods.copy() for obj, funcs in emitters.items(): msg_debug('obj is type "{}"'.format(type(obj))) for func in funcs.copy(): try: func(obj, *args, **kargs) except RuntimeError: warnings.warn('Signals methods->RuntimeError, obj.func "{}.{}" will be removed'.format(obj, func)) to_be_removed.append((obj, func)) for obj, func in to_be_removed: self._methods[obj].discard(func) def connect(self, slot): if inspect.ismethod(slot): if slot.__self__ not in self._methods: self._methods[slot.__self__] = set() self._methods[slot.__self__].add(slot.__func__) else: self._functions.add(slot) def disconnect(self, slot): if inspect.ismethod(slot): if slot.__self__ in self._methods: self._methods[slot.__self__].remove(slot.__func__) else: if slot in self._functions: self._functions.remove(slot) def clear(self): self._functions.clear() self._methods.clear()
class Signal(object): def __init__(self): self._functions = WeakSet() self._methods = WeakKeyDictionary() self._activated = True def __call__(self, *args, **kargs): # call connected functions only if activated if self._activated: # Call handler functions for func in self._functions: func(*args, **kargs) # Call handler methods for obj, funcs in self._methods.items(): for func in funcs: func(obj, *args, **kargs) def connect(self, slot): if inspect.ismethod(slot): if slot.__self__ not in self._methods: self._methods[slot.__self__] = set() self._methods[slot.__self__].add(slot.__func__) else: self._functions.add(slot) def disconnect(self, slot): if inspect.ismethod(slot): if slot.__self__ in self._methods: self._methods[slot.__self__].remove(slot.__func__) else: if slot in self._functions: self._functions.remove(slot) def clear(self): self._functions.clear() self._methods.clear() def activate(self): """ Activate the signal to emit. """ self._activated = True def deactivate(self): """ Deactivate the signal to emit. """ self._activated = False
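A quick hedged sketch of the activate/deactivate gate in the Signal variant above (the handler is illustrative; the Signal class plus its WeakSet/WeakKeyDictionary/inspect imports are assumed from the snippet): while deactivated, calling the signal silently does nothing.

hits = []

def on_fire():
    hits.append(1)

sig = Signal()
sig.connect(on_fire)

sig()              # delivered
sig.deactivate()
sig()              # swallowed while deactivated
sig.activate()
sig()              # delivered again

assert len(hits) == 2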
class ResourceContext(object): """A resource context is a "namespace" for special resources. Resource can be: docker containers, etc. All resource must be created within a ResourceContext. When a ResourceContext is cleaned up, all resources therein must be cleaned up properly. All creations/modifications of resource therein are synchronized with a lock.""" def __init__(self, name): super(ResourceContext, self).__init__() self._name = name self._lock = threading.Lock() self._catalog = dict() self._subcontexts = WeakSet() _log.debug('Created resource context %s', self._name) def ensure_resource(self, rtype, *args): """Ensure a resource exists in the context. If already exists, return the URI. If not, create it and return the URI.""" sig = _ResourceFactory.get_signature(rtype, *args) with self._lock: if sig not in self._catalog: res = _ResourceFactory.create(rtype, *args) self._catalog[sig] = res rv = self._catalog[sig].uri return rv def create_subcontext(self, name): sc = ResourceContext(name) self._subcontexts.add(sc) return sc def cleanup(self): """Recursively clean up all sub-contexts under this one, including itself. This method could be called multiple times.""" _log.debug('Destroying resource context %s', self._name) with self._lock: # Clean up descendants for sc in self._subcontexts: sc.cleanup() self._subcontexts.clear() # Clean up myself for (_, res) in self._catalog.iteritems(): res.cleanup() self._catalog.clear() __del__ = cleanup
class LazyConstants(object): def __init__(self): self._watchable_objects = WeakSet() def _watch_object(self, object): if object.watcher is not None: self._watchable_objects.add(object) def _add_dependency(self, object): pass def _unwatch_object(self, object): pass def _invalidate_all(self): for watchable_object in self._watchable_objects: watchable_object.invalidate() watchable_object.inited = False self._watchable_objects.clear()
class Signal(object): def __init__(self): self._functions = WeakSet() self._methods = WeakKeyDictionary() def __call__(self, *args, **kargs): res = [] # Call handler functions for func in self._functions: res.append(func(*args, **kargs)) # Call handler methods for obj, funcs in self._methods.items(): for func in funcs: res.append(func(obj, *args, **kargs)) return res def connect(self, slot): if inspect.ismethod(slot): if slot.__self__ not in self._methods: self._methods[slot.__self__] = set() self._methods[slot.__self__].add(slot.__func__) else: self._functions.add(slot) def disconnect(self, slot): if inspect.ismethod(slot): if slot.__self__ in self._methods: self._methods[slot.__self__].remove(slot.__func__) else: if slot in self._functions: self._functions.remove(slot) def clear(self): self._functions.clear() self._methods.clear()
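A short usage sketch for the result-collecting Signal variant above (handler names are illustrative; the Signal class and its imports are assumed from the snippet). Unlike the other variants, __call__ here returns the handlers' return values as a list; iteration order over the underlying WeakSet is not guaranteed.

def double(x):
    return x * 2

def negate(x):
    return -x

sig = Signal()
sig.connect(double)
sig.connect(negate)

results = sig(10)                   # e.g. [20, -10], order unspecified
assert sorted(results) == [-10, 20]

sig.disconnect(negate)
assert sig(10) == [20]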
class SIGNAL(object): def __init__( self, name = None ): self._functions = WeakSet() self._methods = WeakKeyDictionary() self._name = name def __call__(self, *args, **kargs): # Call handler functions for func in self._functions: func(*args, **kargs) # Call handler methods for obj, funcs in self._methods.items(): for func in funcs: func(obj, *args, **kargs) def connect(self, slot): if inspect.ismethod(slot): if slot.__self__ not in self._methods: self._methods[slot.__self__] = set() self._methods[slot.__self__].add(slot.__func__) else: self._functions.add(slot) def disconnect(self, slot): if inspect.ismethod(slot): if slot.__self__ in self._methods: self._methods[slot.__self__].remove(slot.__func__) else: if slot in self._functions: self._functions.remove(slot) def clear(self): self._functions.clear() self._methods.clear()
class SIGNAL(object): def __init__(self, name=None): self._functions = WeakSet() self._methods = WeakKeyDictionary() self._name = name def __call__(self, *args, **kargs): # Call handler functions for func in self._functions: func(*args, **kargs) # Call handler methods for obj, funcs in self._methods.items(): for func in funcs: func(obj, *args, **kargs) def connect(self, slot): if inspect.ismethod(slot): if slot.__self__ not in self._methods: self._methods[slot.__self__] = set() self._methods[slot.__self__].add(slot.__func__) else: self._functions.add(slot) def disconnect(self, slot): if inspect.ismethod(slot): if slot.__self__ in self._methods: self._methods[slot.__self__].remove(slot.__func__) else: if slot in self._functions: self._functions.remove(slot) def clear(self): self._functions.clear() self._methods.clear()
def test_weak_destroy_and_mutate_while_iterating(self): # Issue #7105: iterators shouldn't crash when a key is implicitly removed items = [ustr(c) for c in string.ascii_letters] s = WeakSet(items) @contextlib.contextmanager def testcontext(): try: it = iter(s) # Start iterator yielded = ustr(str(next(it))) # Schedule an item for removal and recreate it u = ustr(str(items.pop())) if yielded == u: # The iterator still has a reference to the removed item, # advance it (issue #20006). next(it) gc.collect() # just in case yield u finally: it = None # should commit all removals with testcontext() as u: self.assertNotIn(u, s) with testcontext() as u: self.assertRaises(KeyError, s.remove, u) self.assertNotIn(u, s) with testcontext() as u: s.add(u) self.assertIn(u, s) t = s.copy() with testcontext() as u: s.update(t) self.assertEqual(len(s), len(t)) with testcontext() as u: s.clear() self.assertEqual(len(s), 0)
class Signal(object): """ class for signal slot concept Example ------- A simple example for a callback is >>> event = Signal() >>> event.connect(mfunc) >>> # raise the signal >>> event("hello") >>> >>> # functions can be disconnected >>> even.disconnect(myfunc) Since weak references are used, care has to be taken with object functions >>> obj = MyClass() >>> event.connect(obj.myfunc) # works >>> event.connect(MyClass().myfunc) # will not work The second example for member functions will not work since the Signal class uses weakref and therefore does not increase the reference counter. MyClass() only exists for the time of the function call and will be deleted afterwards and the weakref will become invalid. """ def __init__(self): self._functions = WeakSet() self._methods = WeakKeyDictionary() def __call__(self, *args, **kargs): """ raise the event """ # Call handler functions for func in self._functions: func(*args, **kargs) # Call handler methods for obj, funcs in self._methods.items(): for func in funcs: func(obj, *args, **kargs) def connect(self, slot): """ connect a function / member function to the signal """ if inspect.ismethod(slot): if slot.__self__ not in self._methods: self._methods[slot.__self__] = set() self._methods[slot.__self__].add(slot.__func__) else: self._functions.add(slot) def disconnect(self, slot): """ disconnect a function from the signal """ if inspect.ismethod(slot): if slot.__self__ in self._methods: self._methods[slot.__self__].remove(slot.__func__) else: if slot in self._functions: self._functions.remove(slot) def clear(self): """ remove all callbacks from the signal """ self._functions.clear() self._methods.clear()
class ParallelRun: """ Encapsulates running several functions in parallel. Due to the GIL, this is mainly useful for IO-bound threads, such as downloading stuff from the Internet, or writing big buffers of data. It's recommended to use this in a `with` block, to ensure the thread pool gets properly shut down. """ def __init__(self, parallelism: Optional[int] = None) -> None: self.pool = ThreadPool(processes=(parallelism or (int(os.cpu_count() or 1) * 2))) self.task_complete_event = threading.Event() self.tasks = [] # type: List[ApplyResult[Any]] self.completed_tasks = WeakSet() # type: WeakSet[ApplyResult[Any]] def __enter__(self) -> "ParallelRun": return self def __exit__(self, exc_type, exc_val, exc_tb) -> None: # type: ignore[no-untyped-def] self.pool.terminate() def __del__(self) -> None: # opportunistic cleanup if self.pool: self.pool.terminate() self.pool = None # type: ignore[assignment] def _set_task_complete_event(self, value: Any = None) -> None: self.task_complete_event.set() def add_task( self, task: Callable[..., RT], name: Optional[str] = None, args: Tuple[Any, ...] = (), kwargs: Optional[Dict[str, Any]] = None, ) -> "ApplyResult[RT]": """ Begin running a function (in a secondary thread). :param task: The function to run. :param name: A name for the task. If none is specified, it's derived from the callable. :param args: Positional arguments, if any. :param kwargs: Keyword arguments, if any. """ if not name: name = (getattr(task, '__name__', None) or str(task)) # type: ignore[arg-type] p_task = self.pool.apply_async( task, args=args, kwds=(kwargs or {}), callback=self._set_task_complete_event, error_callback=self._set_task_complete_event, ) setattr(p_task, "name", str(name)) # noqa: B010 self.tasks.append(p_task) # Clear completed tasks, in case someone calls `add_task` # while `.wait()` is in progress. This will of course cause `.wait()` # to have to do some extra work, but that's fine. self.completed_tasks.clear() return p_task def wait( self, fail_fast: bool = True, interval: float = 0.5, callback: Optional[Callable[["ParallelRun"], None]] = None, max_wait: Optional[float] = None ) -> List["ApplyResult[Any]"]: """ Wait until all of the current tasks have finished, or until `max_wait` seconds (if set) has been waited for. If `fail_fast` is True and any of them raises an exception, the exception is reraised within a ParallelException. In this case, the rest of the tasks will continue to run. :param fail_fast: Whether to abort the `wait` as soon as a task crashes. :param interval: Loop sleep interval. :param callback: A function that is called on each wait loop iteration. Receives one parameter, the parallel run instance itself. :param max_wait: Maximum wait time, in seconds. Infinity if not set or zero. :raises TaskFailed: If any task crashes (only when fail_fast is true). :raises TimeoutError: If max_wait seconds have elapsed. """ # Keep track of tasks we've certifiably seen completed, # to avoid having to acquire a lock for the `ready` event # when we don't need to. self.completed_tasks.clear() start_time = time.time() while True: if max_wait: waited_for = (time.time() - start_time) if waited_for > max_wait: raise TimeoutError(f"Waited for {waited_for}/{max_wait} seconds.") had_any_incomplete_task = self._wait_tick(fail_fast) if callback: callback(self) # If there were no incomplete tasks left last iteration, quit. if not had_any_incomplete_task: break # If the number of completed tasks equals the number of tasks to process, # we're likewise done. 
if len(self.completed_tasks) == len(self.tasks): break # Otherwise wait for a bit before trying again (unless a task completes) self.task_complete_event.wait(interval) # Reset the flag in case it had been set self.task_complete_event.clear() return list(self.completed_tasks) # We can just as well return the completed tasks. def _wait_tick(self, fail_fast: bool) -> bool: # Keep track of whether there were any incomplete tasks this loop. had_any_incomplete_task = False for task in self.tasks: # :type: ApplyResult # If we've already seen this task completed, don't bother. if task in self.completed_tasks: continue # Poll the task to see if it's ready. is_ready = task.ready() if not is_ready: # If it's not yet ready, we need to loop once more, # and we can't check for success now. had_any_incomplete_task = True continue # Mark this task as completed (for good or for worse), # so we don't need to re-check it. self.completed_tasks.add(task) # Raise an exception if we're failing fast. # We're accessing `_success` directly instead of using # `.successful()` to avoid re-locking (as `.ready()` would). # Similarly, we access `._value` in order to avoid actually # raising the exception directly. if fail_fast and not task._success: # type: ignore[attr-defined] exc = task._value # type: ignore[attr-defined] message = f'[{task.name}] {str(exc)}' # type: ignore[attr-defined] raise TaskFailed( message, task=task, exception=exc, ) from exc return had_any_incomplete_task def maybe_raise(self) -> None: """ Raise a `TasksFailed` if any of the run tasks ended up raising an exception. """ exceptions = self.exceptions if exceptions: raise TasksFailed( '%d exceptions occurred' % len(exceptions), exception_map=exceptions, ) @property def return_values(self) -> Dict[str, Any]: """ Get the return values (if resolved yet) of the tasks. :return: dictionary of name to return value. """ return {t.name: t._value for t in self.tasks if t.ready()} # type: ignore[attr-defined] @property def exceptions(self) -> Dict[str, Exception]: """ Get the exceptions (if any) of the tasks. :return: dictionary of task name to exception. """ return { t.name: t._value # type: ignore[attr-defined] for t in self.tasks if t.ready() and not t._success # type: ignore[attr-defined] }
class Signal: ## Signal types. # These indicate the type of a signal, that is, how the signal handles calling the connected # slots. # - Direct connections immediately call the connected slots from the thread that called emit(). # - Auto connections will push the call onto the event loop if the current thread is # not the main thread, but make a direct call if it is. # - Queued connections will always push # the call on to the event loop. Direct = 1 Auto = 2 Queued = 3 ## Initialize the instance. # # \param kwargs Keyword arguments. # Possible keywords: # - type: The signal type. Defaults to Auto. def __init__(self, **kwargs): self.__functions = WeakSet() self.__methods = WeakKeyDictionary() self.__signals = WeakSet() self.__type = kwargs.get("type", Signal.Auto) self.__emitting = False self.__connect_queue = [] self.__disconnect_queue = [] ## \exception NotImplementedError def __call__(self): raise NotImplementedError("Call emit() to emit a signal") ## Get type of the signal # \return \type{int} Direct(1), Auto(2) or Queued(3) def getType(self): return self.__type ## Emit the signal which indirectly calls all of the connected slots. # # \param args The positional arguments to pass along. # \param kwargs The keyword arguments to pass along. # # \note If the Signal type is Queued and this is not called from the application thread # the call will be posted as an event to the application main thread, which means the # function will be called on the next application event loop tick. @call_if_enabled(_traceEmit, _isTraceEnabled()) def emit(self, *args, **kwargs): try: if self.__type == Signal.Queued: Signal._app.functionEvent(CallFunctionEvent(self.emit, args, kwargs)) return if self.__type == Signal.Auto: if threading.current_thread() is not Signal._app.getMainThread(): Signal._app.functionEvent(CallFunctionEvent(self.emit, args, kwargs)) return except AttributeError: # If Signal._app is not set return self.__emitting = True # Call handler functions for func in self.__functions: func(*args, **kwargs) # Call handler methods for dest, funcs in self.__methods.items(): for func in funcs: func(dest, *args, **kwargs) # Emit connected signals for signal in self.__signals: signal.emit(*args, **kwargs) self.__emitting = False for connector in self.__connect_queue: self.connect(connector) self.__connect_queue.clear() for connector in self.__disconnect_queue: self.disconnect(connector) self.__disconnect_queue.clear() ## Connect to this signal. # \param connector The signal or slot (function) to connect. @call_if_enabled(_traceConnect, _isTraceEnabled()) def connect(self, connector): if self.__emitting: # When we try to connect to a signal we change the dictionary of connectors. # This will cause an Exception since we are iterating over a dictionary that changed. # So instead, defer the connections until after we are done emitting. self.__connect_queue.append(connector) return if isinstance(connector, Signal): if connector == self: return self.__signals.add(connector) elif inspect.ismethod(connector): if connector.__self__ not in self.__methods: self.__methods[connector.__self__] = set() self.__methods[connector.__self__].add(connector.__func__) else: self.__functions.add(connector) ## Disconnect from this signal. # \param connector The signal or slot (function) to disconnect. @call_if_enabled(_traceDisconnect, _isTraceEnabled()) def disconnect(self, connector): if self.__emitting: # See above. 
self.__disconnect_queue.append(connector) return try: if connector in self.__signals: self.__signals.remove(connector) elif inspect.ismethod(connector) and connector.__self__ in self.__methods: self.__methods[connector.__self__].remove(connector.__func__) else: if connector in self.__functions: self.__functions.remove(connector) except KeyError: # Ignore errors when connector is not connected to this signal. pass ## Disconnect all connected slots. def disconnectAll(self): if self.__emitting: raise RuntimeError("Tried to disconnect signal while signal is being emitted") self.__functions.clear() self.__methods.clear() self.__signals.clear() ## To support Pickle # # Since Weak containers cannot be serialized by Pickle we just return an empty dict as state. def __getstate__(self): return {} ## To properly handle deepcopy in combination with __getstate__ # # Apparently deepcopy uses __getstate__ internally, which is not documented. The reimplementation # of __getstate__ then breaks deepcopy. On the other hand, if we do not reimplement it like that, # we break pickle. So instead make sure to also reimplement __deepcopy__. def __deepcopy__(self, memo): signal = Signal(type = self.__type) signal.__functions = copy.deepcopy(self.__functions, memo) signal.__methods = copy.deepcopy(self.__methods, memo) signal.__signals = copy.deepcopy(self.__signals, memo) return signal ## private: # To avoid circular references when importing Application, this should be # set by the Application instance. _app = None
class RemoteServiceServer(RemoteServiceBase): """The server side of a RPC communication. Considers all messages coming from the other end as requests for RPCs executions. Will perform them and send results as responses. After having created an instance and initialized it with a socket the reader loop should be started by calling run. """ def __init__(self, local_service, remote_address): """Create a responder for the given service. local_service (Service): the object whose methods should be called via RPC. For other arguments see RemoteServiceBase. """ super(RemoteServiceServer, self).__init__(remote_address) self.local_service = local_service self.pending_incoming_requests_threads = WeakSet() def finalize(self, reason=""): """See RemoteServiceBase.finalize.""" super(RemoteServiceServer, self).finalize(reason) for thread in self.pending_incoming_requests_threads: thread.kill(RPCError(reason), block=False) self.pending_incoming_requests_threads.clear() def handle(self, socket_): self.initialize(socket_, self.remote_address) self.run() def run(self): """Start listening for requests, and go on forever. Read messages from the socket and issue greenlets to parse them, execute methods and send the response to the client. This method won't return as long as there's something to read, it's therefore advisable to spawn a greenlet to call it. """ while True: try: data = self._read() except IOError: break if len(data) == 0: self.finalize("Connection closed.") break gevent.spawn(self.process_data, data) def process_data(self, data): """Handle the message. JSON-decode it and forward it to process_incoming_request (unconditionally!). data (bytes): the message read from the socket. """ # Decode the incoming data. try: message = json.loads(data.decode('utf-8')) except ValueError: self.disconnect("Bad request received") logger.warning("Cannot parse incoming message, discarding.") return self.process_incoming_request(message) def process_incoming_request(self, request): """Handle the request. Parse the request, execute the method it asks for, format the result and send the response. request (dict): the JSON-decoded request. """ # Validate the request. if not {"__id", "__method", "__data"}.issubset(iterkeys(request)): self.disconnect("Bad request received") logger.warning("Request is missing some fields, ignoring.") return # Determine the ID. id_ = request["__id"] # Store the request. self.pending_incoming_requests_threads.add(gevent.getcurrent()) # Build the response. response = {"__id": id_, "__data": None, "__error": None} method_name = request["__method"] if not hasattr(self.local_service, method_name): response["__error"] = "Method %s doesn't exist." % method_name else: method = getattr(self.local_service, method_name) if not getattr(method, "rpc_callable", False): response["__error"] = "Method %s isn't callable." % method_name else: try: response["__data"] = method(**request["__data"]) except Exception as error: response["__error"] = "%s: %s\n%s" % \ (error.__class__.__name__, error, traceback.format_exc()) # Encode it. try: data = json.dumps(response).encode('utf-8') except (TypeError, ValueError): logger.warning("JSON encoding failed.", exc_info=True) return # Send it. try: self._write(data) except IOError: # Log messages have already been produced. return
class Signal(object): ''' The Signal class is an approximation of Qt's signals+slot system. Each event that an object would like to produce requires a Signal() object. All interested parties on the event must register themselves as receivers via connect() or the '+=' operator. The event source calls emit() to produce an event (or treat it as a callable). All registered receivers will receive it, synchronously. ''' def __init__(self, description=None): ''' Create a Signal() object. |description| is an optional human-readable label for the signal. ''' self.description = description self._functions = WeakSet() self._methods = WeakKeyDictionary() self._forwards = WeakKeyDictionary() self.disabled = False def disable(self): self.disabled = True def enable(self): self.disabled = False def __len__(self): return len(self._functions) + len(self._methods) def __call__(self, *args, **kwargs): return self.emit(*args, **kwargs) def emit(self, *args, **kwargs): 'Invoke the signal with |args| and |kwargs|' results = [] if self.disabled: return results for f in self._functions: if '__predicate__' in f.__dict__: if not f.__dict__['__predicate__'](): continue results.append(f(*args, **kwargs)) for obj, funcs in self._methods.items(): for f in funcs: if '__predicate__' in f.__dict__: if not f.__dict__['__predicate__'](): continue results.append(f(obj, *args, **kwargs)) return results def connect(self, dest, predicate=None): ''' Connect |dest| to the signal. If |predicate| is set, it is treated as a nullary callable whose return value determines if the signal is fired. NOTE: Passing identical values to multiple invocations of connect() with different values of predicate will overwrite previous predicates and persist the last-used value. To achieve a similar effect, wrap |dest| in a function. ''' assert callable(dest) if inspect.ismethod(dest): obj, impl = dest.__self__, dest.__func__ if predicate is not None: impl.__dict__['__predicate__'] = predicate self._methods.setdefault(obj, set()).add(impl) else: if predicate is not None: dest.__dict__['__predicate__'] = predicate self._functions.add(dest) for signal, methods in self._forwards.items(): for method in methods: signal.connect(method) self._forwards.clear() def when(self, obj, signal, arg=False): ''' This forwards signal from obj ''' # will re-emit forwarded signal prepending obj to arguments if arg: method = MethodType( lambda *args, **kwargs: self.emit(*args, **kwargs), obj) else: method = self.emit if len(self): signal.connect(method) else: self._forwards.setdefault(signal, set()).add(method) def __iadd__(self, dest): self.connect(dest) return self def disconnect(self, dest): try: if inspect.ismethod(dest): obj, impl = dest.__self__, dest.__func__ self._methods[obj].remove(impl) else: self._functions.remove(dest) except KeyError: raise SignalDisconnectedError() def __isub__(self, dest): self.disconnect(dest) return self def reset(self): self._functions.clear() self._methods.clear() self._forwards.clear()
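A small sketch of the predicate feature documented in connect() above (handler and flag are illustrative; the Signal class and its imports are assumed from the snippet). The nullary predicate is consulted on every emit and gates whether that receiver fires.

messages = []
enabled = {'flag': True}

def on_event(payload):
    messages.append(payload)

sig = Signal(description="example")
sig.connect(on_event, predicate=lambda: enabled['flag'])

sig.emit("first")
enabled['flag'] = False
sig.emit("second")    # suppressed: the predicate now returns False

assert messages == ["first"]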
class TestWeakSet(unittest.TestCase): def setUp(self): # need to keep references to them self.items = [SomeClass(c) for c in ('a', 'b', 'c')] self.items2 = [SomeClass(c) for c in ('x', 'y', 'z')] self.letters = [SomeClass(c) for c in string.ascii_letters] self.s = WeakSet(self.items) self.d = dict.fromkeys(self.items) self.obj = SomeClass('F') self.fs = WeakSet([self.obj]) def test_methods(self): weaksetmethods = dir(WeakSet) for method in dir(set): if method == 'test_c_api' or method.startswith('_'): continue self.assertIn(method, weaksetmethods, "WeakSet missing method " + method) def test_new_or_init(self): self.assertRaises(TypeError, WeakSet, [], 2) def test_len(self): self.assertEqual(len(self.s), len(self.d)) self.assertEqual(len(self.fs), 1) del self.obj self.assertEqual(len(self.fs), 0) def test_contains(self): for c in self.letters: self.assertEqual(c in self.s, c in self.d) # 1 is not weakref'able, but that TypeError is caught by __contains__ self.assertNotIn(1, self.s) self.assertIn(self.obj, self.fs) del self.obj self.assertNotIn(SomeClass('F'), self.fs) def test_union(self): u = self.s.union(self.items2) for c in self.letters: self.assertEqual(c in u, c in self.d or c in self.items2) self.assertEqual(self.s, WeakSet(self.items)) self.assertEqual(type(u), WeakSet) self.assertRaises(TypeError, self.s.union, [[]]) for C in set, frozenset, dict.fromkeys, list, tuple: x = WeakSet(self.items + self.items2) c = C(self.items2) self.assertEqual(self.s.union(c), x) def test_or(self): i = self.s.union(self.items2) self.assertEqual(self.s | set(self.items2), i) self.assertEqual(self.s | frozenset(self.items2), i) def test_intersection(self): i = self.s.intersection(self.items2) for c in self.letters: self.assertEqual(c in i, c in self.d and c in self.items2) self.assertEqual(self.s, WeakSet(self.items)) self.assertEqual(type(i), WeakSet) for C in set, frozenset, dict.fromkeys, list, tuple: x = WeakSet([]) self.assertEqual(self.s.intersection(C(self.items2)), x) def test_isdisjoint(self): self.assertTrue(self.s.isdisjoint(WeakSet(self.items2))) self.assertTrue(not self.s.isdisjoint(WeakSet(self.letters))) def test_and(self): i = self.s.intersection(self.items2) self.assertEqual(self.s & set(self.items2), i) self.assertEqual(self.s & frozenset(self.items2), i) def test_difference(self): i = self.s.difference(self.items2) for c in self.letters: self.assertEqual(c in i, c in self.d and c not in self.items2) self.assertEqual(self.s, WeakSet(self.items)) self.assertEqual(type(i), WeakSet) self.assertRaises(TypeError, self.s.difference, [[]]) def test_sub(self): i = self.s.difference(self.items2) self.assertEqual(self.s - set(self.items2), i) self.assertEqual(self.s - frozenset(self.items2), i) def test_symmetric_difference(self): i = self.s.symmetric_difference(self.items2) for c in self.letters: self.assertEqual(c in i, (c in self.d) ^ (c in self.items2)) self.assertEqual(self.s, WeakSet(self.items)) self.assertEqual(type(i), WeakSet) self.assertRaises(TypeError, self.s.symmetric_difference, [[]]) def test_xor(self): i = self.s.symmetric_difference(self.items2) self.assertEqual(self.s ^ set(self.items2), i) self.assertEqual(self.s ^ frozenset(self.items2), i) def test_sub_and_super(self): pl, ql, rl = map(lambda s: [SomeClass(c) for c in s], ['ab', 'abcde', 'def']) p, q, r = map(WeakSet, (pl, ql, rl)) self.assertTrue(p < q) self.assertTrue(p <= q) self.assertTrue(q <= q) self.assertTrue(q > p) self.assertTrue(q >= p) self.assertFalse(q < r) self.assertFalse(q <= r) self.assertFalse(q > r) 
self.assertFalse(q >= r) self.assertTrue(set('a').issubset('abc')) self.assertTrue(set('abc').issuperset('a')) self.assertFalse(set('a').issubset('cbs')) self.assertFalse(set('cbs').issuperset('a')) def test_gc(self): # Create a nest of cycles to exercise overall ref count check s = WeakSet(Foo() for i in range(1000)) for elem in s: elem.cycle = s elem.sub = elem elem.set = WeakSet([elem]) def test_subclass_with_custom_hash(self): # Bug #1257731 class H(WeakSet): def __hash__(self): return int(id(self) & 0x7fffffff) s=H() f=set() f.add(s) self.assertIn(s, f) f.remove(s) f.add(s) f.discard(s) def test_init(self): s = WeakSet() s.__init__(self.items) self.assertEqual(s, self.s) s.__init__(self.items2) self.assertEqual(s, WeakSet(self.items2)) self.assertRaises(TypeError, s.__init__, s, 2); self.assertRaises(TypeError, s.__init__, 1); def test_constructor_identity(self): s = WeakSet(self.items) t = WeakSet(s) self.assertNotEqual(id(s), id(t)) def test_hash(self): self.assertRaises(TypeError, hash, self.s) def test_clear(self): self.s.clear() self.assertEqual(self.s, WeakSet([])) self.assertEqual(len(self.s), 0) def test_copy(self): dup = self.s.copy() self.assertEqual(self.s, dup) self.assertNotEqual(id(self.s), id(dup)) def test_add(self): x = SomeClass('Q') self.s.add(x) self.assertIn(x, self.s) dup = self.s.copy() self.s.add(x) self.assertEqual(self.s, dup) self.assertRaises(TypeError, self.s.add, []) self.fs.add(Foo()) self.assertTrue(len(self.fs) == 1) self.fs.add(self.obj) self.assertTrue(len(self.fs) == 1) def test_remove(self): x = SomeClass('a') self.s.remove(x) self.assertNotIn(x, self.s) self.assertRaises(KeyError, self.s.remove, x) self.assertRaises(TypeError, self.s.remove, []) def test_discard(self): a, q = SomeClass('a'), SomeClass('Q') self.s.discard(a) self.assertNotIn(a, self.s) self.s.discard(q) self.assertRaises(TypeError, self.s.discard, []) def test_pop(self): for i in range(len(self.s)): elem = self.s.pop() self.assertNotIn(elem, self.s) self.assertRaises(KeyError, self.s.pop) def test_update(self): retval = self.s.update(self.items2) self.assertEqual(retval, None) for c in (self.items + self.items2): self.assertIn(c, self.s) self.assertRaises(TypeError, self.s.update, [[]]) def test_update_set(self): self.s.update(set(self.items2)) for c in (self.items + self.items2): self.assertIn(c, self.s) def test_ior(self): self.s |= set(self.items2) for c in (self.items + self.items2): self.assertIn(c, self.s) def test_intersection_update(self): retval = self.s.intersection_update(self.items2) self.assertEqual(retval, None) for c in (self.items + self.items2): if c in self.items2 and c in self.items: self.assertIn(c, self.s) else: self.assertNotIn(c, self.s) self.assertRaises(TypeError, self.s.intersection_update, [[]]) def test_iand(self): self.s &= set(self.items2) for c in (self.items + self.items2): if c in self.items2 and c in self.items: self.assertIn(c, self.s) else: self.assertNotIn(c, self.s) def test_difference_update(self): retval = self.s.difference_update(self.items2) self.assertEqual(retval, None) for c in (self.items + self.items2): if c in self.items and c not in self.items2: self.assertIn(c, self.s) else: self.assertNotIn(c, self.s) self.assertRaises(TypeError, self.s.difference_update, [[]]) self.assertRaises(TypeError, self.s.symmetric_difference_update, [[]]) def test_isub(self): self.s -= set(self.items2) for c in (self.items + self.items2): if c in self.items and c not in self.items2: self.assertIn(c, self.s) else: self.assertNotIn(c, self.s) def 
test_symmetric_difference_update(self): retval = self.s.symmetric_difference_update(self.items2) self.assertEqual(retval, None) for c in (self.items + self.items2): if (c in self.items) ^ (c in self.items2): self.assertIn(c, self.s) else: self.assertNotIn(c, self.s) self.assertRaises(TypeError, self.s.symmetric_difference_update, [[]]) def test_ixor(self): self.s ^= set(self.items2) for c in (self.items + self.items2): if (c in self.items) ^ (c in self.items2): self.assertIn(c, self.s) else: self.assertNotIn(c, self.s) def test_inplace_on_self(self): t = self.s.copy() t |= t self.assertEqual(t, self.s) t &= t self.assertEqual(t, self.s) t -= t self.assertEqual(t, WeakSet()) t = self.s.copy() t ^= t self.assertEqual(t, WeakSet()) def test_eq(self): # issue 5964 self.assertTrue(self.s == self.s) self.assertTrue(self.s == WeakSet(self.items)) self.assertFalse(self.s == set(self.items)) self.assertFalse(self.s == list(self.items)) self.assertFalse(self.s == tuple(self.items)) self.assertFalse(self.s == 1) def test_weak_destroy_while_iterating(self): # Issue #7105: iterators shouldn't crash when a key is implicitly removed # Create new items to be sure no-one else holds a reference items = [SomeClass(c) for c in ('a', 'b', 'c')] s = WeakSet(items) it = iter(s) next(it) # Trigger internal iteration # Destroy an item del items[-1] gc.collect() # just in case # We have removed either the first consumed items, or another one self.assertIn(len(list(it)), [len(items), len(items) - 1]) del it # The removal has been committed self.assertEqual(len(s), len(items)) def test_weak_destroy_and_mutate_while_iterating(self): # Issue #7105: iterators shouldn't crash when a key is implicitly removed items = [SomeClass(c) for c in string.ascii_letters] s = WeakSet(items) @contextlib.contextmanager def testcontext(): try: it = iter(s) next(it) # Schedule an item for removal and recreate it u = SomeClass(str(items.pop())) gc.collect() # just in case yield u finally: it = None # should commit all removals with testcontext() as u: self.assertNotIn(u, s) with testcontext() as u: self.assertRaises(KeyError, s.remove, u) self.assertNotIn(u, s) with testcontext() as u: s.add(u) self.assertIn(u, s) t = s.copy() with testcontext() as u: s.update(t) self.assertEqual(len(s), len(t)) with testcontext() as u: s.clear() self.assertEqual(len(s), 0)
class Signal(object): """ The Signalling class """ def __init__(self, optimized=False): self._functions = WeakSet() self._after_functions = WeakSet() self._methods = WeakKeyDictionary() self._after_methods = WeakKeyDictionary() self._optimized = optimized def __call__(self, *args, **kargs): res_list = [] # Call handler functions for func in self._functions: res = func(*args, **kargs) if res and self._optimized: return res res_list.append(res) # Call handler methods for obj, funcs in self._methods.items(): for func in funcs: res = func(obj, *args, **kargs) if res and self._optimized: return res res_list.append(res) for func in self._after_functions: res = func(*args, **kargs) if res and self._optimized: return res res_list.append(res) # Call handler methods for obj, funcs in self._after_methods.items(): for func in funcs: res = func(obj, *args, **kargs) if res and self._optimized: return res res_list.append(res) if self._optimized: return None return res_list def connect(self, slot): """ @slot: The method to be called on signal emission Connects to @slot """ if inspect.ismethod(slot): if slot.__self__ not in self._methods: self._methods[slot.__self__] = set() self._methods[slot.__self__].add(slot.__func__) else: self._functions.add(slot) def connect_after(self, slot): """ @slot: The method to be called at last stage of signal emission Connects to the signal after the signals has been handled by other connect callbacks. """ if inspect.ismethod(slot): if slot.__self__ not in self._after_methods: self._after_methods[slot.__self__] = set() self._after_methods[slot.__self__].add(slot.__func__) else: self._after_functions.add(slot) def disconnect(self, slot): """ Disconnect @slot from the signal """ if inspect.ismethod(slot): if slot.__self__ in self._methods: self._methods[slot.__self__].remove(slot.__func__) elif slot.__self__ in self._after_methods: self._after_methods[slot.__self__].remove(slot.__func__) else: if slot in self._functions: self._functions.remove(slot) elif slot in self._after_functions: self._after_functions.remove(slot) def clear(self): """ Cleanup the signal """ self._functions.clear() self._methods.clear() self._after_functions.clear() self._after_methods.clear()
class WindowManager(Manager): DIAGNOSTIC_PHANTOM_KEY = "lsp_diagnostic_phantom" def __init__( self, window: sublime.Window, workspace: ProjectFolders, configs: WindowConfigManager, ) -> None: self._window = window self._configs = configs self._sessions = WeakSet() # type: WeakSet[Session] self._workspace = workspace self._pending_listeners = deque() # type: Deque[AbstractViewListener] self._listeners = WeakSet() # type: WeakSet[AbstractViewListener] self._new_listener = None # type: Optional[AbstractViewListener] self._new_session = None # type: Optional[Session] self._diagnostic_phantom_set = None # type: Optional[sublime.PhantomSet] self._panel_code_phantoms = None # type: Optional[sublime.PhantomSet] self.total_error_count = 0 self.total_warning_count = 0 sublime.set_timeout( functools.partial(self._update_panel_main_thread, _NO_DIAGNOSTICS_PLACEHOLDER, [])) def get_config_manager(self) -> WindowConfigManager: return self._configs def on_load_project_async(self) -> None: self.update_workspace_folders_async() self._configs.update() def on_post_save_project_async(self) -> None: self.on_load_project_async() def update_workspace_folders_async(self) -> None: if self._workspace.update(): workspace_folders = self._workspace.get_workspace_folders() for session in self._sessions: session.update_folders(workspace_folders) def enable_config_async(self, config_name: str) -> None: self._configs.enable_config(config_name) def disable_config_async(self, config_name: str) -> None: self._configs.disable_config(config_name) def open_location_async(self, location: Location, session_name: Optional[str], view: sublime.View, flags: int = 0, group: int = -1) -> Promise[bool]: for session in self.sessions(view): if session_name is None or session_name == session.config.name: return session.open_location_async(location, flags, group) return Promise.resolve(False) def register_listener_async(self, listener: AbstractViewListener) -> None: set_diagnostics_count(listener.view, self.total_error_count, self.total_warning_count) # Update workspace folders in case the user have changed those since window was created. # There is no currently no notification in ST that would notify about folder changes. self.update_workspace_folders_async() self._pending_listeners.appendleft(listener) if self._new_listener is None: self._dequeue_listener_async() def unregister_listener_async(self, listener: AbstractViewListener) -> None: self._listeners.discard(listener) def listeners(self) -> Generator[AbstractViewListener, None, None]: yield from self._listeners def listener_for_view( self, view: sublime.View) -> Optional[AbstractViewListener]: for listener in self.listeners(): if listener.view == view: return listener return None def _dequeue_listener_async(self) -> None: listener = None # type: Optional[AbstractViewListener] if self._new_listener is not None: listener = self._new_listener # debug("re-checking listener", listener) self._new_listener = None else: try: listener = self._pending_listeners.pop() if not listener.view.is_valid(): # debug("listener", listener, "is no longer valid") return self._dequeue_listener_async() # debug("adding new pending listener", listener) self._listeners.add(listener) except IndexError: # We have handled all pending listeners. 
self._new_session = None return if self._new_session: self._sessions.add(self._new_session) self._publish_sessions_to_listener_async(listener) if self._new_session: if not any(self._new_session.session_views_async()): self._sessions.discard(self._new_session) self._new_session.end_async() self._new_session = None config = self._needed_config(listener.view) if config: # debug("found new config for listener", listener) self._new_listener = listener self.start_async(config, listener.view) else: # debug("no new config found for listener", listener) self._new_listener = None self._dequeue_listener_async() def _publish_sessions_to_listener_async( self, listener: AbstractViewListener) -> None: inside_workspace = self._workspace.contains(listener.view) scheme = urllib.parse.urlparse(listener.get_uri()).scheme for session in self._sessions: if session.can_handle(listener.view, scheme, capability=None, inside_workspace=inside_workspace): # debug("registering session", session.config.name, "to listener", listener) try: listener.on_session_initialized_async(session) except Exception as ex: message = "failed to register session {} to listener {}".format( session.config.name, listener) exception_log(message, ex) def window(self) -> sublime.Window: return self._window def sessions( self, view: sublime.View, capability: Optional[str] = None ) -> Generator[Session, None, None]: inside_workspace = self._workspace.contains(view) sessions = list(self._sessions) uri = view.settings().get("lsp_uri") if not isinstance(uri, str): return scheme = urllib.parse.urlparse(uri).scheme for session in sessions: if session.can_handle(view, scheme, capability, inside_workspace): yield session def get_session(self, config_name: str, file_path: str) -> Optional[Session]: return self._find_session(config_name, file_path) def _can_start_config(self, config_name: str, file_path: str) -> bool: return not bool(self._find_session(config_name, file_path)) def _find_session(self, config_name: str, file_path: str) -> Optional[Session]: inside = self._workspace.contains(file_path) for session in self._sessions: if session.config.name == config_name and session.handles_path( file_path, inside): return session return None def _needed_config(self, view: sublime.View) -> Optional[ClientConfig]: configs = self._configs.match_view(view) handled = False file_name = view.file_name() inside = self._workspace.contains(view) for config in configs: handled = False for session in self._sessions: if config.name == session.config.name and session.handles_path( file_name, inside): handled = True break if not handled: return config return None def start_async(self, config: ClientConfig, initiating_view: sublime.View) -> None: config = ClientConfig.from_config(config, {}) file_path = initiating_view.file_name() or '' if not self._can_start_config(config.name, file_path): # debug('Already starting on this window:', config.name) return try: workspace_folders = sorted_workspace_folders( self._workspace.folders, file_path) plugin_class = get_plugin(config.name) variables = extract_variables(self._window) cwd = None # type: Optional[str] if plugin_class is not None: if plugin_class.needs_update_or_installation(): config.set_view_status(initiating_view, "installing...") plugin_class.install_or_update() additional_variables = plugin_class.additional_variables() if isinstance(additional_variables, dict): variables.update(additional_variables) cannot_start_reason = plugin_class.can_start( self._window, initiating_view, workspace_folders, config) if 
cannot_start_reason: config.erase_view_status(initiating_view) message = "cannot start {}: {}".format( config.name, cannot_start_reason) self._configs.disable_config(config.name, only_for_session=True) # Continue with handling pending listeners self._new_session = None sublime.set_timeout_async(self._dequeue_listener_async) return self._window.status_message(message) cwd = plugin_class.on_pre_start(self._window, initiating_view, workspace_folders, config) config.set_view_status(initiating_view, "starting...") session = Session(self, self._create_logger(config.name), workspace_folders, config, plugin_class) if cwd: transport_cwd = cwd # type: Optional[str] else: transport_cwd = workspace_folders[ 0].path if workspace_folders else None transport_config = config.resolve_transport_config(variables) transport = create_transport(transport_config, transport_cwd, session) if plugin_class: plugin_class.on_post_start(self._window, initiating_view, workspace_folders, config) config.set_view_status(initiating_view, "initialize") session.initialize_async(variables=variables, transport=transport, working_directory=cwd, init_callback=functools.partial( self._on_post_session_initialize, initiating_view)) self._new_session = session except Exception as e: message = "".join(( "Failed to start {0} - disabling for this window for the duration of the current session.\n", "Re-enable by running \"LSP: Enable Language Server In Project\" from the Command Palette.", "\n\n--- Error: ---\n{1}")).format(config.name, str(e)) exception_log( "Unable to start subprocess for {}".format(config.name), e) if isinstance(e, CalledProcessError): print("Server output:\n{}".format( e.output.decode('utf-8', 'replace'))) self._configs.disable_config(config.name, only_for_session=True) config.erase_view_status(initiating_view) sublime.message_dialog(message) # Continue with handling pending listeners self._new_session = None sublime.set_timeout_async(self._dequeue_listener_async) def _on_post_session_initialize(self, initiating_view: sublime.View, session: Session, is_error: bool = False) -> None: if is_error: session.config.erase_view_status(initiating_view) self._new_listener = None self._new_session = None else: sublime.set_timeout_async(self._dequeue_listener_async) def _create_logger(self, config_name: str) -> Logger: logger_map = { "panel": PanelLogger, "remote": RemoteLogger, } loggers = [] for logger_type in userprefs().log_server: if logger_type not in logger_map: debug( "Invalid logger type ({}) specified for log_server settings" .format(logger_type)) continue loggers.append(logger_map[logger_type]) if len(loggers) == 0: return RouterLogger() # logs nothing elif len(loggers) == 1: return loggers[0](self, config_name) else: router_logger = RouterLogger() for logger in loggers: router_logger.append(logger(self, config_name)) return router_logger def handle_message_request(self, session: Session, params: Any, request_id: Any) -> None: view = self._window.active_view() if view: MessageRequestHandler(view, session, request_id, params, session.config.name).show() def restart_sessions_async(self) -> None: self._end_sessions_async() listeners = list(self._listeners) self._listeners.clear() for listener in listeners: self.register_listener_async(listener) def _end_sessions_async(self) -> None: for session in self._sessions: session.end_async() self._sessions.clear() def end_config_sessions_async(self, config_name: str) -> None: sessions = list(self._sessions) for session in sessions: if session.config.name == config_name: 
session.end_async() self._sessions.discard(session) def get_project_path(self, file_path: str) -> Optional[str]: candidate = None # type: Optional[str] for folder in self._workspace.folders: if file_path.startswith(folder): if candidate is None or len(folder) > len(candidate): candidate = folder return candidate def should_present_diagnostics(self, uri: DocumentUri) -> Optional[str]: scheme, path = parse_uri(uri) if scheme != "file": return None if not self._workspace.contains(path): return "not inside window folders" view = self._window.active_view() if not view: return None settings = view.settings() if matches_pattern(path, settings.get("binary_file_patterns")): return "matches a pattern in binary_file_patterns" if matches_pattern(path, settings.get("file_exclude_patterns")): return "matches a pattern in file_exclude_patterns" if matches_pattern(path, settings.get("folder_exclude_patterns")): return "matches a pattern in folder_exclude_patterns" return None def on_post_exit_async(self, session: Session, exit_code: int, exception: Optional[Exception]) -> None: self._sessions.discard(session) for listener in self._listeners: listener.on_session_shutdown_async(session) if exit_code != 0 or exception: config = session.config msg = "".join(( "{0} exited with status code {1}. ", "Do you want to restart it? If you choose Cancel, it will be disabled for this window for the ", "duration of the current session. ", "Re-enable by running \"LSP: Enable Language Server In Project\" from the Command Palette." )).format(config.name, exit_code) if exception: msg += "\n\n--- Error: ---\n{}".format(str(exception)) if sublime.ok_cancel_dialog(msg, "Restart {}".format(config.name)): for listener in self._listeners: self.register_listener_async(listener) else: self._configs.disable_config(config.name, only_for_session=True) def plugin_unloaded(self) -> None: """ This is called **from the main thread** when the plugin unloads. In that case we must destroy all sessions from the main thread. 
That could lead to some dict/list being mutated while iterated over, so be careful """ self._end_sessions_async() def handle_server_message(self, server_name: str, message: str) -> None: sublime.set_timeout( lambda: log_server_message(self._window, server_name, message)) def handle_log_message(self, session: Session, params: Any) -> None: self.handle_server_message(session.config.name, extract_message(params)) def handle_stderr_log(self, session: Session, message: str) -> None: self.handle_server_message(session.config.name, message) def handle_show_message(self, session: Session, params: Any) -> None: sublime.status_message("{}: {}".format(session.config.name, extract_message(params))) def update_diagnostics_panel_async(self) -> None: to_render = [] # type: List[str] self.total_error_count = 0 self.total_warning_count = 0 listeners = list(self._listeners) prephantoms = [] # type: List[Tuple[int, int, str, str]] row = 0 contributions = OrderedDict( ) # type: OrderedDict[str, List[Tuple[str, Optional[int], Optional[str], Optional[str]]]] for session in self._sessions: local_errors, local_warnings = session.diagnostics_manager.sum_total_errors_and_warnings_async( ) self.total_error_count += local_errors self.total_warning_count += local_warnings for path, contribution in session.diagnostics_manager.diagnostics_panel_contributions_async( ): seen = path in contributions contributions.setdefault(path, []).extend(contribution) if not seen: contributions.move_to_end(path) for path, contribution in contributions.items(): to_render.append("{}:".format(path)) row += 1 for content, offset, code, href in contribution: to_render.append(content) if offset is not None and code is not None and href is not None: prephantoms.append((row, offset, code, href)) row += content.count("\n") + 1 to_render.append("") # add spacing between filenames row += 1 for listener in listeners: set_diagnostics_count(listener.view, self.total_error_count, self.total_warning_count) characters = "\n".join(to_render) if not characters: characters = _NO_DIAGNOSTICS_PLACEHOLDER sublime.set_timeout( functools.partial(self._update_panel_main_thread, characters, prephantoms)) def _update_panel_main_thread( self, characters: str, prephantoms: List[Tuple[int, int, str, str]]) -> None: panel = ensure_diagnostics_panel(self._window) if not panel or not panel.is_valid(): return panel.run_command("lsp_update_panel", {"characters": characters}) if self._panel_code_phantoms is None: self._panel_code_phantoms = sublime.PhantomSet(panel, "hrefs") phantoms = [] # type: List[sublime.Phantom] for row, col, code, href in prephantoms: point = panel.text_point(row, col) region = sublime.Region(point, point) phantoms.append( sublime.Phantom(region, make_link(href, code), sublime.LAYOUT_INLINE)) self._panel_code_phantoms.update(phantoms) def show_diagnostics_panel_async(self) -> None: if self._window.active_panel() is None: self._window.run_command("show_panel", {"panel": "output.diagnostics"})
class TestWeakSet(unittest.TestCase): def setUp(self): # need to keep references to them self.items = [ustr(c) for c in ('a', 'b', 'c')] self.items2 = [ustr(c) for c in ('x', 'y', 'z')] self.letters = [ustr(c) for c in string.ascii_letters] self.s = WeakSet(self.items) self.d = dict.fromkeys(self.items) self.obj = ustr('F') self.fs = WeakSet([self.obj]) def test_methods(self): weaksetmethods = dir(WeakSet) for method in dir(set): if method == 'test_c_api' or method.startswith('_'): continue self.assert_(method in weaksetmethods, "WeakSet missing method " + method) def test_new_or_init(self): self.assertRaises(TypeError, WeakSet, [], 2) def test_len(self): self.assertEqual(len(self.s), len(self.d)) self.assertEqual(len(self.fs), 1) del self.obj self.assertEqual(len(self.fs), 0) def test_contains(self): for c in self.letters: self.assertEqual(c in self.s, c in self.d) self.assertRaises(TypeError, self.s.__contains__, [[]]) self.assert_(self.obj in self.fs) del self.obj self.assert_(ustr('F') not in self.fs) def test_union(self): u = self.s.union(self.items2) for c in self.letters: self.assertEqual(c in u, c in self.d or c in self.items2) self.assertEqual(self.s, WeakSet(self.items)) self.assertEqual(type(u), WeakSet) self.assertRaises(TypeError, self.s.union, [[]]) for C in set, frozenset, dict.fromkeys, list, tuple: x = WeakSet(self.items + self.items2) c = C(self.items2) self.assertEqual(self.s.union(c), x) def test_or(self): i = self.s.union(self.items2) self.assertEqual(self.s | set(self.items2), i) self.assertEqual(self.s | frozenset(self.items2), i) def test_intersection(self): i = self.s.intersection(self.items2) for c in self.letters: self.assertEqual(c in i, c in self.d and c in self.items2) self.assertEqual(self.s, WeakSet(self.items)) self.assertEqual(type(i), WeakSet) for C in set, frozenset, dict.fromkeys, list, tuple: x = WeakSet([]) self.assertEqual(self.s.intersection(C(self.items2)), x) def test_isdisjoint(self): self.assert_(self.s.isdisjoint(WeakSet(self.items2))) self.assert_(not self.s.isdisjoint(WeakSet(self.letters))) def test_and(self): i = self.s.intersection(self.items2) self.assertEqual(self.s & set(self.items2), i) self.assertEqual(self.s & frozenset(self.items2), i) def test_difference(self): i = self.s.difference(self.items2) for c in self.letters: self.assertEqual(c in i, c in self.d and c not in self.items2) self.assertEqual(self.s, WeakSet(self.items)) self.assertEqual(type(i), WeakSet) self.assertRaises(TypeError, self.s.difference, [[]]) def test_sub(self): i = self.s.difference(self.items2) self.assertEqual(self.s - set(self.items2), i) self.assertEqual(self.s - frozenset(self.items2), i) def test_symmetric_difference(self): i = self.s.symmetric_difference(self.items2) for c in self.letters: self.assertEqual(c in i, (c in self.d) ^ (c in self.items2)) self.assertEqual(self.s, WeakSet(self.items)) self.assertEqual(type(i), WeakSet) self.assertRaises(TypeError, self.s.symmetric_difference, [[]]) def test_xor(self): i = self.s.symmetric_difference(self.items2) self.assertEqual(self.s ^ set(self.items2), i) self.assertEqual(self.s ^ frozenset(self.items2), i) def test_sub_and_super(self): pl, ql, rl = map(lambda s: [ustr(c) for c in s], ['ab', 'abcde', 'def']) p, q, r = map(WeakSet, (pl, ql, rl)) self.assert_(p < q) self.assert_(p <= q) self.assert_(q <= q) self.assert_(q > p) self.assert_(q >= p) self.failIf(q < r) self.failIf(q <= r) self.failIf(q > r) self.failIf(q >= r) self.assert_(set('a').issubset('abc')) self.assert_(set('abc').issuperset('a')) 
self.failIf(set('a').issubset('cbs')) self.failIf(set('cbs').issuperset('a')) def test_gc(self): # Create a nest of cycles to exercise overall ref count check class A: pass s = set(A() for i in range(1000)) for elem in s: elem.cycle = s elem.sub = elem elem.set = set([elem]) def test_subclass_with_custom_hash(self): # Bug #1257731 class H(WeakSet): def __hash__(self): return int(id(self) & 0x7fffffff) s=H() f=set() f.add(s) self.assert_(s in f) f.remove(s) f.add(s) f.discard(s) def test_init(self): s = WeakSet() s.__init__(self.items) self.assertEqual(s, self.s) s.__init__(self.items2) self.assertEqual(s, WeakSet(self.items2)) self.assertRaises(TypeError, s.__init__, s, 2); self.assertRaises(TypeError, s.__init__, 1); def test_constructor_identity(self): s = WeakSet(self.items) t = WeakSet(s) self.assertNotEqual(id(s), id(t)) def test_set_literal(self): s = set([1,2,3]) t = {1,2,3} self.assertEqual(s, t) def test_hash(self): self.assertRaises(TypeError, hash, self.s) def test_clear(self): self.s.clear() self.assertEqual(self.s, set()) self.assertEqual(len(self.s), 0) def test_copy(self): dup = self.s.copy() self.assertEqual(self.s, dup) self.assertNotEqual(id(self.s), id(dup)) def test_add(self): x = ustr('Q') self.s.add(x) self.assert_(x in self.s) dup = self.s.copy() self.s.add(x) self.assertEqual(self.s, dup) self.assertRaises(TypeError, self.s.add, []) self.fs.add(Foo()) self.assert_(len(self.fs) == 1) self.fs.add(self.obj) self.assert_(len(self.fs) == 1) def test_remove(self): x = ustr('a') self.s.remove(x) self.assert_(x not in self.s) self.assertRaises(KeyError, self.s.remove, x) self.assertRaises(TypeError, self.s.remove, []) def test_discard(self): a, q = ustr('a'), ustr('Q') self.s.discard(a) self.assert_(a not in self.s) self.s.discard(q) self.assertRaises(TypeError, self.s.discard, []) def test_pop(self): for i in range(len(self.s)): elem = self.s.pop() self.assert_(elem not in self.s) self.assertRaises(KeyError, self.s.pop) def test_update(self): retval = self.s.update(self.items2) self.assertEqual(retval, None) for c in (self.items + self.items2): self.assert_(c in self.s) self.assertRaises(TypeError, self.s.update, [[]]) def test_update_set(self): self.s.update(set(self.items2)) for c in (self.items + self.items2): self.assert_(c in self.s) def test_ior(self): self.s |= set(self.items2) for c in (self.items + self.items2): self.assert_(c in self.s) def test_intersection_update(self): retval = self.s.intersection_update(self.items2) self.assertEqual(retval, None) for c in (self.items + self.items2): if c in self.items2 and c in self.items: self.assert_(c in self.s) else: self.assert_(c not in self.s) self.assertRaises(TypeError, self.s.intersection_update, [[]]) def test_iand(self): self.s &= set(self.items2) for c in (self.items + self.items2): if c in self.items2 and c in self.items: self.assert_(c in self.s) else: self.assert_(c not in self.s) def test_difference_update(self): retval = self.s.difference_update(self.items2) self.assertEqual(retval, None) for c in (self.items + self.items2): if c in self.items and c not in self.items2: self.assert_(c in self.s) else: self.assert_(c not in self.s) self.assertRaises(TypeError, self.s.difference_update, [[]]) self.assertRaises(TypeError, self.s.symmetric_difference_update, [[]]) def test_isub(self): self.s -= set(self.items2) for c in (self.items + self.items2): if c in self.items and c not in self.items2: self.assert_(c in self.s) else: self.assert_(c not in self.s) def test_symmetric_difference_update(self): retval = 
self.s.symmetric_difference_update(self.items2) self.assertEqual(retval, None) for c in (self.items + self.items2): if (c in self.items) ^ (c in self.items2): self.assert_(c in self.s) else: self.assert_(c not in self.s) self.assertRaises(TypeError, self.s.symmetric_difference_update, [[]]) def test_ixor(self): self.s ^= set(self.items2) for c in (self.items + self.items2): if (c in self.items) ^ (c in self.items2): self.assert_(c in self.s) else: self.assert_(c not in self.s) def test_inplace_on_self(self): t = self.s.copy() t |= t self.assertEqual(t, self.s) t &= t self.assertEqual(t, self.s) t -= t self.assertEqual(t, WeakSet()) t = self.s.copy() t ^= t self.assertEqual(t, WeakSet())
class Signal: ## Signal types. # These indicate the type of a signal, that is, how the signal handles calling the connected # slots. # - Direct connections immediately call the connected slots from the thread that called emit(). # - Auto connections will push the call onto the event loop if the current thread is # not the main thread, but make a direct call if it is. # - Queued connections will always push # the call on to the event loop. Direct = 1 Auto = 2 Queued = 3 ## Initialize the instance. # # \param kwargs Keyword arguments. # Possible keywords: # - type: The signal type. Defaults to Auto. def __init__(self, **kwargs): self.__functions = WeakSet() self.__methods = WeakKeyDictionary() self.__signals = WeakSet() self.__type = kwargs.get("type", Signal.Auto) self.__emitting = False self.__connect_queue = [] self.__disconnect_queue = [] ## \exception NotImplementedError def __call__(self): raise NotImplementedError("Call emit() to emit a signal") ## Get type of the signal # \return \type{int} Direct(1), Auto(2) or Queued(3) def getType(self): return self.__type ## Emit the signal which indirectly calls all of the connected slots. # # \param args The positional arguments to pass along. # \param kargs The keyword arguments to pass along. # # \note If the Signal type is Queued and this is not called from the application thread # the call will be posted as an event to the application main thread, which means the # function will be called on the next application event loop tick. def emit(self, *args, **kargs): try: if self.__type == Signal.Queued: Signal._app.functionEvent( CallFunctionEvent(self.emit, args, kargs)) return if self.__type == Signal.Auto: if threading.current_thread() is not Signal._app.getMainThread( ): Signal._app.functionEvent( CallFunctionEvent(self.emit, args, kargs)) return except AttributeError: # If Signal._app is not set return self.__emitting = True # Call handler functions for func in self.__functions: func(*args, **kargs) # Call handler methods for dest, funcs in self.__methods.items(): for func in funcs: func(dest, *args, **kargs) # Emit connected signals for signal in self.__signals: signal.emit(*args, **kargs) self.__emitting = False for connector in self.__connect_queue: self.connect(connector) self.__connect_queue.clear() for connector in self.__disconnect_queue: self.disconnect(connector) self.__disconnect_queue.clear() ## Connect to this signal. # \param connector The signal or slot (function) to connect. def connect(self, connector): if self.__emitting: # When we try to connect to a signal we change the dictionary of connectors. # This will cause an Exception since we are iterating over a dictionary that changed. # So instead, defer the connections until after we are done emitting. self.__connect_queue.append(connector) return if type(connector) == Signal: if connector == self: return self.__signals.add(connector) elif inspect.ismethod(connector): if connector.__self__ not in self.__methods: self.__methods[connector.__self__] = set() self.__methods[connector.__self__].add(connector.__func__) else: self.__functions.add(connector) ## Disconnect from this signal. # \param connector The signal or slot (function) to disconnect. def disconnect(self, connector): if self.__emitting: # See above.
self.__disconnect_queue.append(connector) return try: if connector in self.__signals: self.__signals.remove(connector) elif inspect.ismethod( connector) and connector.__self__ in self.__methods: self.__methods[connector.__self__].remove(connector.__func__) else: if connector in self.__functions: self.__functions.remove(connector) except KeyError: #Ignore errors when connector is not connected to this signal. pass ## Disconnect all connected slots. def disconnectAll(self): if self.__emitting: raise RuntimeError( "Tried to disconnect signal while signal is being emitted") self.__functions.clear() self.__methods.clear() self.__signals.clear() ## private: # To avoid circular references when importing Application, this should be # set by the Application instance. _app = None
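A minimal usage sketch for the Direct/Auto/Queued Signal above. It assumes the class is importable as Signal and that no application object has been installed in Signal._app, so only a Direct connection emits synchronously (Auto and Queued would need the application event loop); Model and log_value are illustrative names, not part of the original code.

class Model:
    def __init__(self):
        # Direct: slots are called synchronously from the emitting thread
        self.valueChanged = Signal(type=Signal.Direct)
        self._value = 0

    def setValue(self, value):
        self._value = value
        self.valueChanged.emit(value)

def log_value(value):
    print("value is now", value)

model = Model()
model.valueChanged.connect(log_value)  # module-level function: safe to hold in the WeakSet
model.setValue(10)                     # prints "value is now 10"
# Note: a lambda or local closure passed to connect() would be dropped almost
# immediately, because only weak references to plain functions are kept.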
class TestWeakSet(unittest.TestCase): def setUp(self): self.items = [ustr(c) for c in ('a', 'b', 'c')] self.items2 = [ustr(c) for c in ('x', 'y', 'z')] self.ab_items = [ustr(c) for c in 'ab'] self.abcde_items = [ustr(c) for c in 'abcde'] self.def_items = [ustr(c) for c in 'def'] self.ab_weakset = WeakSet(self.ab_items) self.abcde_weakset = WeakSet(self.abcde_items) self.def_weakset = WeakSet(self.def_items) self.letters = [ustr(c) for c in string.ascii_letters] self.s = WeakSet(self.items) self.d = dict.fromkeys(self.items) self.obj = ustr('F') self.fs = WeakSet([self.obj]) def test_methods(self): weaksetmethods = dir(WeakSet) for method in dir(set): if method == 'test_c_api' or method.startswith('_'): continue self.assertIn(method, weaksetmethods, 'WeakSet missing method ' + method) def test_new_or_init(self): self.assertRaises(TypeError, WeakSet, [], 2) def test_len(self): self.assertEqual(len(self.s), len(self.d)) self.assertEqual(len(self.fs), 1) del self.obj self.assertEqual(len(self.fs), 0) def test_contains(self): for c in self.letters: self.assertEqual(c in self.s, c in self.d) self.assertNotIn(1, self.s) self.assertIn(self.obj, self.fs) del self.obj self.assertNotIn(ustr('F'), self.fs) def test_union(self): u = self.s.union(self.items2) for c in self.letters: self.assertEqual(c in u, c in self.d or c in self.items2) self.assertEqual(self.s, WeakSet(self.items)) self.assertEqual(type(u), WeakSet) self.assertRaises(TypeError, self.s.union, [[]]) for C in (set, frozenset, dict.fromkeys, list, tuple): x = WeakSet(self.items + self.items2) c = C(self.items2) self.assertEqual(self.s.union(c), x) del c self.assertEqual(len(u), len(self.items) + len(self.items2)) self.items2.pop() gc.collect() self.assertEqual(len(u), len(self.items) + len(self.items2)) def test_or(self): i = self.s.union(self.items2) self.assertEqual(self.s | set(self.items2), i) self.assertEqual(self.s | frozenset(self.items2), i) def test_intersection(self): s = WeakSet(self.letters) i = s.intersection(self.items2) for c in self.letters: self.assertEqual(c in i, c in self.items2 and c in self.letters) self.assertEqual(s, WeakSet(self.letters)) self.assertEqual(type(i), WeakSet) for C in (set, frozenset, dict.fromkeys, list, tuple): x = WeakSet([]) self.assertEqual(i.intersection(C(self.items)), x) self.assertEqual(len(i), len(self.items2)) self.items2.pop() gc.collect() self.assertEqual(len(i), len(self.items2)) def test_isdisjoint(self): self.assertTrue(self.s.isdisjoint(WeakSet(self.items2))) self.assertTrue(not self.s.isdisjoint(WeakSet(self.letters))) def test_and(self): i = self.s.intersection(self.items2) self.assertEqual(self.s & set(self.items2), i) self.assertEqual(self.s & frozenset(self.items2), i) def test_difference(self): i = self.s.difference(self.items2) for c in self.letters: self.assertEqual(c in i, c in self.d and c not in self.items2) self.assertEqual(self.s, WeakSet(self.items)) self.assertEqual(type(i), WeakSet) self.assertRaises(TypeError, self.s.difference, [[]]) def test_sub(self): i = self.s.difference(self.items2) self.assertEqual(self.s - set(self.items2), i) self.assertEqual(self.s - frozenset(self.items2), i) def test_symmetric_difference(self): i = self.s.symmetric_difference(self.items2) for c in self.letters: self.assertEqual(c in i, (c in self.d) ^ (c in self.items2)) self.assertEqual(self.s, WeakSet(self.items)) self.assertEqual(type(i), WeakSet) self.assertRaises(TypeError, self.s.symmetric_difference, [[]]) self.assertEqual(len(i), len(self.items) + len(self.items2)) 
self.items2.pop() gc.collect() self.assertEqual(len(i), len(self.items) + len(self.items2)) def test_xor(self): i = self.s.symmetric_difference(self.items2) self.assertEqual(self.s ^ set(self.items2), i) self.assertEqual(self.s ^ frozenset(self.items2), i) def test_sub_and_super(self): self.assertTrue(self.ab_weakset <= self.abcde_weakset) self.assertTrue(self.abcde_weakset <= self.abcde_weakset) self.assertTrue(self.abcde_weakset >= self.ab_weakset) self.assertFalse(self.abcde_weakset <= self.def_weakset) self.assertFalse(self.abcde_weakset >= self.def_weakset) self.assertTrue(set('a').issubset('abc')) self.assertTrue(set('abc').issuperset('a')) self.assertFalse(set('a').issubset('cbs')) self.assertFalse(set('cbs').issuperset('a')) def test_lt(self): self.assertTrue(self.ab_weakset < self.abcde_weakset) self.assertFalse(self.abcde_weakset < self.def_weakset) self.assertFalse(self.ab_weakset < self.ab_weakset) self.assertFalse(WeakSet() < WeakSet()) def test_gt(self): self.assertTrue(self.abcde_weakset > self.ab_weakset) self.assertFalse(self.abcde_weakset > self.def_weakset) self.assertFalse(self.ab_weakset > self.ab_weakset) self.assertFalse(WeakSet() > WeakSet()) def test_gc(self): s = WeakSet(Foo() for i in range(1000)) for elem in s: elem.cycle = s elem.sub = elem elem.set = WeakSet([elem]) def test_subclass_with_custom_hash(self): class H(WeakSet): def __hash__(self): return int(id(self) & 2147483647) s = H() f = set() f.add(s) self.assertIn(s, f) f.remove(s) f.add(s) f.discard(s) def test_init(self): s = WeakSet() s.__init__(self.items) self.assertEqual(s, self.s) s.__init__(self.items2) self.assertEqual(s, WeakSet(self.items2)) self.assertRaises(TypeError, s.__init__, s, 2) self.assertRaises(TypeError, s.__init__, 1) def test_constructor_identity(self): s = WeakSet(self.items) t = WeakSet(s) self.assertNotEqual(id(s), id(t)) def test_hash(self): self.assertRaises(TypeError, hash, self.s) def test_clear(self): self.s.clear() self.assertEqual(self.s, WeakSet([])) self.assertEqual(len(self.s), 0) def test_copy(self): dup = self.s.copy() self.assertEqual(self.s, dup) self.assertNotEqual(id(self.s), id(dup)) def test_add(self): x = ustr('Q') self.s.add(x) self.assertIn(x, self.s) dup = self.s.copy() self.s.add(x) self.assertEqual(self.s, dup) self.assertRaises(TypeError, self.s.add, []) self.fs.add(Foo()) self.assertTrue(len(self.fs) == 1) self.fs.add(self.obj) self.assertTrue(len(self.fs) == 1) def test_remove(self): x = ustr('a') self.s.remove(x) self.assertNotIn(x, self.s) self.assertRaises(KeyError, self.s.remove, x) self.assertRaises(TypeError, self.s.remove, []) def test_discard(self): a, q = ustr('a'), ustr('Q') self.s.discard(a) self.assertNotIn(a, self.s) self.s.discard(q) self.assertRaises(TypeError, self.s.discard, []) def test_pop(self): for i in range(len(self.s)): elem = self.s.pop() self.assertNotIn(elem, self.s) self.assertRaises(KeyError, self.s.pop) def test_update(self): retval = self.s.update(self.items2) self.assertEqual(retval, None) for c in (self.items + self.items2): self.assertIn(c, self.s) self.assertRaises(TypeError, self.s.update, [[]]) def test_update_set(self): self.s.update(set(self.items2)) for c in (self.items + self.items2): self.assertIn(c, self.s) def test_ior(self): self.s |= set(self.items2) for c in (self.items + self.items2): self.assertIn(c, self.s) def test_intersection_update(self): retval = self.s.intersection_update(self.items2) self.assertEqual(retval, None) for c in (self.items + self.items2): if c in self.items2 and c in self.items: 
self.assertIn(c, self.s) else: self.assertNotIn(c, self.s) self.assertRaises(TypeError, self.s.intersection_update, [[]]) def test_iand(self): self.s &= set(self.items2) for c in (self.items + self.items2): if c in self.items2 and c in self.items: self.assertIn(c, self.s) else: self.assertNotIn(c, self.s) def test_difference_update(self): retval = self.s.difference_update(self.items2) self.assertEqual(retval, None) for c in (self.items + self.items2): if c in self.items and c not in self.items2: self.assertIn(c, self.s) else: self.assertNotIn(c, self.s) self.assertRaises(TypeError, self.s.difference_update, [[]]) self.assertRaises(TypeError, self.s.symmetric_difference_update, [[]]) def test_isub(self): self.s -= set(self.items2) for c in (self.items + self.items2): if c in self.items and c not in self.items2: self.assertIn(c, self.s) else: self.assertNotIn(c, self.s) def test_symmetric_difference_update(self): retval = self.s.symmetric_difference_update(self.items2) self.assertEqual(retval, None) for c in (self.items + self.items2): if (c in self.items) ^ (c in self.items2): self.assertIn(c, self.s) else: self.assertNotIn(c, self.s) self.assertRaises(TypeError, self.s.symmetric_difference_update, [[]]) def test_ixor(self): self.s ^= set(self.items2) for c in (self.items + self.items2): if (c in self.items) ^ (c in self.items2): self.assertIn(c, self.s) else: self.assertNotIn(c, self.s) def test_inplace_on_self(self): t = self.s.copy() t |= t self.assertEqual(t, self.s) t &= t self.assertEqual(t, self.s) t -= t self.assertEqual(t, WeakSet()) t = self.s.copy() t ^= t self.assertEqual(t, WeakSet()) def test_eq(self): self.assertTrue(self.s == self.s) self.assertTrue(self.s == WeakSet(self.items)) self.assertFalse(self.s == set(self.items)) self.assertFalse(self.s == list(self.items)) self.assertFalse(self.s == tuple(self.items)) self.assertFalse(self.s == WeakSet([Foo])) self.assertFalse(self.s == 1) def test_ne(self): self.assertTrue(self.s != set(self.items)) s1 = WeakSet() s2 = WeakSet() self.assertFalse(s1 != s2) def test_weak_destroy_while_iterating(self): items = [ustr(c) for c in ('a', 'b', 'c')] s = WeakSet(items) it = iter(s) next(it) del items[-1] gc.collect() self.assertIn(len(list(it)), [len(items), len(items) - 1]) del it self.assertEqual(len(s), len(items)) def test_weak_destroy_and_mutate_while_iterating(self): items = [ustr(c) for c in string.ascii_letters] s = WeakSet(items) @contextlib.contextmanager def testcontext(): try: it = iter(s) yielded = ustr(str(next(it))) u = ustr(str(items.pop())) if yielded == u: next(it) gc.collect() yield u finally: it = None with testcontext() as u: self.assertNotIn(u, s) with testcontext() as u: self.assertRaises(KeyError, s.remove, u) self.assertNotIn(u, s) with testcontext() as u: s.add(u) self.assertIn(u, s) t = s.copy() with testcontext() as u: s.update(t) self.assertEqual(len(s), len(t)) with testcontext() as u: s.clear() self.assertEqual(len(s), 0) def test_len_cycles(self): N = 20 items = [RefCycle() for i in range(N)] s = WeakSet(items) del items it = iter(s) try: next(it) except StopIteration: pass gc.collect() n1 = len(s) del it gc.collect() n2 = len(s) self.assertIn(n1, (0, 1)) self.assertEqual(n2, 0) def test_len_race(self): self.addCleanup(gc.set_threshold, *gc.get_threshold()) for th in range(1, 100): N = 20 gc.collect(0) gc.set_threshold(th, th, th) items = [RefCycle() for i in range(N)] s = WeakSet(items) del items it = iter(s) try: next(it) except StopIteration: pass n1 = len(s) del it n2 = len(s) 
self.assertGreaterEqual(n1, 0) self.assertLessEqual(n1, N) self.assertGreaterEqual(n2, 0) self.assertLessEqual(n2, n1)
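The tests above rely on helper types (ustr, Foo, RefCycle) that are defined elsewhere in the test module and not shown in this excerpt. A sketch of what they typically look like in CPython's Lib/test/test_weakset.py follows (treat the exact bodies as an assumption); the point is that plain str instances are not weak-referenceable, and RefCycle objects die only under cyclic garbage collection.

import weakref

class ustr(str):
    # str instances cannot be weakly referenced; a trivial subclass can,
    # which is what lets the tests put "strings" into a WeakSet
    pass

class Foo:
    pass

class RefCycle:
    # participates in a reference cycle, so it is reclaimed only by the
    # cyclic garbage collector, not by reference counting alone
    def __init__(self):
        self.cycle = self

s = weakref.WeakSet([ustr('a')])  # the temporary ustr('a') is the only strong reference
# once it is collected, the entry silently disappears from the set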
class TestWeakSet(unittest.TestCase): def setUp(self): # need to keep references to them self.items = [ustr(c) for c in ('a', 'b', 'c')] self.items2 = [ustr(c) for c in ('x', 'y', 'z')] self.ab_items = [ustr(c) for c in 'ab'] self.abcde_items = [ustr(c) for c in 'abcde'] self.def_items = [ustr(c) for c in 'def'] self.ab_weakset = WeakSet(self.ab_items) self.abcde_weakset = WeakSet(self.abcde_items) self.def_weakset = WeakSet(self.def_items) self.letters = [ustr(c) for c in string.ascii_letters] self.s = WeakSet(self.items) self.d = dict.fromkeys(self.items) self.obj = ustr('F') self.fs = WeakSet([self.obj]) def test_methods(self): weaksetmethods = dir(WeakSet) for method in dir(set): if method == 'test_c_api' or method.startswith('_'): continue self.assertIn(method, weaksetmethods, "WeakSet missing method " + method) def test_new_or_init(self): self.assertRaises(TypeError, WeakSet, [], 2) @support.impl_detail("finalization", graalvm=False) def test_len(self): self.assertEqual(len(self.s), len(self.d)) self.assertEqual(len(self.fs), 1) del self.obj self.assertEqual(len(self.fs), 0) @support.impl_detail("finalization", graalvm=False) def test_contains(self): for c in self.letters: self.assertEqual(c in self.s, c in self.d) # 1 is not weakref'able, but that TypeError is caught by __contains__ self.assertNotIn(1, self.s) self.assertIn(self.obj, self.fs) del self.obj self.assertNotIn(ustr('F'), self.fs) @support.impl_detail("finalization", graalvm=False) def test_union(self): u = self.s.union(self.items2) for c in self.letters: self.assertEqual(c in u, c in self.d or c in self.items2) self.assertEqual(self.s, WeakSet(self.items)) self.assertEqual(type(u), WeakSet) self.assertRaises(TypeError, self.s.union, [[]]) for C in set, frozenset, dict.fromkeys, list, tuple: x = WeakSet(self.items + self.items2) c = C(self.items2) self.assertEqual(self.s.union(c), x) del c self.assertEqual(len(u), len(self.items) + len(self.items2)) self.items2.pop() gc.collect() self.assertEqual(len(u), len(self.items) + len(self.items2)) def test_or(self): i = self.s.union(self.items2) self.assertEqual(self.s | set(self.items2), i) self.assertEqual(self.s | frozenset(self.items2), i) @support.impl_detail("finalization", graalvm=False) def test_intersection(self): s = WeakSet(self.letters) i = s.intersection(self.items2) for c in self.letters: self.assertEqual(c in i, c in self.items2 and c in self.letters) self.assertEqual(s, WeakSet(self.letters)) self.assertEqual(type(i), WeakSet) for C in set, frozenset, dict.fromkeys, list, tuple: x = WeakSet([]) self.assertEqual(i.intersection(C(self.items)), x) self.assertEqual(len(i), len(self.items2)) self.items2.pop() gc.collect() self.assertEqual(len(i), len(self.items2)) def test_isdisjoint(self): self.assertTrue(self.s.isdisjoint(WeakSet(self.items2))) self.assertTrue(not self.s.isdisjoint(WeakSet(self.letters))) def test_and(self): i = self.s.intersection(self.items2) self.assertEqual(self.s & set(self.items2), i) self.assertEqual(self.s & frozenset(self.items2), i) def test_difference(self): i = self.s.difference(self.items2) for c in self.letters: self.assertEqual(c in i, c in self.d and c not in self.items2) self.assertEqual(self.s, WeakSet(self.items)) self.assertEqual(type(i), WeakSet) self.assertRaises(TypeError, self.s.difference, [[]]) def test_sub(self): i = self.s.difference(self.items2) self.assertEqual(self.s - set(self.items2), i) self.assertEqual(self.s - frozenset(self.items2), i) @support.impl_detail("finalization", graalvm=False) def 
test_symmetric_difference(self): i = self.s.symmetric_difference(self.items2) for c in self.letters: self.assertEqual(c in i, (c in self.d) ^ (c in self.items2)) self.assertEqual(self.s, WeakSet(self.items)) self.assertEqual(type(i), WeakSet) self.assertRaises(TypeError, self.s.symmetric_difference, [[]]) self.assertEqual(len(i), len(self.items) + len(self.items2)) self.items2.pop() gc.collect() self.assertEqual(len(i), len(self.items) + len(self.items2)) def test_xor(self): i = self.s.symmetric_difference(self.items2) self.assertEqual(self.s ^ set(self.items2), i) self.assertEqual(self.s ^ frozenset(self.items2), i) def test_sub_and_super(self): self.assertTrue(self.ab_weakset <= self.abcde_weakset) self.assertTrue(self.abcde_weakset <= self.abcde_weakset) self.assertTrue(self.abcde_weakset >= self.ab_weakset) self.assertFalse(self.abcde_weakset <= self.def_weakset) self.assertFalse(self.abcde_weakset >= self.def_weakset) self.assertTrue(set('a').issubset('abc')) self.assertTrue(set('abc').issuperset('a')) self.assertFalse(set('a').issubset('cbs')) self.assertFalse(set('cbs').issuperset('a')) def test_lt(self): self.assertTrue(self.ab_weakset < self.abcde_weakset) self.assertFalse(self.abcde_weakset < self.def_weakset) self.assertFalse(self.ab_weakset < self.ab_weakset) self.assertFalse(WeakSet() < WeakSet()) def test_gt(self): self.assertTrue(self.abcde_weakset > self.ab_weakset) self.assertFalse(self.abcde_weakset > self.def_weakset) self.assertFalse(self.ab_weakset > self.ab_weakset) self.assertFalse(WeakSet() > WeakSet()) def test_gc(self): # Create a nest of cycles to exercise overall ref count check s = WeakSet(Foo() for i in range(1000)) for elem in s: elem.cycle = s elem.sub = elem elem.set = WeakSet([elem]) def test_subclass_with_custom_hash(self): # Bug #1257731 class H(WeakSet): def __hash__(self): return int(id(self) & 0x7fffffff) s=H() f=set() f.add(s) self.assertIn(s, f) f.remove(s) f.add(s) f.discard(s) def test_init(self): s = WeakSet() s.__init__(self.items) self.assertEqual(s, self.s) s.__init__(self.items2) self.assertEqual(s, WeakSet(self.items2)) self.assertRaises(TypeError, s.__init__, s, 2); self.assertRaises(TypeError, s.__init__, 1); def test_constructor_identity(self): s = WeakSet(self.items) t = WeakSet(s) self.assertNotEqual(id(s), id(t)) def test_hash(self): self.assertRaises(TypeError, hash, self.s) def test_clear(self): self.s.clear() self.assertEqual(self.s, WeakSet([])) self.assertEqual(len(self.s), 0) def test_copy(self): dup = self.s.copy() self.assertEqual(self.s, dup) self.assertNotEqual(id(self.s), id(dup)) @support.impl_detail("refcounting", graalvm=False) def test_add(self): x = ustr('Q') self.s.add(x) self.assertIn(x, self.s) dup = self.s.copy() self.s.add(x) self.assertEqual(self.s, dup) self.assertRaises(TypeError, self.s.add, []) self.fs.add(Foo()) self.assertTrue(len(self.fs) == 1) self.fs.add(self.obj) self.assertTrue(len(self.fs) == 1) def test_remove(self): x = ustr('a') self.s.remove(x) self.assertNotIn(x, self.s) self.assertRaises(KeyError, self.s.remove, x) self.assertRaises(TypeError, self.s.remove, []) def test_discard(self): a, q = ustr('a'), ustr('Q') self.s.discard(a) self.assertNotIn(a, self.s) self.s.discard(q) self.assertRaises(TypeError, self.s.discard, []) def test_pop(self): for i in range(len(self.s)): elem = self.s.pop() self.assertNotIn(elem, self.s) self.assertRaises(KeyError, self.s.pop) def test_update(self): retval = self.s.update(self.items2) self.assertEqual(retval, None) for c in (self.items + self.items2): 
self.assertIn(c, self.s) self.assertRaises(TypeError, self.s.update, [[]]) def test_update_set(self): self.s.update(set(self.items2)) for c in (self.items + self.items2): self.assertIn(c, self.s) def test_ior(self): self.s |= set(self.items2) for c in (self.items + self.items2): self.assertIn(c, self.s) def test_intersection_update(self): retval = self.s.intersection_update(self.items2) self.assertEqual(retval, None) for c in (self.items + self.items2): if c in self.items2 and c in self.items: self.assertIn(c, self.s) else: self.assertNotIn(c, self.s) self.assertRaises(TypeError, self.s.intersection_update, [[]]) def test_iand(self): self.s &= set(self.items2) for c in (self.items + self.items2): if c in self.items2 and c in self.items: self.assertIn(c, self.s) else: self.assertNotIn(c, self.s) def test_difference_update(self): retval = self.s.difference_update(self.items2) self.assertEqual(retval, None) for c in (self.items + self.items2): if c in self.items and c not in self.items2: self.assertIn(c, self.s) else: self.assertNotIn(c, self.s) self.assertRaises(TypeError, self.s.difference_update, [[]]) self.assertRaises(TypeError, self.s.symmetric_difference_update, [[]]) def test_isub(self): self.s -= set(self.items2) for c in (self.items + self.items2): if c in self.items and c not in self.items2: self.assertIn(c, self.s) else: self.assertNotIn(c, self.s) def test_symmetric_difference_update(self): retval = self.s.symmetric_difference_update(self.items2) self.assertEqual(retval, None) for c in (self.items + self.items2): if (c in self.items) ^ (c in self.items2): self.assertIn(c, self.s) else: self.assertNotIn(c, self.s) self.assertRaises(TypeError, self.s.symmetric_difference_update, [[]]) def test_ixor(self): self.s ^= set(self.items2) for c in (self.items + self.items2): if (c in self.items) ^ (c in self.items2): self.assertIn(c, self.s) else: self.assertNotIn(c, self.s) def test_inplace_on_self(self): t = self.s.copy() t |= t self.assertEqual(t, self.s) t &= t self.assertEqual(t, self.s) t -= t self.assertEqual(t, WeakSet()) t = self.s.copy() t ^= t self.assertEqual(t, WeakSet()) def test_eq(self): # issue 5964 self.assertTrue(self.s == self.s) self.assertTrue(self.s == WeakSet(self.items)) self.assertFalse(self.s == set(self.items)) self.assertFalse(self.s == list(self.items)) self.assertFalse(self.s == tuple(self.items)) self.assertFalse(self.s == WeakSet([Foo])) self.assertFalse(self.s == 1) def test_ne(self): self.assertTrue(self.s != set(self.items)) s1 = WeakSet() s2 = WeakSet() self.assertFalse(s1 != s2) @support.impl_detail("finalization", graalvm=False) def test_weak_destroy_while_iterating(self): # Issue #7105: iterators shouldn't crash when a key is implicitly removed # Create new items to be sure no-one else holds a reference items = [ustr(c) for c in ('a', 'b', 'c')] s = WeakSet(items) it = iter(s) next(it) # Trigger internal iteration # Destroy an item del items[-1] gc.collect() # just in case # We have removed either the first consumed items, or another one self.assertIn(len(list(it)), [len(items), len(items) - 1]) del it # The removal has been committed self.assertEqual(len(s), len(items)) @support.impl_detail("finalization", graalvm=False) def test_weak_destroy_and_mutate_while_iterating(self): # Issue #7105: iterators shouldn't crash when a key is implicitly removed items = [ustr(c) for c in string.ascii_letters] s = WeakSet(items) @contextlib.contextmanager def testcontext(): try: it = iter(s) # Start iterator yielded = ustr(str(next(it))) # Schedule an item for 
removal and recreate it u = ustr(str(items.pop())) if yielded == u: # The iterator still has a reference to the removed item, # advance it (issue #20006). next(it) gc.collect() # just in case yield u finally: it = None # should commit all removals with testcontext() as u: self.assertNotIn(u, s) with testcontext() as u: self.assertRaises(KeyError, s.remove, u) self.assertNotIn(u, s) with testcontext() as u: s.add(u) self.assertIn(u, s) t = s.copy() with testcontext() as u: s.update(t) self.assertEqual(len(s), len(t)) with testcontext() as u: s.clear() self.assertEqual(len(s), 0) @support.impl_detail("finalization", graalvm=False) def test_len_cycles(self): N = 20 items = [RefCycle() for i in range(N)] s = WeakSet(items) del items it = iter(s) try: next(it) except StopIteration: pass gc.collect() n1 = len(s) del it gc.collect() n2 = len(s) # one item may be kept alive inside the iterator self.assertIn(n1, (0, 1)) self.assertEqual(n2, 0) @support.impl_detail("finalization", graalvm=False) def test_len_race(self): # Extended sanity checks for len() in the face of cyclic collection self.addCleanup(gc.set_threshold, *gc.get_threshold()) for th in range(1, 100): N = 20 gc.collect(0) gc.set_threshold(th, th, th) items = [RefCycle() for i in range(N)] s = WeakSet(items) del items # All items will be collected at next garbage collection pass it = iter(s) try: next(it) except StopIteration: pass n1 = len(s) del it n2 = len(s) self.assertGreaterEqual(n1, 0) self.assertLessEqual(n1, N) self.assertGreaterEqual(n2, 0) self.assertLessEqual(n2, n1) def test_repr(self): assert repr(self.s) == repr(self.s.data)
class Signal(object): """Signal/slot implementation. Author: Thiago Marcos P. Santos Author: Christopher S. Case Author: David H. Bronke Created: August 28, 2008 Updated: December 12, 2011 License: MIT Sample usage: \code class Model(object): def __init__(self, value): self.__value = value self.changed = Signal() def set_value(self, value): self.__value = value self.changed() # Emit signal def get_value(self): return self.__value class View(object): def __init__(self, model): self.model = model model.changed.connect(self.model_changed) def model_changed(self): print(" New value:", self.model.get_value()) print("Beginning Tests:") model = Model(10) view1 = View(model) view2 = View(model) view3 = View(model) print("Setting value to 20...") model.set_value(20) print("Deleting a view, and setting value to 30...") del view1 model.set_value(30) print("Clearing all listeners, and setting value to 40...") model.changed.clear() model.set_value(40) print("Testing non-member function...") def bar(): print(" Calling Non Class Function!") model.changed.connect(bar) model.set_value(50) \endcode """ def __init__(self): """Initialize a new signal""" self._functions = WeakSet() self._methods = WeakKeyDictionary() def __call__(self, *args, **kargs): """Emits the signal and calls all connections""" # Call handler functions for func in self._functions: func(*args, **kargs) # Call handler methods for obj, funcs in self._methods.items(): for func in funcs: func(obj, *args, **kargs) def connect(self, slot): """Connects a slot to the signal so that when the signal is emitted, the slot is called.""" if inspect.ismethod(slot): if slot.__self__ not in self._methods: self._methods[slot.__self__] = set() self._methods[slot.__self__].add(slot.__func__) else: self._functions.add(slot) def disconnect(self, slot): """Disconnects a slot from the signal""" if inspect.ismethod(slot): if slot.__self__ in self._methods: self._methods[slot.__self__].remove(slot.__func__) else: if slot in self._functions: self._functions.remove(slot) def clear(self): """Removes all slots from the signal""" self._functions.clear() self._methods.clear()
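The Signal class above (like the other variants in this document) stores bound methods as a WeakKeyDictionary keyed on the receiver (__self__) mapping to a set of plain functions (__func__), rather than putting the bound method itself into a WeakSet. The reason is a CPython detail worth spelling out; the small demo below (names are illustrative) shows it.

import weakref

class Model:
    def on_change(self):
        pass

m = Model()
# A new bound-method object is created on every attribute access...
print(m.on_change is m.on_change)   # False
# ...so a weak reference to it dies as soon as the temporary is collected.
r = weakref.ref(m.on_change)
print(r())                          # None
# Splitting the slot into (instance, function) and keying a WeakKeyDictionary
# on the instance keeps the connection alive exactly as long as the receiver.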
class TestWeakSet(unittest.TestCase): def setUp(self): # need to keep references to them self.items = [ustr(c) for c in ("a", "b", "c")] self.items2 = [ustr(c) for c in ("x", "y", "z")] self.letters = [ustr(c) for c in string.ascii_letters] self.s = WeakSet(self.items) self.d = dict.fromkeys(self.items) self.obj = ustr("F") self.fs = WeakSet([self.obj]) def test_methods(self): weaksetmethods = dir(WeakSet) for method in dir(set): if method == "test_c_api" or method.startswith("_"): continue self.assert_(method in weaksetmethods, "WeakSet missing method " + method) def test_new_or_init(self): self.assertRaises(TypeError, WeakSet, [], 2) def test_len(self): self.assertEqual(len(self.s), len(self.d)) self.assertEqual(len(self.fs), 1) del self.obj self.assertEqual(len(self.fs), 0) def test_contains(self): for c in self.letters: self.assertEqual(c in self.s, c in self.d) self.assertRaises(TypeError, self.s.__contains__, [[]]) self.assert_(self.obj in self.fs) del self.obj self.assert_(ustr("F") not in self.fs) def test_union(self): u = self.s.union(self.items2) for c in self.letters: self.assertEqual(c in u, c in self.d or c in self.items2) self.assertEqual(self.s, WeakSet(self.items)) self.assertEqual(type(u), WeakSet) self.assertRaises(TypeError, self.s.union, [[]]) for C in set, frozenset, dict.fromkeys, list, tuple: x = WeakSet(self.items + self.items2) c = C(self.items2) self.assertEqual(self.s.union(c), x) def test_or(self): i = self.s.union(self.items2) self.assertEqual(self.s | set(self.items2), i) self.assertEqual(self.s | frozenset(self.items2), i) def test_intersection(self): i = self.s.intersection(self.items2) for c in self.letters: self.assertEqual(c in i, c in self.d and c in self.items2) self.assertEqual(self.s, WeakSet(self.items)) self.assertEqual(type(i), WeakSet) for C in set, frozenset, dict.fromkeys, list, tuple: x = WeakSet([]) self.assertEqual(self.s.intersection(C(self.items2)), x) def test_isdisjoint(self): self.assert_(self.s.isdisjoint(WeakSet(self.items2))) self.assert_(not self.s.isdisjoint(WeakSet(self.letters))) def test_and(self): i = self.s.intersection(self.items2) self.assertEqual(self.s & set(self.items2), i) self.assertEqual(self.s & frozenset(self.items2), i) def test_difference(self): i = self.s.difference(self.items2) for c in self.letters: self.assertEqual(c in i, c in self.d and c not in self.items2) self.assertEqual(self.s, WeakSet(self.items)) self.assertEqual(type(i), WeakSet) self.assertRaises(TypeError, self.s.difference, [[]]) def test_sub(self): i = self.s.difference(self.items2) self.assertEqual(self.s - set(self.items2), i) self.assertEqual(self.s - frozenset(self.items2), i) def test_symmetric_difference(self): i = self.s.symmetric_difference(self.items2) for c in self.letters: self.assertEqual(c in i, (c in self.d) ^ (c in self.items2)) self.assertEqual(self.s, WeakSet(self.items)) self.assertEqual(type(i), WeakSet) self.assertRaises(TypeError, self.s.symmetric_difference, [[]]) def test_xor(self): i = self.s.symmetric_difference(self.items2) self.assertEqual(self.s ^ set(self.items2), i) self.assertEqual(self.s ^ frozenset(self.items2), i) def test_sub_and_super(self): pl, ql, rl = map(lambda s: [ustr(c) for c in s], ["ab", "abcde", "def"]) p, q, r = map(WeakSet, (pl, ql, rl)) self.assert_(p < q) self.assert_(p <= q) self.assert_(q <= q) self.assert_(q > p) self.assert_(q >= p) self.failIf(q < r) self.failIf(q <= r) self.failIf(q > r) self.failIf(q >= r) self.assert_(set("a").issubset("abc")) self.assert_(set("abc").issuperset("a")) 
self.failIf(set("a").issubset("cbs")) self.failIf(set("cbs").issuperset("a")) def test_gc(self): # Create a nest of cycles to exercise overall ref count check s = WeakSet(Foo() for i in range(1000)) for elem in s: elem.cycle = s elem.sub = elem elem.set = WeakSet([elem]) def test_subclass_with_custom_hash(self): # Bug #1257731 class H(WeakSet): def __hash__(self): return int(id(self) & 0x7FFFFFFF) s = H() f = set() f.add(s) self.assert_(s in f) f.remove(s) f.add(s) f.discard(s) def test_init(self): s = WeakSet() s.__init__(self.items) self.assertEqual(s, self.s) s.__init__(self.items2) self.assertEqual(s, WeakSet(self.items2)) self.assertRaises(TypeError, s.__init__, s, 2) self.assertRaises(TypeError, s.__init__, 1) def test_constructor_identity(self): s = WeakSet(self.items) t = WeakSet(s) self.assertNotEqual(id(s), id(t)) def test_hash(self): self.assertRaises(TypeError, hash, self.s) def test_clear(self): self.s.clear() self.assertEqual(self.s, WeakSet([])) self.assertEqual(len(self.s), 0) def test_copy(self): dup = self.s.copy() self.assertEqual(self.s, dup) self.assertNotEqual(id(self.s), id(dup)) def test_add(self): x = ustr("Q") self.s.add(x) self.assert_(x in self.s) dup = self.s.copy() self.s.add(x) self.assertEqual(self.s, dup) self.assertRaises(TypeError, self.s.add, []) self.fs.add(Foo()) self.assert_(len(self.fs) == 1) self.fs.add(self.obj) self.assert_(len(self.fs) == 1) def test_remove(self): x = ustr("a") self.s.remove(x) self.assert_(x not in self.s) self.assertRaises(KeyError, self.s.remove, x) self.assertRaises(TypeError, self.s.remove, []) def test_discard(self): a, q = ustr("a"), ustr("Q") self.s.discard(a) self.assert_(a not in self.s) self.s.discard(q) self.assertRaises(TypeError, self.s.discard, []) def test_pop(self): for i in range(len(self.s)): elem = self.s.pop() self.assert_(elem not in self.s) self.assertRaises(KeyError, self.s.pop) def test_update(self): retval = self.s.update(self.items2) self.assertEqual(retval, None) for c in self.items + self.items2: self.assert_(c in self.s) self.assertRaises(TypeError, self.s.update, [[]]) def test_update_set(self): self.s.update(set(self.items2)) for c in self.items + self.items2: self.assert_(c in self.s) def test_ior(self): self.s |= set(self.items2) for c in self.items + self.items2: self.assert_(c in self.s) def test_intersection_update(self): retval = self.s.intersection_update(self.items2) self.assertEqual(retval, None) for c in self.items + self.items2: if c in self.items2 and c in self.items: self.assert_(c in self.s) else: self.assert_(c not in self.s) self.assertRaises(TypeError, self.s.intersection_update, [[]]) def test_iand(self): self.s &= set(self.items2) for c in self.items + self.items2: if c in self.items2 and c in self.items: self.assert_(c in self.s) else: self.assert_(c not in self.s) def test_difference_update(self): retval = self.s.difference_update(self.items2) self.assertEqual(retval, None) for c in self.items + self.items2: if c in self.items and c not in self.items2: self.assert_(c in self.s) else: self.assert_(c not in self.s) self.assertRaises(TypeError, self.s.difference_update, [[]]) self.assertRaises(TypeError, self.s.symmetric_difference_update, [[]]) def test_isub(self): self.s -= set(self.items2) for c in self.items + self.items2: if c in self.items and c not in self.items2: self.assert_(c in self.s) else: self.assert_(c not in self.s) def test_symmetric_difference_update(self): retval = self.s.symmetric_difference_update(self.items2) self.assertEqual(retval, None) for c in self.items 
+ self.items2: if (c in self.items) ^ (c in self.items2): self.assert_(c in self.s) else: self.assert_(c not in self.s) self.assertRaises(TypeError, self.s.symmetric_difference_update, [[]]) def test_ixor(self): self.s ^= set(self.items2) for c in self.items + self.items2: if (c in self.items) ^ (c in self.items2): self.assert_(c in self.s) else: self.assert_(c not in self.s) def test_inplace_on_self(self): t = self.s.copy() t |= t self.assertEqual(t, self.s) t &= t self.assertEqual(t, self.s) t -= t self.assertEqual(t, WeakSet()) t = self.s.copy() t ^= t self.assertEqual(t, WeakSet()) def test_eq(self): # issue 5964 self.assertTrue(self.s == self.s) self.assertTrue(self.s == WeakSet(self.items)) self.assertFalse(self.s == set(self.items)) self.assertFalse(self.s == list(self.items)) self.assertFalse(self.s == tuple(self.items)) self.assertFalse(self.s == WeakSet([Foo])) self.assertFalse(self.s == 1)
class Signal(object): """ basic signal emitter - fired signals call connected functions directly from the calling frame; if this becomes excessive, there is also a mode to add the function calls to a queue instead of firing connected functions directly. Queue support is not complete yet, as nothing I use needs it. """ queues = {"default": deque()} debugConnection = False def __init__(self, name="", queue="", useQueue=False): """:param queue: name of queue to use, or external queue object """ self.name = name self._functions = WeakSet() self._methods = WeakKeyDictionary() # separate register for references to explicitly strong functions # they are still placed in main weak sets above, # a little bit spaghetti self._strongItemRefs = set() # is signal active self._active = True # event queue support self._useQueue = useQueue self._queue = queue or "default" def __hash__(self): return hash(id(self)) def __repr__(self): return f"Signal({self.name})" def __call__(self, *args, **kwargs): #print("emit", self.debugLog()) if not self._active: return queue = self.getQueue() # Call handler functions for func in list(self._functions): if self._useQueue: queue.append(partial(func, *args, **kwargs)) else: func(*args, **kwargs) # Call handler methods for obj, funcs in dict(self._methods).items(): for func in funcs: if self._useQueue: queue.append(partial(func, obj, *args, **kwargs)) else: func(obj, *args, **kwargs) def debugLog(self): return str((*self._functions, dict(self._methods))) def activate(self): self._active = True def mute(self): self._active = False def getQueue(self, name="default", create=True): """return one of the event queues attended by signal objects""" name = name or self._queue or "default" if name not in self.queues and create: self.queues[name] = deque() return self.queues[name] def setQueue(self, queueName): """ set signal to use given queue """ self._queue = queueName def emit(self, *args, **kwargs): """ brings this object up to rough parity with qt signals """ self(*args, **kwargs) def connect(self, slot, strong=False): """add the given callable to the function or method register; flag as strong to allow local lambdas or closures""" if inspect.ismethod(slot): if self.debugConnection: print() try: hash(slot.__self__) if slot.__self__ not in self._methods: self._methods[slot.__self__] = set() self._methods[slot.__self__].add(slot.__func__) except TypeError: self._functions.add(slot) pass else: self._functions.add(slot) if strong: self._strongItemRefs.add(slot) def disconnect(self, slot): if inspect.ismethod(slot): try: hash(slot.__self__) if slot.__self__ in self._methods: self._methods[slot.__self__].remove(slot.__func__) return except TypeError: # self._functions.remove(slot) pass if slot in self._functions: self._functions.remove(slot) if slot in self._strongItemRefs: self._strongItemRefs.remove(slot) def disconnectFromPool(self, pool): """remove any function in the pool from signal's connections """ for fn in pool: self.disconnect(fn) def clear(self): self._functions.clear() self._methods.clear()
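A minimal sketch of the queued mode of the Signal above, assuming the class is importable as Signal; log_change and the value 42 are illustrative. With useQueue=True an emit only enqueues functools.partial objects on the shared deque, and nothing runs until the queue is drained explicitly.

def log_change(value):
    print("changed to", value)

changed = Signal(name="changed", useQueue=True)
changed.connect(log_change)   # weakly referenced; fine for a module-level function
changed.emit(42)              # nothing printed yet, the call was only queued

queue = changed.getQueue()    # the shared "default" deque
while queue:
    queue.popleft()()         # runs log_change(42) now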
class Mission(Script): """Configure the game to run a mission.""" def __init__(self, name): self.name = name super(Mission, self).__init__() MISSIONS.append(self) self.handlers_installed = False def setup(self, game): """Called to bind the game to the mission. Subclasses should not need to override this. """ self.game = game self.world = game.world if not self.handlers_installed: self.world.push_handlers( self.on_object_shot, self.on_item_collected, self.on_object_tractored, self.on_region_entered, self.on_astronaut_death, self.on_object_destroyed, self.on_object_released) self.handlers_installed = True self.hud = self.game.world.hud # Set up clean state self.shot_messages = {} self.tractored_messages = {} self.region_message = None self.waiting_enter_region = False self.need_class = None self.must_tractor = None self.must_release = None self.must_earn = 0 self.critical_objects = [] self.target_objects = [] self.extra_params = {} self.persistent_items = WeakSet() # items to be killed if we restart self.nonpersistent_items = WeakSet( ) # items to be killed at mission end # Clear any leftover messages self.hud.clear_messages() def clear_items(self, nonpersistent_only=True): items = list(self.nonpersistent_items) self.nonpersistent_items.clear() if not nonpersistent_only: items.extend(self.persistent_items) self.persistent_items.clear() for o in items: try: o.kill() except Exception: # doesn't matter if it's already dead try: self.world.kill(o) except Exception: pass @script def say(self, message, colour=hud.DEFAULT_COLOUR, delay=3, sound=message_sound): """Record a message that will be shown on the message window.""" self.game.say(message, colour=colour) sound.play() self.wait(delay) def goal(self, title): self.say("New mission: " + title, colour=GREEN, delay=0, sound=goal_sound) @script def spawn(self, *args, **kwargs): """Spawn a thing.""" self.do_spawn(*args, **kwargs) @script def spawn_above_moonbase(self, class_name, *args, **kwargs): """Spawn a thing above the moonbase, wherever it may be right now.""" moon = self.world.moon position = moon.position + v(0, 220).rotated(-moon.rotation) self.do_spawn(class_name, position, *args, **kwargs) def do_spawn(self, class_name, position, signpost=None, id=None, persistent=True, delay=0, **kwargs): # Destroy any existing instance that may exist if id: try: inst = self.world.get_by_id(id) except KeyError: pass else: self.world.kill(inst) module, clsname = class_name.rsplit('.', 1) __import__(module) cls = getattr(sys.modules[module], clsname) inst = cls(self.game.world, position=position, **kwargs) if signpost: if not isinstance(signpost, six.string_types): signpost = inst.name signpost = Signpost(self.game.world, signpost, inst, GOLD) self.nonpersistent_items.add(signpost) label = None if getattr(inst, 'name', None): label = TrackingLabel(self.world, inst.name, follow=inst) self.world.spawn(label) self.nonpersistent_items.add(label) if id: self.world.set_id(inst, id) self.extra_params[id] = inst if persistent: self.persistent_items.add(inst) else: self.nonpersistent_items.add(inst) self.wait(delay) @script def show_signpost(self, id, text=None): """Re-show a signpost for a persistent named object from a previous mission.""" inst = self.world.get_by_id(id) text = text or inst.name signpost = Signpost(self.world, text, inst, GOLD) self.nonpersistent_items.add(signpost) self.next() @script def player_must_collect(self, class_name, number=1): """Wait for the player to collect number of the item denoted by class_name. 
class_name is a string of the form module.Class, so that no importing needs to occur to define the missions. """ self.need_class = class_name self.needed = number @script def player_must_tractor(self, id): """Wait for the player to tractor the given item.""" self.must_tractor = id @script def player_must_release(self, id): """Wait for the player to release the given item.""" self.must_release = (id, None) @script def player_must_release_in_region(self, id, pos, radius): """Wait for the player to release the given item within the given region.""" self.must_release = (id, (pos, radius)) @script def say_if_object_shot(self, class_name, message, colour=hud.DEFAULT_COLOUR): """Add a one-off message if an object of a given class is shot.""" self.shot_messages[class_name] = (message, colour) self.next() @script def say_if_object_tractored(self, class_name, message, colour=hud.DEFAULT_COLOUR): """Add a one-off message if an object of a given class is tractored.""" self.tractored_messages[class_name] = (message, colour) self.next() @script def say_if_region_entered(self, position, radius, message, colour=hud.DEFAULT_COLOUR): """Add a one-off message if the player enters a particular region.""" self.world.set_target_region(position, radius) self.region_message = (message, colour) self.next() @script def fail_if_object_destroyed(self, id): """Fail the mission if the object with the given id is destroyed.""" self.critical_objects.append(id) self.next() @script def player_must_enter_region(self, position, radius): """Wait for the player to enter a particular region.""" self.waiting_enter_region = True self.world.set_target_region(position, radius) @script def player_must_destroy(self, id): """Wait for the player to destroy the object with the given id.""" self.target_objects.append(id) @script def player_must_earn(self, credits): """Wait for the player to earn a certain number of credits.""" self.must_earn = credits @script def set_time_limit(self, t): """Set a time limit to complete the next activity.""" self.time_limit = int(t) self.hud.set_countdown(self.time_limit) pyglet.clock.schedule_interval(self.on_clock_tick, 1) self.next() @script def clear_time_limit(self): """Clear the time limit on the current activity.""" pyglet.clock.unschedule(self.on_clock_tick) self.hud.clear_countdown() self.next() @contextmanager def time_limit(self, t): self.set_time_limit(t) yield self.clear_time_limit() def on_clock_tick(self, dt): self.time_limit -= 1 if self.time_limit < 0: self.hud.clear_countdown() pyglet.clock.unschedule(self.on_clock_tick) self.game.say('You ran out of time!', colour=RED) self.dispatch_event('on_failure') else: self.hud.set_countdown(self.time_limit) def on_region_entered(self): if self.region_message: self.game.say(*self.region_message) self.region_message = None if self.waiting_enter_region: self.waiting_enter_region = False self.next() def on_item_collected(self, collector, item): if get_class_name(item) == self.need_class: self.needed -= 1 if self.needed <= 0: self.need_class = None self.next() else: self.game.say('Good work! You need to collect %d more.' % self.needed, colour=GREEN) elif self.must_earn > 0: self.must_earn -= item.VALUE if self.must_earn <= 0: self.next() else: self.game.say( 'Good work! You need to collect %d more credits.'
% self.must_earn) def on_object_shot(self, item): try: message, colour = self.shot_messages.pop(get_class_name(item)) except KeyError: pass else: self.game.say(message, colour=colour) def on_object_destroyed(self, item): if not hasattr(item, 'id'): return if item.id in self.critical_objects: self.game.say("{control}: Mission critical object was destroyed!", colour=RED) self.dispatch_event("on_failure") elif item.id in self.target_objects: self.target_objects.pop(self.target_objects.index(item.id)) if len(self.target_objects) == 0: self.game.say("{control}: All targets destroyed!") self.next() def on_object_tractored(self, item): try: message, colour = self.tractored_messages.pop(get_class_name(item)) except KeyError: pass else: self.game.say(message, colour=colour) if self.must_tractor: if getattr(item, 'id', None) == self.must_tractor: self.must_tractor = None self.next() def on_object_released(self, item): if self.must_release: id, pos = self.must_release if getattr(item, 'id', None) == id: if pos: # If a radius was specified, check we released within it p, r = pos if (item.position - p).length2 > r * r: return self.must_release = None self.next() def on_astronaut_death(self, astronaut): self.game.say("{control}: Oh my god! You killed %s! You bastard!" % astronaut.name) def on_failure(self): self.game.say("{control}: Mission failed! Try again.", colour=RED) def finish(self): pyglet.clock.unschedule(self.next) pyglet.clock.unschedule(self.next) pyglet.clock.unschedule(self.on_clock_tick) self.clear_items() self.clear_time_limit() self.extra_params = {} self.world.clear_target_region() if self.handlers_installed: self.world.pop_handlers() self.handlers_installed = False def rewind(self): """Finish and revert state to before the mission.""" self.finish() self.clear_items(False) def restart(self, *args): """Rewind to the start of the mission and then start it afresh.""" self.rewind() self.setup(self.game) # reinstate handlers self.start() def skip(self): """Skip the mission, but set any persistent state.""" super(Mission, self).skip() self.hud.clear_messages() self.finish()
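# A minimal, standalone sketch (not from the original Mission module) of the
# tracking pattern used above: spawned objects go into a WeakSet, so anything
# the game has already killed and released simply drops out of the set and
# never needs explicit bookkeeping. "Drone" and its kill() method are
# hypothetical stand-ins for real world objects.
import gc
from weakref import WeakSet

class Drone(object):
    def __init__(self, name):
        self.name = name
        self.alive = True

    def kill(self):
        self.alive = False

nonpersistent_items = WeakSet()
drones = [Drone('d%d' % i) for i in range(3)]
for d in drones:
    nonpersistent_items.add(d)

del drones[0]    # the world drops its last reference to one drone
gc.collect()     # ...and the WeakSet forgets it automatically

for obj in list(nonpersistent_items):
    obj.kill()   # clear_items() only has to kill what is still alive

print(len(nonpersistent_items))  # 2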
class ServiceBusClient(object): """The ServiceBusClient class defines a high level interface for getting ServiceBusSender and ServiceBusReceiver. :ivar fully_qualified_namespace: The fully qualified host name for the Service Bus namespace. The namespace format is: `<yournamespace>.servicebus.windows.net`. :vartype fully_qualified_namespace: str :param str fully_qualified_namespace: The fully qualified host name for the Service Bus namespace. The namespace format is: `<yournamespace>.servicebus.windows.net`. :param credential: The credential object used for authentication which implements a particular interface for getting tokens. It accepts credential objects generated by the azure-identity library and objects that implement the `get_token(self, *scopes)` method, or alternatively, an AzureSasCredential can also be provided. :type credential: ~azure.core.credentials.TokenCredential or ~azure.core.credentials.AzureSasCredential or ~azure.core.credentials.AzureNamedKeyCredential :keyword bool logging_enable: Whether to output network trace logs to the logger. Default is `False`. :keyword transport_type: The type of transport protocol that will be used for communicating with the Service Bus service. Default is `TransportType.Amqp` in which case port 5671 is used. If the port 5671 is unavailable/blocked in the network environment, `TransportType.AmqpOverWebsocket` could be used instead which uses port 443 for communication. :paramtype transport_type: ~azure.servicebus.TransportType :keyword Dict http_proxy: HTTP proxy settings. This must be a dictionary with the following keys: `'proxy_hostname'` (str value) and `'proxy_port'` (int value). Additionally the following keys may also be present: `'username', 'password'`. :keyword str user_agent: If specified, this will be added in front of the built-in user agent string. :keyword int retry_total: The total number of attempts to redo a failed operation when an error occurs. Default value is 3. :keyword float retry_backoff_factor: Delta back-off interval in seconds between retries. Default value is 0.8. :keyword float retry_backoff_max: Maximum back-off interval in seconds. Default value is 120. :keyword retry_mode: The delay behavior between retry attempts. Supported values are 'fixed' or 'exponential', where the default is 'exponential'. :paramtype retry_mode: str .. admonition:: Example: .. literalinclude:: ../samples/sync_samples/sample_code_servicebus.py :start-after: [START create_sb_client_sync] :end-before: [END create_sb_client_sync] :language: python :dedent: 4 :caption: Create a new instance of the ServiceBusClient. """ def __init__(self, fully_qualified_namespace: str, credential: Union["TokenCredential", "AzureSasCredential", "AzureNamedKeyCredential"], *, retry_total: int = 3, retry_backoff_factor: float = 0.8, retry_backoff_max: int = 120, retry_mode: str = "exponential", **kwargs: Any) -> None: # If the user provided http:// or sb://, let's be polite and strip that. self.fully_qualified_namespace = strip_protocol_from_uri( fully_qualified_namespace.strip()) self._credential = credential self._config = Configuration(retry_total=retry_total, retry_backoff_factor=retry_backoff_factor, retry_backoff_max=retry_backoff_max, retry_mode=retry_mode, **kwargs) self._connection = None # Optional entity name, can be the name of a Queue or Topic. Intentionally not advertised, typically not needed.
self._entity_name = kwargs.get("entity_name") self._auth_uri = "sb://{}".format(self.fully_qualified_namespace) if self._entity_name: self._auth_uri = "{}/{}".format(self._auth_uri, self._entity_name) # Internal flag for switching whether to apply connection sharing, pending a fix in the uamqp library self._connection_sharing = False self._handlers = WeakSet() # type: WeakSet def __enter__(self): if self._connection_sharing: self._create_uamqp_connection() return self def __exit__(self, *args): self.close() def _create_uamqp_connection(self): auth = create_authentication(self) self._connection = uamqp.Connection( hostname=self.fully_qualified_namespace, sasl=auth, debug=self._config.logging_enable, ) def close(self): # type: () -> None """ Close down the ServiceBus client. All spawned senders, receivers and the underlying connection will be shut down. :return: None """ for handler in self._handlers: try: handler.close() except Exception as exception: # pylint: disable=broad-except _LOGGER.error( "Client has met an exception when closing the handler: %r. Exception: %r.", handler._container_id, # pylint: disable=protected-access exception, ) self._handlers.clear() if self._connection_sharing and self._connection: self._connection.destroy() @classmethod def from_connection_string(cls, conn_str: str, *, retry_total: int = 3, retry_backoff_factor: float = 0.8, retry_backoff_max: int = 120, retry_mode: str = "exponential", **kwargs: Any) -> "ServiceBusClient": """ Create a ServiceBusClient from a connection string. :param str conn_str: The connection string of a Service Bus. :keyword bool logging_enable: Whether to output network trace logs to the logger. Default is `False`. :keyword transport_type: The type of transport protocol that will be used for communicating with the Service Bus service. Default is `TransportType.Amqp` in which case port 5671 is used. If the port 5671 is unavailable/blocked in the network environment, `TransportType.AmqpOverWebsocket` could be used instead which uses port 443 for communication. :paramtype transport_type: ~azure.servicebus.TransportType :keyword Dict http_proxy: HTTP proxy settings. This must be a dictionary with the following keys: `'proxy_hostname'` (str value) and `'proxy_port'` (int value). Additionally the following keys may also be present: `'username', 'password'`. :keyword str user_agent: If specified, this will be added in front of the built-in user agent string. :keyword int retry_total: The total number of attempts to redo a failed operation when an error occurs. Default value is 3. :keyword float retry_backoff_factor: Delta back-off interval in seconds between retries. Default value is 0.8. :keyword float retry_backoff_max: Maximum back-off interval in seconds. Default value is 120. :keyword retry_mode: The delay behavior between retry attempts. Supported values are 'fixed' or 'exponential', where the default is 'exponential'. :paramtype retry_mode: str :rtype: ~azure.servicebus.ServiceBusClient .. admonition:: Example: .. literalinclude:: ../samples/sync_samples/sample_code_servicebus.py :start-after: [START create_sb_client_from_conn_str_sync] :end-before: [END create_sb_client_from_conn_str_sync] :language: python :dedent: 4 :caption: Create a new instance of the ServiceBusClient from a connection string.
""" host, policy, key, entity_in_conn_str, token, token_expiry = _parse_conn_str( conn_str) if token and token_expiry: credential = ServiceBusSASTokenCredential(token, token_expiry) elif policy and key: credential = ServiceBusSharedKeyCredential(policy, key) # type: ignore return cls( fully_qualified_namespace=host, entity_name=entity_in_conn_str or kwargs.pop("entity_name", None), credential=credential, # type: ignore retry_total=retry_total, retry_backoff_factor=retry_backoff_factor, retry_backoff_max=retry_backoff_max, retry_mode=retry_mode, **kwargs) def get_queue_sender(self, queue_name, **kwargs): # type: (str, Any) -> ServiceBusSender """Get ServiceBusSender for the specific queue. :param str queue_name: The path of specific Service Bus Queue the client connects to. :rtype: ~azure.servicebus.ServiceBusSender .. admonition:: Example: .. literalinclude:: ../samples/sync_samples/sample_code_servicebus.py :start-after: [START create_servicebus_sender_from_sb_client_sync] :end-before: [END create_servicebus_sender_from_sb_client_sync] :language: python :dedent: 4 :caption: Create a new instance of the ServiceBusSender from ServiceBusClient. """ # pylint: disable=protected-access if self._entity_name and queue_name != self._entity_name: raise ValueError( "The queue name provided does not match the EntityPath in " "the connection string used to construct the ServiceBusClient." ) handler = ServiceBusSender( fully_qualified_namespace=self.fully_qualified_namespace, queue_name=queue_name, credential=self._credential, logging_enable=self._config.logging_enable, transport_type=self._config.transport_type, http_proxy=self._config.http_proxy, connection=self._connection, user_agent=self._config.user_agent, retry_mode=self._config.retry_mode, retry_total=self._config.retry_total, retry_backoff_factor=self._config.retry_backoff_factor, retry_backoff_max=self._config.retry_backoff_max, **kwargs) self._handlers.add(handler) return handler def get_queue_receiver( self, queue_name: str, *, session_id: Optional[Union[str, NextAvailableSessionType]] = None, sub_queue: Optional[Union[ServiceBusSubQueue, str]] = None, receive_mode: Union[ServiceBusReceiveMode, str] = ServiceBusReceiveMode.PEEK_LOCK, max_wait_time: Optional[float] = None, auto_lock_renewer: Optional[AutoLockRenewer] = None, prefetch_count: int = 0, **kwargs: Any) -> ServiceBusReceiver: """Get ServiceBusReceiver for the specific queue. :param str queue_name: The path of specific Service Bus Queue the client connects to. :keyword session_id: A specific session from which to receive. This must be specified for a sessionful queue, otherwise it must be None. In order to receive messages from the next available session, set this to ~azure.servicebus.NEXT_AVAILABLE_SESSION. :paramtype session_id: str or ~azure.servicebus.NEXT_AVAILABLE_SESSION :keyword sub_queue: If specified, the subqueue this receiver will connect to. This includes the DEAD_LETTER and TRANSFER_DEAD_LETTER queues, holds messages that can't be delivered to any receiver or messages that can't be processed. The default is None, meaning connect to the primary queue. Can be assigned values from `ServiceBusSubQueue` enum or equivalent string values "deadletter" and "transferdeadletter". :paramtype sub_queue: str or ~azure.servicebus.ServiceBusSubQueue :keyword receive_mode: The receive_mode with which messages will be retrieved from the entity. The two options are PEEK_LOCK and RECEIVE_AND_DELETE. 
Messages received with PEEK_LOCK must be settled within a given lock period before they will be removed from the queue. Messages received with RECEIVE_AND_DELETE will be immediately removed from the queue, and cannot be subsequently rejected or re-received if the client fails to process the message. The default receive_mode is PEEK_LOCK. :paramtype receive_mode: Union[~azure.servicebus.ServiceBusReceiveMode, str] :keyword Optional[float] max_wait_time: The timeout in seconds between received messages after which the receiver will automatically stop receiving. The default value is None, meaning no timeout. :keyword Optional[~azure.servicebus.AutoLockRenewer] auto_lock_renewer: An ~azure.servicebus.AutoLockRenewer can be provided such that messages are automatically registered on receipt. If the receiver is a session receiver, it will apply to the session instead. :keyword int prefetch_count: The maximum number of messages to cache with each request to the service. This setting is only for advanced performance tuning. Increasing this value will improve message throughput performance but increase the chance that messages will expire while they are cached if they're not processed fast enough. The default value is 0, meaning messages will be received from the service and processed one at a time. In the case of prefetch_count being 0, `ServiceBusReceiver.receive` would try to cache `max_message_count` (if provided) within its request to the service. :rtype: ~azure.servicebus.ServiceBusReceiver .. admonition:: Example: .. literalinclude:: ../samples/sync_samples/sample_code_servicebus.py :start-after: [START create_servicebus_receiver_from_sb_client_sync] :end-before: [END create_servicebus_receiver_from_sb_client_sync] :language: python :dedent: 4 :caption: Create a new instance of the ServiceBusReceiver from ServiceBusClient. """ if self._entity_name and queue_name != self._entity_name: raise ValueError( "The queue name provided does not match the EntityPath in " "the connection string used to construct the ServiceBusClient." ) if sub_queue and session_id: raise ValueError( "session_id and sub_queue can not be specified simultaneously. " "To connect to the sub queue of a sessionful queue, " "please set sub_queue only as sub_queue does not support session." ) try: queue_name = generate_dead_letter_entity_name( queue_name=queue_name, transfer_deadletter=(ServiceBusSubQueue(sub_queue) == ServiceBusSubQueue.TRANSFER_DEAD_LETTER), ) except ValueError: if ( sub_queue ): # If we got here and sub_queue is defined, it's an incorrect value or something unrelated. raise # pylint: disable=protected-access handler = ServiceBusReceiver( fully_qualified_namespace=self.fully_qualified_namespace, entity_name=queue_name, credential=self._credential, logging_enable=self._config.logging_enable, transport_type=self._config.transport_type, http_proxy=self._config.http_proxy, connection=self._connection, user_agent=self._config.user_agent, retry_mode=self._config.retry_mode, retry_total=self._config.retry_total, retry_backoff_factor=self._config.retry_backoff_factor, retry_backoff_max=self._config.retry_backoff_max, session_id=session_id, sub_queue=sub_queue, receive_mode=receive_mode, max_wait_time=max_wait_time, auto_lock_renewer=auto_lock_renewer, prefetch_count=prefetch_count, **kwargs) self._handlers.add(handler) return handler def get_topic_sender(self, topic_name, **kwargs): # type: (str, Any) -> ServiceBusSender """Get ServiceBusSender for the specific topic. 
:param str topic_name: The path of specific Service Bus Topic the client connects to. :rtype: ~azure.servicebus.ServiceBusSender .. admonition:: Example: .. literalinclude:: ../samples/sync_samples/sample_code_servicebus.py :start-after: [START create_topic_sender_from_sb_client_sync] :end-before: [END create_topic_sender_from_sb_client_sync] :language: python :dedent: 4 :caption: Create a new instance of the ServiceBusSender from ServiceBusClient. """ if self._entity_name and topic_name != self._entity_name: raise ValueError( "The topic name provided does not match the EntityPath in " "the connection string used to construct the ServiceBusClient." ) handler = ServiceBusSender( fully_qualified_namespace=self.fully_qualified_namespace, topic_name=topic_name, credential=self._credential, logging_enable=self._config.logging_enable, transport_type=self._config.transport_type, http_proxy=self._config.http_proxy, connection=self._connection, user_agent=self._config.user_agent, retry_mode=self._config.retry_mode, retry_total=self._config.retry_total, retry_backoff_factor=self._config.retry_backoff_factor, retry_backoff_max=self._config.retry_backoff_max, **kwargs) self._handlers.add(handler) return handler def get_subscription_receiver( self, topic_name: str, subscription_name: str, *, session_id: Optional[Union[str, NextAvailableSessionType]] = None, sub_queue: Optional[Union[ServiceBusSubQueue, str]] = None, receive_mode: Union[ServiceBusReceiveMode, str] = ServiceBusReceiveMode.PEEK_LOCK, max_wait_time: Optional[float] = None, auto_lock_renewer: Optional[AutoLockRenewer] = None, prefetch_count: int = 0, **kwargs: Any) -> ServiceBusReceiver: """Get ServiceBusReceiver for the specific subscription under the topic. :param str topic_name: The name of specific Service Bus Topic the client connects to. :param str subscription_name: The name of specific Service Bus Subscription under the given Service Bus Topic. :keyword session_id: A specific session from which to receive. This must be specified for a sessionful subscription, otherwise it must be None. In order to receive messages from the next available session, set this to ~azure.servicebus.NEXT_AVAILABLE_SESSION. :paramtype session_id: str or ~azure.servicebus.NEXT_AVAILABLE_SESSION :keyword sub_queue: If specified, the subqueue this receiver will connect to. This includes the DEAD_LETTER and TRANSFER_DEAD_LETTER queues, holds messages that can't be delivered to any receiver or messages that can't be processed. The default is None, meaning connect to the primary queue. Can be assigned values from `ServiceBusSubQueue` enum or equivalent string values "deadletter" and "transferdeadletter". :paramtype sub_queue: str or ~azure.servicebus.ServiceBusSubQueue :keyword receive_mode: The receive_mode with which messages will be retrieved from the entity. The two options are PEEK_LOCK and RECEIVE_AND_DELETE. Messages received with PEEK_LOCK must be settled within a given lock period before they will be removed from the subscription. Messages received with RECEIVE_AND_DELETE will be immediately removed from the subscription, and cannot be subsequently rejected or re-received if the client fails to process the message. The default receive_mode is PEEK_LOCK. :paramtype receive_mode: Union[~azure.servicebus.ServiceBusReceiveMode, str] :keyword Optional[float] max_wait_time: The timeout in seconds between received messages after which the receiver will automatically stop receiving. The default value is None, meaning no timeout. 
:keyword Optional[~azure.servicebus.AutoLockRenewer] auto_lock_renewer: An ~azure.servicebus.AutoLockRenewer can be provided such that messages are automatically registered on receipt. If the receiver is a session receiver, it will apply to the session instead. :keyword int prefetch_count: The maximum number of messages to cache with each request to the service. This setting is only for advanced performance tuning. Increasing this value will improve message throughput performance but increase the chance that messages will expire while they are cached if they're not processed fast enough. The default value is 0, meaning messages will be received from the service and processed one at a time. In the case of prefetch_count being 0, `ServiceBusReceiver.receive` would try to cache `max_message_count` (if provided) within its request to the service. :rtype: ~azure.servicebus.ServiceBusReceiver .. admonition:: Example: .. literalinclude:: ../samples/sync_samples/sample_code_servicebus.py :start-after: [START create_subscription_receiver_from_sb_client_sync] :end-before: [END create_subscription_receiver_from_sb_client_sync] :language: python :dedent: 4 :caption: Create a new instance of the ServiceBusReceiver from ServiceBusClient. """ # pylint: disable=protected-access if self._entity_name and topic_name != self._entity_name: raise ValueError( "The topic name provided does not match the EntityPath in " "the connection string used to construct the ServiceBusClient." ) if sub_queue and session_id: raise ValueError( "session_id and sub_queue can not be specified simultaneously. " "To connect to the sub queue of a sessionful subscription, " "please set sub_queue only as sub_queue is not sessionful.") try: entity_name = generate_dead_letter_entity_name( topic_name=topic_name, subscription_name=subscription_name, transfer_deadletter=(ServiceBusSubQueue(sub_queue) == ServiceBusSubQueue.TRANSFER_DEAD_LETTER), ) handler = ServiceBusReceiver( fully_qualified_namespace=self.fully_qualified_namespace, entity_name=entity_name, credential=self._credential, logging_enable=self._config.logging_enable, transport_type=self._config.transport_type, http_proxy=self._config.http_proxy, connection=self._connection, user_agent=self._config.user_agent, retry_mode=self._config.retry_mode, retry_total=self._config.retry_total, retry_backoff_factor=self._config.retry_backoff_factor, retry_backoff_max=self._config.retry_backoff_max, session_id=session_id, sub_queue=sub_queue, receive_mode=receive_mode, max_wait_time=max_wait_time, auto_lock_renewer=auto_lock_renewer, prefetch_count=prefetch_count, **kwargs) except ValueError: if ( sub_queue ): # If we got here and sub_queue is defined, it's an incorrect value or something unrelated. raise handler = ServiceBusReceiver( fully_qualified_namespace=self.fully_qualified_namespace, topic_name=topic_name, subscription_name=subscription_name, credential=self._credential, logging_enable=self._config.logging_enable, transport_type=self._config.transport_type, http_proxy=self._config.http_proxy, connection=self._connection, user_agent=self._config.user_agent, retry_mode=self._config.retry_mode, retry_total=self._config.retry_total, retry_backoff_factor=self._config.retry_backoff_factor, retry_backoff_max=self._config.retry_backoff_max, session_id=session_id, sub_queue=sub_queue, receive_mode=receive_mode, max_wait_time=max_wait_time, auto_lock_renewer=auto_lock_renewer, prefetch_count=prefetch_count, **kwargs) self._handlers.add(handler) return handler
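# A hedged usage sketch for the client above; the connection string and queue
# name are placeholders, and the calls shown (send_messages, the receive loop,
# complete_message) follow the documented azure-servicebus v7 API. Every
# sender/receiver created here is also tracked in the client's internal
# WeakSet of handlers, so close() shuts down whatever is still alive.
from azure.servicebus import ServiceBusClient, ServiceBusMessage

CONN_STR = "<your connection string>"  # placeholder
QUEUE = "<your queue name>"            # placeholder

with ServiceBusClient.from_connection_string(CONN_STR) as client:
    with client.get_queue_sender(queue_name=QUEUE) as sender:
        sender.send_messages(ServiceBusMessage("hello"))
    with client.get_queue_receiver(queue_name=QUEUE, max_wait_time=5) as receiver:
        for msg in receiver:
            print(str(msg))
            receiver.complete_message(msg)
# Leaving the outer `with` block calls client.close(), which walks the WeakSet
# of handlers and closes any that were not already garbage-collected.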
class Agent(AgentT, Service): """Agent. This is the type of object returned by the ``@app.agent`` decorator. """ # supervisor is None until the agent is started so we cast to simplify. supervisor: SupervisorStrategyT = cast(SupervisorStrategyT, None) # channel is loaded lazily on .channel property access # to make sure configuration is not accessed when agent created # at module-scope. _channel: Optional[ChannelT] = None _channel_arg: Optional[Union[str, ChannelT]] _channel_kwargs: Dict[str, Any] _channel_iterator: Optional[AsyncIterator] = None _sinks: List[SinkT] _actors: MutableSet[ActorRefT] _actor_by_partition: MutableMapping[TP, ActorRefT] #: This mutable set is used by the first agent we start, #: so that we can update its active_partitions later #: (in on_partitions_assigned, when we know what partitions we get). _pending_active_partitions: Optional[Set[TP]] = None _first_assignment_done: bool = False def __init__( self, fun: AgentFun, *, app: AppT, name: str = None, channel: Union[str, ChannelT] = None, concurrency: int = 1, sink: Iterable[SinkT] = None, on_error: AgentErrorHandler = None, supervisor_strategy: Type[SupervisorStrategyT] = None, help: str = None, schema: SchemaT = None, key_type: ModelArg = None, value_type: ModelArg = None, isolated_partitions: bool = False, use_reply_headers: bool = None, **kwargs: Any, ) -> None: self.app = app self.fun: AgentFun = fun self.name = name or canonshortname(self.fun) # key-type/value_type arguments only apply when a channel # is not set if schema is not None: assert channel is None or isinstance(channel, str) if key_type is not None: assert channel is None or isinstance(channel, str) self._key_type = key_type if value_type is not None: assert channel is None or isinstance(channel, str) self._schema = schema self._value_type = value_type self._channel_arg = channel self._channel_kwargs = kwargs self.concurrency = concurrency or 1 self.isolated_partitions = isolated_partitions self.help = help or "" self._sinks = list(sink) if sink is not None else [] self._on_error: Optional[AgentErrorHandler] = on_error self.supervisor_strategy = supervisor_strategy self._actors = WeakSet() self._actor_by_partition = WeakValueDictionary() if self.isolated_partitions and self.concurrency > 1: raise ImproperlyConfigured( "Agent concurrency must be 1 when using isolated partitions") self.use_reply_headers = use_reply_headers Service.__init__(self) def on_init_dependencies(self) -> Iterable[ServiceT]: """Return list of services dependencies required to start agent.""" # Agent service is now a child of app. self.beacon.reattach(self.app.agents.beacon) return [] def actor_tracebacks(self) -> List[str]: return [actor.traceback() for actor in self._actors] async def _start_one( self, *, index: Optional[int] = None, active_partitions: Optional[Set[TP]] = None, stream: StreamT = None, channel: ChannelT = None, ) -> ActorT: # an index of None means there's only one instance, # and `index is None` is used as a test by functions that # disallows concurrency. 
index = index if self.concurrency > 1 else None return await self._start_task( index=index, active_partitions=active_partitions, stream=stream, channel=channel, beacon=self.beacon, ) async def _start_one_supervised( self, index: Optional[int] = None, active_partitions: Optional[Set[TP]] = None, stream: StreamT = None, ) -> ActorT: aref = await self._start_one( index=index, active_partitions=active_partitions, stream=stream, ) self.supervisor.add(aref) await aref.maybe_start() return aref async def _start_for_partitions(self, active_partitions: Set[TP]) -> ActorT: assert active_partitions self.log.info("Starting actor for partitions %s", active_partitions) return await self._start_one_supervised(None, active_partitions) async def on_start(self) -> None: """Call when an agent starts.""" self.supervisor = self._new_supervisor() await self._on_start_supervisor() def _new_supervisor(self) -> SupervisorStrategyT: return self._get_supervisor_strategy()( max_restarts=100.0, over=1.0, replacement=self._replace_actor, loop=self.loop, beacon=self.beacon, ) async def _replace_actor(self, service: ServiceT, index: int) -> ServiceT: aref = cast(ActorRefT, service) return await self._start_one( index=index, active_partitions=aref.active_partitions, stream=aref.stream, channel=cast(ChannelT, aref.stream.channel), ) def _get_supervisor_strategy(self) -> Type[SupervisorStrategyT]: SupervisorStrategy = self.supervisor_strategy if SupervisorStrategy is None: return cast(Type[SupervisorStrategyT], self.app.conf.agent_supervisor) else: return SupervisorStrategy async def _on_start_supervisor(self) -> None: active_partitions = self._get_active_partitions() channel: ChannelT = cast(ChannelT, None) for i in range(self.concurrency): res = await self._start_one( index=i, active_partitions=active_partitions, channel=channel, ) if channel is None: # First concurrency actor creates channel, # then we reuse it for --concurrency=n. # This way they share the same queue. channel = res.stream.channel self.supervisor.add(res) await self.supervisor.start() def _get_active_partitions(self) -> Optional[Set[TP]]: active_partitions: Optional[Set[TP]] = None if self.isolated_partitions: # when we start our first agent, we create the set of # partitions early, and save it in ._pending_active_partitions. # That way we can update the set once partitions are assigned, # and the actor we started may be assigned one of the partitions. active_partitions = self._pending_active_partitions = set() return active_partitions async def on_stop(self) -> None: """Call when an agent stops.""" # Agents iterate over infinite streams, so we cannot wait for it # to stop. # Instead we cancel it and this forces the stream to ack the # last message processed (but not the message causing the error # to be raised). 
await self._stop_supervisor() with suppress(asyncio.CancelledError): await asyncio.gather(*[ aref.actor_task for aref in self._actors if aref.actor_task is not None ]) self._actors.clear() async def _stop_supervisor(self) -> None: if self.supervisor: await self.supervisor.stop() self.supervisor = cast(SupervisorStrategyT, None) def cancel(self) -> None: """Cancel agent and its actor instances running in this process.""" for aref in self._actors: aref.cancel() async def on_partitions_revoked(self, revoked: Set[TP]) -> None: """Call when partitions are revoked.""" T = traced_from_parent_span() if self.isolated_partitions: # isolated: start/stop actors for each partition await T(self.on_isolated_partitions_revoked)(revoked) else: await T(self.on_shared_partitions_revoked)(revoked) async def on_partitions_assigned(self, assigned: Set[TP]) -> None: """Call when partitions are assigned.""" T = traced_from_parent_span() if self.isolated_partitions: await T(self.on_isolated_partitions_assigned)(assigned) else: await T(self.on_shared_partitions_assigned)(assigned) async def on_isolated_partitions_revoked(self, revoked: Set[TP]) -> None: """Call when isolated partitions are revoked.""" self.log.dev("Partitions revoked") T = traced_from_parent_span() for tp in revoked: aref: Optional[ActorRefT] = self._actor_by_partition.pop(tp, None) if aref is not None: await T(aref.on_isolated_partition_revoked)(tp) async def on_isolated_partitions_assigned(self, assigned: Set[TP]) -> None: """Call when isolated partitions are assigned.""" T = traced_from_parent_span() for tp in sorted(assigned): await T(self._assign_isolated_partition)(tp) async def _assign_isolated_partition(self, tp: TP) -> None: T = traced_from_parent_span() if not self._first_assignment_done and not self._actor_by_partition: self._first_assignment_done = True # if this is the first time we are assigned # we need to reassign the agent we started at boot to # one of the partitions. T(self._on_first_isolated_partition_assigned)(tp) await T(self._maybe_start_isolated)(tp) def _on_first_isolated_partition_assigned(self, tp: TP) -> None: assert self._actors assert len(self._actors) == 1 self._actor_by_partition[tp] = next(iter(self._actors)) if self._pending_active_partitions is not None: assert not self._pending_active_partitions self._pending_active_partitions.add(tp) async def _maybe_start_isolated(self, tp: TP) -> None: try: aref = self._actor_by_partition[tp] except KeyError: aref = await self._start_isolated(tp) self._actor_by_partition[tp] = aref await aref.on_isolated_partition_assigned(tp) async def _start_isolated(self, tp: TP) -> ActorT: return await self._start_for_partitions({tp}) async def on_shared_partitions_revoked(self, revoked: Set[TP]) -> None: """Call when non-isolated partitions are revoked.""" ... async def on_shared_partitions_assigned(self, assigned: Set[TP]) -> None: """Call when non-isolated partitions are assigned.""" ... def info(self) -> Mapping: """Return agent attributes as a dictionary.""" return { "app": self.app, "fun": self.fun, "name": self.name, "channel": self.channel, "concurrency": self.concurrency, "help": self.help, "sink": self._sinks, "on_error": self._on_error, "supervisor_strategy": self.supervisor_strategy, "isolated_partitions": self.isolated_partitions, } def clone(self, *, cls: Type[AgentT] = None, **kwargs: Any) -> AgentT: """Create clone of this agent object. Keyword arguments can be passed to override any argument supported by :class:`Agent.__init__ <Agent>`. 
""" return (cls or type(self))(**{**self.info(), **kwargs}) def test_context( self, channel: ChannelT = None, supervisor_strategy: SupervisorStrategyT = None, on_error: AgentErrorHandler = None, **kwargs: Any, ) -> AgentTestWrapperT: # pragma: no cover """Create new unit-testing wrapper for this agent.""" # flow control into channel queues are disabled at startup, # so need to resume that. self.app.flow_control.resume() async def on_agent_error(agent: AgentT, exc: BaseException) -> None: if on_error is not None: await on_error(agent, exc) await cast(AgentTestWrapper, agent).crash_test_agent(exc) return cast( AgentTestWrapperT, self.clone( cls=AgentTestWrapper, channel=channel if channel is not None else self.app.channel(), supervisor_strategy=supervisor_strategy or CrashingSupervisor, original_channel=self.channel, on_error=on_agent_error, **kwargs, ), ) def _prepare_channel( self, channel: Union[str, ChannelT] = None, internal: bool = True, schema: SchemaT = None, key_type: ModelArg = None, value_type: ModelArg = None, **kwargs: Any, ) -> ChannelT: app = self.app has_prefix = False if channel is None: channel = f"{app.conf.id}-{self.name}" has_prefix = True if isinstance(channel, ChannelT): return channel elif isinstance(channel, str): return app.topic( channel, internal=internal, schema=schema, key_type=key_type, value_type=value_type, has_prefix=has_prefix, **kwargs, ) raise TypeError( f"Channel must be channel, topic, or str; not {type(channel)}") def __call__( self, *, index: int = None, active_partitions: Set[TP] = None, stream: StreamT = None, channel: ChannelT = None, ) -> ActorRefT: """Create new actor instance for this agent.""" # The agent function can be reused by other agents/tasks. # For example: # # @app.agent(logs_topic, through='other-topic') # filter_log_errors_(stream): # async for event in stream: # if event.severity == 'error': # yield event # # @app.agent(logs_topic) # def alert_on_log_error(stream): # async for event in filter_log_errors(stream): # alert(f'Error occurred: {event!r}') # # Calling `res = filter_log_errors(it)` will end you up with # an AsyncIterable that you can reuse (but only if the agent # function is an `async def` function that yields) return self.actor_from_stream(stream, index=index, active_partitions=active_partitions, channel=channel) def actor_from_stream( self, stream: Optional[StreamT], *, index: int = None, active_partitions: Set[TP] = None, channel: ChannelT = None, ) -> ActorRefT: """Create new actor from stream.""" we_created_stream = False actual_stream: StreamT if stream is None: actual_stream = self.stream( channel=channel, concurrency_index=index, active_partitions=active_partitions, ) we_created_stream = True else: # reusing actor stream after agent restart assert stream.concurrency_index == index assert stream.active_partitions == active_partitions actual_stream = stream res = self.fun(actual_stream) if isinstance(res, AsyncIterable): if we_created_stream: actual_stream.add_processor(self._maybe_unwrap_reply_request) return cast( ActorRefT, AsyncIterableActor( self, actual_stream, res, index=actual_stream.concurrency_index, active_partitions=actual_stream.active_partitions, loop=self.loop, beacon=self.beacon, ), ) else: return cast( ActorRefT, AwaitableActor( self, actual_stream, res, index=actual_stream.concurrency_index, active_partitions=actual_stream.active_partitions, loop=self.loop, beacon=self.beacon, ), ) def add_sink(self, sink: SinkT) -> None: """Add new sink to further handle results from this agent.""" if sink not in 
self._sinks: self._sinks.append(sink) def stream(self, channel: ChannelT = None, active_partitions: Set[TP] = None, **kwargs: Any) -> StreamT: """Create underlying stream used by this agent.""" if channel is None: channel = cast(TopicT, self.channel_iterator).clone( is_iterator=False, active_partitions=active_partitions, ) if active_partitions is not None: assert channel.active_partitions == active_partitions s = self.app.stream( channel, loop=self.loop, active_partitions=active_partitions, prefix=self.name, beacon=self.beacon, **kwargs, ) return s def _maybe_unwrap_reply_request(self, value: V) -> Any: if isinstance(value, ReqRepRequest): return value.value return value async def _start_task( self, *, index: Optional[int], active_partitions: Optional[Set[TP]] = None, stream: StreamT = None, channel: ChannelT = None, beacon: NodeT = None, ) -> ActorRefT: # If the agent is an async function we simply start it, # if it returns an AsyncIterable/AsyncGenerator we start a task # that will consume it. actor = self( index=index, active_partitions=active_partitions, stream=stream, channel=channel, ) return await self._prepare_actor( actor, beacon if beacon is not None else self.beacon) async def _prepare_actor(self, aref: ActorRefT, beacon: NodeT) -> ActorRefT: coro: Any if isinstance(aref, Awaitable): # agent does not yield coro = aref if self._sinks: raise ImproperlyConfigured("Agent must yield to use sinks") else: # agent yields and is an AsyncIterator so we have to consume it. coro = self._slurp(aref, aiter(aref)) task = asyncio.Task(self._execute_actor(coro, aref), loop=self.loop) task._beacon = beacon # type: ignore aref.actor_task = task self._actors.add(aref) return aref async def _execute_actor(self, coro: Awaitable, aref: ActorRefT) -> None: # This executes the agent task itself, and does exception handling. _current_agent.set(self) try: await coro except asyncio.CancelledError: if self.should_stop: raise except Exception as exc: if self._on_error is not None: await self._on_error(self, exc) # Mark ActorRef as dead, so that supervisor thread # can start a new one. await aref.crash(exc) self.supervisor.wakeup() async def _slurp(self, res: ActorRefT, it: AsyncIterator) -> None: # this is used when the agent returns an AsyncIterator, # and simply consumes that async iterator. 
stream: Optional[StreamT] = None async for value in it: self.log.debug("%r yielded: %r", self.fun, value) if stream is None: stream = res.stream.get_active_stream() event = stream.current_event if event is not None: headers = event.headers reply_to: Optional[str] = None correlation_id: Optional[str] = None if isinstance(event.value, ReqRepRequest): req: ReqRepRequest = event.value reply_to = req.reply_to correlation_id = req.correlation_id elif headers: reply_to_bytes = headers.get("Faust-Ag-ReplyTo") if reply_to_bytes: reply_to = want_str(reply_to_bytes) correlation_id_bytes = headers.get( "Faust-Ag-CorrelationId") if correlation_id_bytes: correlation_id = want_str(correlation_id_bytes) if reply_to is not None: await self._reply(event.key, value, reply_to, cast(str, correlation_id)) await self._delegate_to_sinks(value) async def _delegate_to_sinks(self, value: Any) -> None: for sink in self._sinks: if isinstance(sink, AgentT): await sink.send(value=value) elif isinstance(sink, ChannelT): await cast(TopicT, sink).send(value=value) else: await maybe_async(cast(Callable, sink)(value)) async def _reply(self, key: Any, value: Any, reply_to: str, correlation_id: str) -> None: assert reply_to response = self._response_class(value)( key=key, value=value, correlation_id=correlation_id, ) await self.app.send( reply_to, key=None, value=response, ) def _response_class(self, value: Any) -> Type[ReqRepResponse]: if isinstance(value, ModelT): return ModelReqRepResponse return ReqRepResponse async def cast( self, value: V = None, *, key: K = None, partition: int = None, timestamp: float = None, headers: HeadersArg = None, ) -> None: """RPC operation: like :meth:`ask` but do not expect reply. Cast here is like "casting a spell", and will not expect a reply back from the agent. """ await self.send( key=key, value=value, partition=partition, timestamp=timestamp, headers=headers, ) async def ask( self, value: V = None, *, key: K = None, partition: int = None, timestamp: float = None, headers: HeadersArg = None, reply_to: ReplyToArg = None, correlation_id: str = None, ) -> Any: """RPC operation: ask agent for result of processing value. This version will wait until the result is available and return the processed value. """ p = await self.ask_nowait( value, key=key, partition=partition, timestamp=timestamp, headers=headers, reply_to=reply_to or self.app.conf.reply_to, correlation_id=correlation_id, force=True, # Send immediately, since we are waiting for result. ) app = cast(_App, self.app) await app._reply_consumer.add(p.correlation_id, p) await app.maybe_start_client() return await p async def ask_nowait( self, value: V = None, *, key: K = None, partition: int = None, timestamp: float = None, headers: HeadersArg = None, reply_to: ReplyToArg = None, correlation_id: str = None, force: bool = False, ) -> ReplyPromise: """RPC operation: ask agent for result of processing value. This version does not wait for the result to arrive, but instead returns a promise of future evaluation. 
""" if reply_to is None: raise TypeError("Missing reply_to argument") reply_to = self._get_strtopic(reply_to) correlation_id = correlation_id or str(uuid4()) value, headers = self._create_req(key, value, reply_to, correlation_id, headers) await self.channel.send( key=key, value=value, partition=partition, timestamp=timestamp, headers=headers, force=force, ) return ReplyPromise(reply_to, correlation_id) def _create_req( self, key: K = None, value: V = None, reply_to: ReplyToArg = None, correlation_id: str = None, headers: HeadersArg = None, ) -> Tuple[V, Optional[HeadersArg]]: if reply_to is None: raise TypeError("Missing reply_to argument") topic_name = self._get_strtopic(reply_to) correlation_id = correlation_id or str(uuid4()) open_headers = prepare_headers(headers or {}) if self.use_reply_headers: merge_headers( open_headers, { "Faust-Ag-ReplyTo": want_bytes(topic_name), "Faust-Ag-CorrelationId": want_bytes(correlation_id), }, ) return value, open_headers else: # wrap value in envelope req = self._request_class(value)( value=value, reply_to=topic_name, correlation_id=correlation_id, ) return req, open_headers def _request_class(self, value: V) -> Type[ReqRepRequest]: if isinstance(value, ModelT): return ModelReqRepRequest return ReqRepRequest async def send( self, *, key: K = None, value: V = None, partition: int = None, timestamp: float = None, headers: HeadersArg = None, key_serializer: CodecArg = None, value_serializer: CodecArg = None, callback: MessageSentCallback = None, reply_to: ReplyToArg = None, correlation_id: str = None, force: bool = False, ) -> Awaitable[RecordMetadata]: """Send message to topic used by agent.""" if reply_to: value, headers = self._create_req(key, value, reply_to, correlation_id, headers) return await self.channel.send( key=key, value=value, partition=partition, timestamp=timestamp, headers=headers, key_serializer=key_serializer, value_serializer=value_serializer, force=force, ) def _get_strtopic(self, topic: Union[str, ChannelT, TopicT, AgentT]) -> str: if isinstance(topic, AgentT): return self._get_strtopic(topic.channel) if isinstance(topic, TopicT): return topic.get_topic_name() if isinstance(topic, ChannelT): raise ValueError("Channels are unnamed topics") return topic async def map( self, values: Union[AsyncIterable, Iterable], key: K = None, reply_to: ReplyToArg = None, ) -> AsyncIterator: # pragma: no cover """RPC map operation on a list of values. A map operation iterates over results as they arrive. See :meth:`join` and :meth:`kvjoin` if you want them in order. """ # Map takes only values, but can provide one key that is used for all. async for value in self.kvmap(((key, v) async for v in aiter(values)), reply_to): yield value async def kvmap( self, items: Union[AsyncIterable[Tuple[K, V]], Iterable[Tuple[K, V]]], reply_to: ReplyToArg = None, ) -> AsyncIterator[str]: # pragma: no cover """RPC map operation on a list of ``(key, value)`` pairs. A map operation iterates over results as they arrive. See :meth:`join` and :meth:`kvjoin` if you want them in order. """ # kvmap takes (key, value) pairs. reply_to = self._get_strtopic(reply_to or self.app.conf.reply_to) # BarrierState is the promise that keeps track of pending results. # It contains a list of individual ReplyPromises. barrier = BarrierState(reply_to) async for _ in self._barrier_send(barrier, items, reply_to): # Now that we've sent a message, try to see if we have any # replies. 
try: _, val = barrier.get_nowait() except asyncio.QueueEmpty: pass else: yield val # All the messages have been sent so finalize the barrier. barrier.finalize() # Then iterate over the results in the group. async for _, value in barrier.iterate(): yield value async def join( self, values: Union[AsyncIterable[V], Iterable[V]], key: K = None, reply_to: ReplyToArg = None, ) -> List[Any]: # pragma: no cover """RPC map operation on a list of values. A join returns the results in order, and only returns once all values have been processed. """ return await self.kvjoin( ((key, value) async for value in aiter(values)), reply_to=reply_to, ) async def kvjoin( self, items: Union[AsyncIterable[Tuple[K, V]], Iterable[Tuple[K, V]]], reply_to: ReplyToArg = None, ) -> List[Any]: # pragma: no cover """RPC map operation on list of ``(key, value)`` pairs. A join returns the results in order, and only returns once all values have been processed. """ reply_to = self._get_strtopic(reply_to or self.app.conf.reply_to) barrier = BarrierState(reply_to) # Map correlation_id -> index posindex: MutableMapping[str, int] = { cid: i async for i, cid in aenumerate( self._barrier_send(barrier, items, reply_to)) } # All the messages have been sent so finalize the barrier. barrier.finalize() # wait until all replies received await barrier # then construct a list in the correct order. values: List = [None] * barrier.total async for correlation_id, value in barrier.iterate(): values[posindex[correlation_id]] = value return values async def _barrier_send( self, barrier: BarrierState, items: Union[AsyncIterable[Tuple[K, V]], Iterable[Tuple[K, V]]], reply_to: ReplyToArg, ) -> AsyncIterator[str]: # pragma: no cover # map: send many tasks to agents # while trying to pop incoming results off. key: K value: V async for key, value in aiter(items): # type: ignore correlation_id = str(uuid4()) p = await self.ask_nowait(key=key, value=value, reply_to=reply_to, correlation_id=correlation_id) # add reply promise to the barrier barrier.add(p) # the ReplyConsumer will call the barrier whenever a new # result comes in. app = cast(_App, self.app) await app.maybe_start_client() await app._reply_consumer.add(p.correlation_id, barrier) yield correlation_id def _repr_info(self) -> str: return shorten_fqdn(self.name) def get_topic_names(self) -> Iterable[str]: """Return list of topic names this agent subscribes to.""" channel = self.channel if isinstance(channel, TopicT): return channel.topics return [] @property def channel(self) -> ChannelT: """Return channel used by agent.""" if self._channel is None: self._channel = self._prepare_channel( self._channel_arg, schema=self._schema, key_type=self._key_type, value_type=self._value_type, **self._channel_kwargs, ) return self._channel @channel.setter def channel(self, channel: ChannelT) -> None: self._channel = channel @property def channel_iterator(self) -> AsyncIterator: """Return channel agent iterates over.""" # The channel is "memoized" here, so subsequent access to # instance.channel_iterator will return the same value. if self._channel_iterator is None: # we do not use aiter(channel) here, because # that will also add it to the topic conductor too early. 
self._channel_iterator = self.channel.clone(is_iterator=False) return self._channel_iterator @channel_iterator.setter def channel_iterator(self, it: AsyncIterator) -> None: self._channel_iterator = it @property def label(self) -> str: """Return human-readable description of agent.""" return self._agent_label() def _agent_label(self, name_suffix: str = "") -> str: s = f"{type(self).__name__}{name_suffix}: " s += f"{shorten_fqdn(qualname(self.fun))}" return s @property def shortlabel(self) -> str: """Return short description of agent.""" return self._agent_label()
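# A hedged sketch of how an Agent is normally created: via the @app.agent
# decorator mentioned in the class docstring. The app id, broker URL and topic
# name are placeholders. Each actor started for the agent is kept in
# Agent._actors (a WeakSet), so stopped actors are forgotten automatically.
import faust

app = faust.App('example-app', broker='kafka://localhost:9092')
orders_topic = app.topic('orders', value_type=str)

@app.agent(orders_topic)
async def process_order(orders):
    async for order in orders:
        print(f'processing {order}')

if __name__ == '__main__':
    app.main()  # run with: python this_module.py worker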
class CallbackCollection(Set): __slots__ = "__sender", "__callbacks", "__weak_callbacks", "__lock" def __init__(self, sender): self.__sender = ref(sender) self.__callbacks = set() self.__weak_callbacks = WeakSet() self.__lock = Lock() def add(self, callback: Callable, weak=True): if self.is_frozen: raise RuntimeError("Collection frozen") if not callable(callback): raise ValueError("Callback is not callable") with self.__lock: if weak: self.__weak_callbacks.add(callback) else: self.__callbacks.add(callback) def remove(self, callback: Callable): if self.is_frozen: raise RuntimeError("Collection frozen") with self.__lock: try: self.__callbacks.remove(callback) except KeyError: self.__weak_callbacks.remove(callback) def clear(self): if self.is_frozen: raise RuntimeError("Collection frozen") with self.__lock: self.__callbacks.clear() self.__weak_callbacks.clear() @property def is_frozen(self) -> bool: return isinstance(self.__callbacks, frozenset) def freeze(self): if self.is_frozen: raise RuntimeError("Collection already frozen") with self.__lock: self.__callbacks = frozenset(self.__callbacks) self.__weak_callbacks = WeakSet(self.__weak_callbacks) def unfreeze(self): if not self.is_frozen: raise RuntimeError("Collection is not frozen") with self.__lock: self.__callbacks = set(self.__callbacks) self.__weak_callbacks = WeakSet(self.__weak_callbacks) def __contains__(self, x: object) -> bool: return x in self.__callbacks or x in self.__weak_callbacks def __len__(self) -> int: return len(self.__callbacks) + len(self.__weak_callbacks) def __iter__(self) -> Iterable[Callable]: return iter(chain(self.__callbacks, self.__weak_callbacks)) def __bool__(self): return bool(self.__callbacks) or bool(self.__weak_callbacks) def __copy__(self): instance = self.__class__(self.__sender()) with self.__lock: for cb in self.__callbacks: instance.add(cb, weak=False) for cb in self.__weak_callbacks: instance.add(cb, weak=True) if self.is_frozen: instance.freeze() return instance def __call__(self, *args, **kwargs): with self.__lock: for cb in self: try: cb(self.__sender(), *args, **kwargs) except Exception: log.exception("Callback error")
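# A hedged usage sketch for CallbackCollection above: strongly-held callbacks
# stay registered for the collection's lifetime, weak ones vanish once their
# last outside reference is dropped, and freeze() makes the collection
# read-only. "Sender" and the handler functions are hypothetical stand-ins;
# CallbackCollection and its module-level imports are assumed to be in scope.
import gc

class Sender(object):
    pass

def strong_handler(sender, payload):
    print("strong handler got", payload)

def weak_handler(sender, payload):
    print("weak handler got", payload)

sender = Sender()
callbacks = CallbackCollection(sender)
callbacks.add(strong_handler, weak=False)  # kept alive by the collection itself
callbacks.add(weak_handler)                # weak=True is the default

callbacks("payload-1")   # both handlers fire, each receiving (sender, payload)

del weak_handler         # drop the only strong reference to the weak callback
gc.collect()
callbacks("payload-2")   # only the strong handler fires now

callbacks.freeze()       # further add()/remove()/clear() raise RuntimeError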
class TestWeakSet(unittest.TestCase): def setUp(self): # need to keep references to them self.items = [ustr(c) for c in ('a', 'b', 'c')] self.items2 = [ustr(c) for c in ('x', 'y', 'z')] self.ab_items = [ustr(c) for c in 'ab'] self.abcde_items = [ustr(c) for c in 'abcde'] self.def_items = [ustr(c) for c in 'def'] self.ab_weakset = WeakSet(self.ab_items) self.abcde_weakset = WeakSet(self.abcde_items) self.def_weakset = WeakSet(self.def_items) self.letters = [ustr(c) for c in string.ascii_letters] self.s = WeakSet(self.items) self.d = dict.fromkeys(self.items) self.obj = ustr('F') self.fs = WeakSet([self.obj]) def test_methods(self): weaksetmethods = dir(WeakSet) for method in dir(set): if method == 'test_c_api' or method.startswith('_'): continue self.assertIn(method, weaksetmethods, "WeakSet missing method " + method) def test_new_or_init(self): self.assertRaises(TypeError, WeakSet, [], 2) def test_len(self): self.assertEqual(len(self.s), len(self.d)) self.assertEqual(len(self.fs), 1) del self.obj support.gc_collect() self.assertEqual(len(self.fs), 0) def test_contains(self): for c in self.letters: self.assertEqual(c in self.s, c in self.d) # 1 is not weakref'able, but that TypeError is caught by __contains__ self.assertNotIn(1, self.s) self.assertIn(self.obj, self.fs) del self.obj support.gc_collect() self.assertNotIn(ustr('F'), self.fs) def test_union(self): u = self.s.union(self.items2) for c in self.letters: self.assertEqual(c in u, c in self.d or c in self.items2) self.assertEqual(self.s, WeakSet(self.items)) self.assertEqual(type(u), WeakSet) self.assertRaises(TypeError, self.s.union, [[]]) for C in set, frozenset, dict.fromkeys, list, tuple: x = WeakSet(self.items + self.items2) c = C(self.items2) self.assertEqual(self.s.union(c), x) del c self.assertEqual(len(u), len(self.items) + len(self.items2)) self.items2.pop() gc.collect() self.assertEqual(len(u), len(self.items) + len(self.items2)) def test_or(self): i = self.s.union(self.items2) self.assertEqual(self.s | set(self.items2), i) self.assertEqual(self.s | frozenset(self.items2), i) def test_intersection(self): s = WeakSet(self.letters) i = s.intersection(self.items2) for c in self.letters: self.assertEqual(c in i, c in self.items2 and c in self.letters) self.assertEqual(s, WeakSet(self.letters)) self.assertEqual(type(i), WeakSet) for C in set, frozenset, dict.fromkeys, list, tuple: x = WeakSet([]) self.assertEqual(i.intersection(C(self.items)), x) self.assertEqual(len(i), len(self.items2)) self.items2.pop() gc.collect() self.assertEqual(len(i), len(self.items2)) def test_isdisjoint(self): self.assertTrue(self.s.isdisjoint(WeakSet(self.items2))) self.assertTrue(not self.s.isdisjoint(WeakSet(self.letters))) def test_and(self): i = self.s.intersection(self.items2) self.assertEqual(self.s & set(self.items2), i) self.assertEqual(self.s & frozenset(self.items2), i) def test_difference(self): i = self.s.difference(self.items2) for c in self.letters: self.assertEqual(c in i, c in self.d and c not in self.items2) self.assertEqual(self.s, WeakSet(self.items)) self.assertEqual(type(i), WeakSet) self.assertRaises(TypeError, self.s.difference, [[]]) def test_sub(self): i = self.s.difference(self.items2) self.assertEqual(self.s - set(self.items2), i) self.assertEqual(self.s - frozenset(self.items2), i) def test_symmetric_difference(self): i = self.s.symmetric_difference(self.items2) for c in self.letters: self.assertEqual(c in i, (c in self.d) ^ (c in self.items2)) self.assertEqual(self.s, WeakSet(self.items)) self.assertEqual(type(i), 
WeakSet) self.assertRaises(TypeError, self.s.symmetric_difference, [[]]) self.assertEqual(len(i), len(self.items) + len(self.items2)) self.items2.pop() gc.collect() self.assertEqual(len(i), len(self.items) + len(self.items2)) def test_xor(self): i = self.s.symmetric_difference(self.items2) self.assertEqual(self.s ^ set(self.items2), i) self.assertEqual(self.s ^ frozenset(self.items2), i) def test_sub_and_super(self): self.assertTrue(self.ab_weakset <= self.abcde_weakset) self.assertTrue(self.abcde_weakset <= self.abcde_weakset) self.assertTrue(self.abcde_weakset >= self.ab_weakset) self.assertFalse(self.abcde_weakset <= self.def_weakset) self.assertFalse(self.abcde_weakset >= self.def_weakset) self.assertTrue(set('a').issubset('abc')) self.assertTrue(set('abc').issuperset('a')) self.assertFalse(set('a').issubset('cbs')) self.assertFalse(set('cbs').issuperset('a')) def test_lt(self): self.assertTrue(self.ab_weakset < self.abcde_weakset) self.assertFalse(self.abcde_weakset < self.def_weakset) self.assertFalse(self.ab_weakset < self.ab_weakset) self.assertFalse(WeakSet() < WeakSet()) def test_gt(self): self.assertTrue(self.abcde_weakset > self.ab_weakset) self.assertFalse(self.abcde_weakset > self.def_weakset) self.assertFalse(self.ab_weakset > self.ab_weakset) self.assertFalse(WeakSet() > WeakSet()) def test_gc(self): # Create a nest of cycles to exercise overall ref count check s = WeakSet(Foo() for i in range(1000)) for elem in s: elem.cycle = s elem.sub = elem elem.set = WeakSet([elem]) def test_subclass_with_custom_hash(self): # Bug #1257731 class H(WeakSet): def __hash__(self): return int(id(self) & 0x7fffffff) s=H() f=set() f.add(s) self.assertIn(s, f) f.remove(s) f.add(s) f.discard(s) def test_init(self): s = WeakSet() s.__init__(self.items) self.assertEqual(s, self.s) s.__init__(self.items2) self.assertEqual(s, WeakSet(self.items2)) self.assertRaises(TypeError, s.__init__, s, 2); self.assertRaises(TypeError, s.__init__, 1); def test_constructor_identity(self): s = WeakSet(self.items) t = WeakSet(s) self.assertNotEqual(id(s), id(t)) def test_hash(self): self.assertRaises(TypeError, hash, self.s) def test_clear(self): self.s.clear() self.assertEqual(self.s, WeakSet([])) self.assertEqual(len(self.s), 0) def test_copy(self): dup = self.s.copy() self.assertEqual(self.s, dup) self.assertNotEqual(id(self.s), id(dup)) def test_add(self): x = ustr('Q') self.s.add(x) self.assertIn(x, self.s) dup = self.s.copy() self.s.add(x) self.assertEqual(self.s, dup) self.assertRaises(TypeError, self.s.add, []) self.fs.add(Foo()) support.gc_collect() self.assertTrue(len(self.fs) == 1) self.fs.add(self.obj) self.assertTrue(len(self.fs) == 1) def test_remove(self): x = ustr('a') self.s.remove(x) self.assertNotIn(x, self.s) self.assertRaises(KeyError, self.s.remove, x) self.assertRaises(TypeError, self.s.remove, []) def test_discard(self): a, q = ustr('a'), ustr('Q') self.s.discard(a) self.assertNotIn(a, self.s) self.s.discard(q) self.assertRaises(TypeError, self.s.discard, []) def test_pop(self): for i in range(len(self.s)): elem = self.s.pop() self.assertNotIn(elem, self.s) self.assertRaises(KeyError, self.s.pop) def test_update(self): retval = self.s.update(self.items2) self.assertEqual(retval, None) for c in (self.items + self.items2): self.assertIn(c, self.s) self.assertRaises(TypeError, self.s.update, [[]]) def test_update_set(self): self.s.update(set(self.items2)) for c in (self.items + self.items2): self.assertIn(c, self.s) def test_ior(self): self.s |= set(self.items2) for c in (self.items + 
self.items2): self.assertIn(c, self.s) def test_intersection_update(self): retval = self.s.intersection_update(self.items2) self.assertEqual(retval, None) for c in (self.items + self.items2): if c in self.items2 and c in self.items: self.assertIn(c, self.s) else: self.assertNotIn(c, self.s) self.assertRaises(TypeError, self.s.intersection_update, [[]]) def test_iand(self): self.s &= set(self.items2) for c in (self.items + self.items2): if c in self.items2 and c in self.items: self.assertIn(c, self.s) else: self.assertNotIn(c, self.s) def test_difference_update(self): retval = self.s.difference_update(self.items2) self.assertEqual(retval, None) for c in (self.items + self.items2): if c in self.items and c not in self.items2: self.assertIn(c, self.s) else: self.assertNotIn(c, self.s) self.assertRaises(TypeError, self.s.difference_update, [[]]) self.assertRaises(TypeError, self.s.symmetric_difference_update, [[]]) def test_isub(self): self.s -= set(self.items2) for c in (self.items + self.items2): if c in self.items and c not in self.items2: self.assertIn(c, self.s) else: self.assertNotIn(c, self.s) def test_symmetric_difference_update(self): retval = self.s.symmetric_difference_update(self.items2) self.assertEqual(retval, None) for c in (self.items + self.items2): if (c in self.items) ^ (c in self.items2): self.assertIn(c, self.s) else: self.assertNotIn(c, self.s) self.assertRaises(TypeError, self.s.symmetric_difference_update, [[]]) def test_ixor(self): self.s ^= set(self.items2) for c in (self.items + self.items2): if (c in self.items) ^ (c in self.items2): self.assertIn(c, self.s) else: self.assertNotIn(c, self.s) def test_inplace_on_self(self): t = self.s.copy() t |= t self.assertEqual(t, self.s) t &= t self.assertEqual(t, self.s) t -= t self.assertEqual(t, WeakSet()) t = self.s.copy() t ^= t self.assertEqual(t, WeakSet()) def test_eq(self): # issue 5964 self.assertTrue(self.s == self.s) self.assertTrue(self.s == WeakSet(self.items)) self.assertFalse(self.s == set(self.items)) self.assertFalse(self.s == list(self.items)) self.assertFalse(self.s == tuple(self.items)) self.assertFalse(self.s == WeakSet([Foo])) self.assertFalse(self.s == 1) def test_ne(self): self.assertTrue(self.s != set(self.items)) s1 = WeakSet() s2 = WeakSet() self.assertFalse(s1 != s2) def test_weak_destroy_while_iterating(self): # Issue #7105: iterators shouldn't crash when a key is implicitly removed # Create new items to be sure no-one else holds a reference items = [ustr(c) for c in ('a', 'b', 'c')] s = WeakSet(items) it = iter(s) next(it) # Trigger internal iteration # Destroy an item del items[-1] gc.collect() # just in case # We have removed either the first consumed items, or another one self.assertIn(len(list(it)), [len(items), len(items) - 1]) del it # The removal has been committed self.assertEqual(len(s), len(items)) def test_weak_destroy_and_mutate_while_iterating(self): # Issue #7105: iterators shouldn't crash when a key is implicitly removed items = [ustr(c) for c in string.ascii_letters] s = WeakSet(items) @contextlib.contextmanager def testcontext(): try: it = iter(s) # Start iterator yielded = ustr(str(next(it))) # Schedule an item for removal and recreate it u = ustr(str(items.pop())) if yielded == u: # The iterator still has a reference to the removed item, # advance it (issue #20006). 
next(it) gc.collect() # just in case yield u finally: it = None # should commit all removals with testcontext() as u: self.assertNotIn(u, s) with testcontext() as u: self.assertRaises(KeyError, s.remove, u) self.assertNotIn(u, s) with testcontext() as u: s.add(u) self.assertIn(u, s) t = s.copy() with testcontext() as u: s.update(t) self.assertEqual(len(s), len(t)) with testcontext() as u: s.clear() self.assertEqual(len(s), 0) def test_len_cycles(self): N = 20 items = [RefCycle() for i in range(N)] s = WeakSet(items) del items it = iter(s) try: next(it) except StopIteration: pass gc.collect() n1 = len(s) del it gc.collect() gc.collect() n2 = len(s) # one item may be kept alive inside the iterator self.assertIn(n1, (0, 1)) self.assertEqual(n2, 0) @support.impl_detail("PyPy has no cyclic collection", pypy=False) def test_len_race(self): # Extended sanity checks for len() in the face of cyclic collection self.addCleanup(gc.set_threshold, *gc.get_threshold()) for th in range(1, 100): N = 20 gc.collect(0) gc.set_threshold(th, th, th) items = [RefCycle() for i in range(N)] s = WeakSet(items) del items # All items will be collected at next garbage collection pass it = iter(s) try: next(it) except StopIteration: pass n1 = len(s) del it n2 = len(s) self.assertGreaterEqual(n1, 0) self.assertLessEqual(n1, N) self.assertGreaterEqual(n2, 0) self.assertLessEqual(n2, n1)
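The TestWeakSet suite above (like the Jython variant further down) exercises one core guarantee: an element disappears from a WeakSet once its last strong reference is gone, and iterating the set stays safe while elements are being collected. Below is a minimal standalone sketch of that behaviour; the Item helper is hypothetical and only stands in for ustr/SomeClass, since built-in strings are not weak-referenceable.

import gc
import weakref


class Item:
    """Hypothetical stand-in for ustr/SomeClass; plain str cannot be weakly referenced."""
    def __init__(self, name):
        self.name = name


items = [Item(c) for c in 'abc']
s = weakref.WeakSet(items)
assert len(s) == 3

it = iter(s)
next(it)           # start iterating, as test_weak_destroy_while_iterating does
del items[-1]      # drop the only strong reference to one element
gc.collect()       # make the collection deterministic on CPython
del it             # discarding the iterator lets pending removals be committed
assert len(s) == 2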
class RemoteServiceServer(RemoteServiceBase): """The server side of an RPC communication. Considers all messages coming from the other end as requests for RPC executions. Will perform them and send results as responses. After having created an instance and initialized it with a socket the reader loop should be started by calling run. """ def __init__(self, local_service, remote_address): """Create a responder for the given service. local_service (Service): the object whose methods should be called via RPC. For other arguments see RemoteServiceBase. """ super(RemoteServiceServer, self).__init__(remote_address) self.local_service = local_service self.pending_incoming_requests_threads = WeakSet() def finalize(self, reason=""): """See RemoteServiceBase.finalize.""" super(RemoteServiceServer, self).finalize(reason) for thread in self.pending_incoming_requests_threads: thread.kill(RPCError(reason), block=False) self.pending_incoming_requests_threads.clear() def handle(self, socket_): self.initialize(socket_, self.remote_address) gevent.spawn(self.run) def run(self): """Start listening for requests, and go on forever. Read messages from the socket and issue greenlets to parse them, execute methods and send the response to the client. This method won't return as long as there's something to read, it's therefore advisable to spawn a greenlet to call it. """ while True: try: data = self._read() except IOError: break if len(data) == 0: self.finalize("Connection closed.") break gevent.spawn(self.process_data, data) def process_data(self, data): """Handle the message. JSON-decode it and forward it to process_incoming_request (unconditionally!). data (bytes): the message read from the socket. """ # Decode the incoming data. try: message = json.loads(data, encoding='utf-8') except ValueError: logger.warning("Cannot parse incoming message, discarding.") return self.process_incoming_request(message) def process_incoming_request(self, request): """Handle the request. Parse the request, execute the method it asks for, format the result and send the response. request (dict): the JSON-decoded request. """ # Validate the request. if not {"__id", "__method", "__data"}.issubset(request.iterkeys()): logger.warning("Request is missing some fields, ignoring.") return # Determine the ID. id_ = request["__id"] # Store the request. self.pending_incoming_requests_threads.add(gevent.getcurrent()) # Build the response. response = {"__id": id_, "__data": None, "__error": None} method_name = request["__method"] if not hasattr(self.local_service, method_name): response["__error"] = "Method %s doesn't exist." % method_name else: method = getattr(self.local_service, method_name) if not getattr(method, "rpc_callable", False): response["__error"] = "Method %s isn't callable." % method_name else: try: response["__data"] = method(**request["__data"]) except Exception as error: response["__error"] = "%s: %s\n%s" % \ (error.__class__.__name__, error, traceback.format_exc()) # Encode it. try: data = json.dumps(response, encoding='utf-8') except (TypeError, ValueError): logger.warning("JSON encoding failed.", exc_info=True) return # Send it. try: self._write(data) except IOError: # Log messages have already been produced. return
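RemoteServiceServer keeps the greenlets that are handling in-flight requests in a WeakSet: a greenlet that finishes (and is collected) drops out of the set on its own, while finalize() can still iterate and kill the ones that are pending. The following is a minimal sketch of that bookkeeping pattern under a plain gevent environment; TinyServer and its methods are illustrative only and not part of the CMS API.

import gevent
from weakref import WeakSet


class TinyServer:
    def __init__(self):
        # Pending workers, held weakly so finished ones can be collected freely.
        self.pending = WeakSet()

    def handle(self, seconds):
        # Spawn a worker greenlet and track it without keeping it alive forever.
        glet = gevent.spawn(gevent.sleep, seconds)
        self.pending.add(glet)
        return glet

    def shutdown(self):
        # Only workers that are still referenced (i.e. still pending) get killed.
        for glet in list(self.pending):
            glet.kill(block=False)
        self.pending.clear()


server = TinyServer()
server.handle(10)      # long-running "request"
server.handle(0)       # finishes almost immediately
gevent.sleep(0.1)      # give the short one time to complete
server.shutdown()      # kills whatever is still pending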
class Signal: ## Signal types. # These indicate the type of a signal, that is, how the signal handles calling the connected # slots. # - Direct connections immediately call the connected slots from the thread that called emit(). # - Auto connections will push the call onto the event loop if the current thread is # not the main thread, but make a direct call if it is. # - Queued connections will always push # the call on to the event loop. Direct = 1 Auto = 2 Queued = 3 ## Initialize the instance. # # \param kwargs Keyword arguments. # Possible keywords: # - type: The signal type. Defaults to Direct. def __init__(self, **kwargs): self.__functions = WeakSet() self.__methods = WeakKeyDictionary() self.__signals = WeakSet() self.__type = kwargs.get("type", Signal.Auto) ## \exception NotImplementedError def __call__(self): raise NotImplementedError("Call emit() to emit a signal") ## Get type of the signal # \return \type{int} Direct(1), Auto(2) or Queued(3) def getType(self): return self.__type ## Emit the signal which indirectly calls all of the connected slots. # # \param args The positional arguments to pass along. # \param kargs The keyword arguments to pass along. # # \note If the Signal type is Queued and this is not called from the application thread # the call will be posted as an event to the application main thread, which means the # function will be called on the next application event loop tick. def emit(self, *args, **kargs): try: if self.__type == Signal.Queued: Signal._app.functionEvent(CallFunctionEvent(self.emit, args, kargs)) return if self.__type == Signal.Auto: if threading.current_thread() is not Signal._app.getMainThread(): Signal._app.functionEvent(CallFunctionEvent(self.emit, args, kargs)) return except AttributeError: # If Signal._app is not set return # Call handler functions for func in self.__functions: func(*args, **kargs) # Call handler methods for dest, funcs in self.__methods.items(): for func in funcs: func(dest, *args, **kargs) # Emit connected signals for signal in self.__signals: signal.emit(*args, **kargs) ## Connect to this signal. # \param connector The signal or slot (function) to connect. def connect(self, connector): if type(connector) == Signal: if connector == self: return self.__signals.add(connector) elif inspect.ismethod(connector): if connector.__self__ not in self.__methods: self.__methods[connector.__self__] = set() self.__methods[connector.__self__].add(connector.__func__) else: self.__functions.add(connector) ## Disconnect from this signal. # \param connector The signal or slot (function) to disconnect. def disconnect(self, connector): try: if connector in self.__signals: self.__signals.remove(connector) elif inspect.ismethod(connector) and connector.__self__ in self.__methods: self.__methods[connector.__self__].remove(connector.__func__) else: if connector in self.__functions: self.__functions.remove(connector) except KeyError: #Ignore errors when connector is not connected to this signal. pass ## Disconnect all connected slots. def disconnectAll(self): self.__functions.clear() self.__methods.clear() self.__signals.clear() ## private: # To avoid circular references when importing Application, this should be # set by the Application instance. _app = None
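This Signal stores bound-method slots in a WeakKeyDictionary keyed by the receiver (with the underlying functions in a plain set) and other connectors in WeakSets, so a connection never keeps its receiver alive. Below is a stripped-down usage sketch of that behaviour, not the class above, showing that deleting the receiver silently disconnects it.

import gc
import inspect
from weakref import WeakSet, WeakKeyDictionary


class MiniSignal:
    def __init__(self):
        self._functions = WeakSet()
        self._methods = WeakKeyDictionary()

    def connect(self, slot):
        if inspect.ismethod(slot):
            # Hold the receiver weakly and the unbound function strongly,
            # so connecting never extends the receiver's lifetime.
            self._methods.setdefault(slot.__self__, set()).add(slot.__func__)
        else:
            self._functions.add(slot)

    def emit(self, *args, **kwargs):
        for func in list(self._functions):
            func(*args, **kwargs)
        for obj, funcs in list(self._methods.items()):
            for func in list(funcs):
                func(obj, *args, **kwargs)


class Receiver:
    def __init__(self):
        self.calls = 0

    def on_event(self, value):
        self.calls += value


signal = MiniSignal()
receiver = Receiver()
signal.connect(receiver.on_event)
signal.emit(1)
assert receiver.calls == 1

del receiver
gc.collect()
signal.emit(1)          # the dead receiver is silently skipped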
class TestWeakSet(unittest.TestCase): def setUp(self): # need to keep references to them self.items = [SomeClass(c) for c in ('a', 'b', 'c')] self.items2 = [SomeClass(c) for c in ('x', 'y', 'z')] self.letters = [SomeClass(c) for c in string.ascii_letters] self.ab_items = [SomeClass(c) for c in 'ab'] self.abcde_items = [SomeClass(c) for c in 'abcde'] self.def_items = [SomeClass(c) for c in 'def'] self.ab_weakset = WeakSet(self.ab_items) self.abcde_weakset = WeakSet(self.abcde_items) self.def_weakset = WeakSet(self.def_items) self.s = WeakSet(self.items) self.d = dict.fromkeys(self.items) self.obj = SomeClass('F') self.fs = WeakSet([self.obj]) def test_methods(self): weaksetmethods = dir(WeakSet) for method in dir(set): if method == 'test_c_api' or method.startswith('_'): continue self.assertIn(method, weaksetmethods, "WeakSet missing method " + method) def test_new_or_init(self): self.assertRaises(TypeError, WeakSet, [], 2) def test_len(self): self.assertEqual(len(self.s), len(self.d)) self.assertEqual(len(self.fs), 1) del self.obj test_support.gc_collect() # len of weak collections is eventually consistent on # Jython. In practice this does not matter because of the # nature of weaksets - we cannot rely on what happens in the # reaper thread and how it interacts with gc self.assertIn(len(self.fs), (0, 1)) def test_contains(self): for c in self.letters: self.assertEqual(c in self.s, c in self.d) # 1 is not weakref'able, but that TypeError is caught by __contains__ self.assertNotIn(1, self.s) self.assertIn(self.obj, self.fs) del self.obj test_support.gc_collect() self.assertNotIn(SomeClass('F'), self.fs) def test_union(self): u = self.s.union(self.items2) for c in self.letters: self.assertEqual(c in u, c in self.d or c in self.items2) self.assertEqual(self.s, WeakSet(self.items)) self.assertEqual(type(u), WeakSet) self.assertRaises(TypeError, self.s.union, [[]]) for C in set, frozenset, dict.fromkeys, list, tuple: x = WeakSet(self.items + self.items2) c = C(self.items2) self.assertEqual(self.s.union(c), x) del c test_support.gc_collect() self.assertEqual(len(list(u)), len(list(self.items)) + len(list(self.items2))) self.items2.pop() test_support.gc_collect() self.assertEqual(len(list(u)), len(list(self.items)) + len(list(self.items2))) def test_or(self): i = self.s.union(self.items2) self.assertEqual(self.s | set(self.items2), i) self.assertEqual(self.s | frozenset(self.items2), i) def test_intersection(self): s = WeakSet(self.letters) i = s.intersection(self.items2) for c in self.letters: self.assertEqual(c in i, c in self.items2 and c in self.letters) self.assertEqual(s, WeakSet(self.letters)) self.assertEqual(type(i), WeakSet) for C in set, frozenset, dict.fromkeys, list, tuple: x = WeakSet([]) self.assertEqual(i.intersection(C(self.items)), x) self.assertEqual(len(i), len(self.items2)) self.items2.pop() test_support.gc_collect() self.assertEqual(len(list(i)), len(list(self.items2))) def test_isdisjoint(self): self.assertTrue(self.s.isdisjoint(WeakSet(self.items2))) self.assertTrue(not self.s.isdisjoint(WeakSet(self.letters))) def test_and(self): i = self.s.intersection(self.items2) self.assertEqual(self.s & set(self.items2), i) self.assertEqual(self.s & frozenset(self.items2), i) def test_difference(self): i = self.s.difference(self.items2) for c in self.letters: self.assertEqual(c in i, c in self.d and c not in self.items2) self.assertEqual(self.s, WeakSet(self.items)) self.assertEqual(type(i), WeakSet) self.assertRaises(TypeError, self.s.difference, [[]]) def test_sub(self): i = 
self.s.difference(self.items2) self.assertEqual(self.s - set(self.items2), i) self.assertEqual(self.s - frozenset(self.items2), i) def test_symmetric_difference(self): i = self.s.symmetric_difference(self.items2) for c in self.letters: self.assertEqual(c in i, (c in self.d) ^ (c in self.items2)) self.assertEqual(self.s, WeakSet(self.items)) self.assertEqual(type(i), WeakSet) self.assertRaises(TypeError, self.s.symmetric_difference, [[]]) self.assertEqual(len(i), len(self.items) + len(self.items2)) self.items2.pop() test_support.gc_collect() self.assertEqual(len(list(i)), len(list(self.items)) + len(list(self.items2))) def test_xor(self): i = self.s.symmetric_difference(self.items2) self.assertEqual(self.s ^ set(self.items2), i) self.assertEqual(self.s ^ frozenset(self.items2), i) def test_sub_and_super(self): self.assertTrue(self.ab_weakset <= self.abcde_weakset) self.assertTrue(self.abcde_weakset <= self.abcde_weakset) self.assertTrue(self.abcde_weakset >= self.ab_weakset) self.assertFalse(self.abcde_weakset <= self.def_weakset) self.assertFalse(self.abcde_weakset >= self.def_weakset) self.assertTrue(set('a').issubset('abc')) self.assertTrue(set('abc').issuperset('a')) self.assertFalse(set('a').issubset('cbs')) self.assertFalse(set('cbs').issuperset('a')) def test_lt(self): self.assertTrue(self.ab_weakset < self.abcde_weakset) self.assertFalse(self.abcde_weakset < self.def_weakset) self.assertFalse(self.ab_weakset < self.ab_weakset) self.assertFalse(WeakSet() < WeakSet()) def test_gt(self): self.assertTrue(self.abcde_weakset > self.ab_weakset) self.assertFalse(self.abcde_weakset > self.def_weakset) self.assertFalse(self.ab_weakset > self.ab_weakset) self.assertFalse(WeakSet() > WeakSet()) def test_gc(self): # Create a nest of cycles to exercise overall ref count check s = WeakSet(Foo() for i in range(1000)) for elem in s: elem.cycle = s elem.sub = elem elem.set = WeakSet([elem]) def test_subclass_with_custom_hash(self): # Bug #1257731 class H(WeakSet): def __hash__(self): return int(id(self) & 0x7fffffff) s=H() f=set() f.add(s) self.assertIn(s, f) f.remove(s) f.add(s) f.discard(s) def test_init(self): s = WeakSet() s.__init__(self.items) self.assertEqual(s, self.s) s.__init__(self.items2) self.assertEqual(s, WeakSet(self.items2)) self.assertRaises(TypeError, s.__init__, s, 2); self.assertRaises(TypeError, s.__init__, 1); def test_constructor_identity(self): s = WeakSet(self.items) t = WeakSet(s) self.assertNotEqual(id(s), id(t)) def test_hash(self): self.assertRaises(TypeError, hash, self.s) def test_clear(self): self.s.clear() self.assertEqual(self.s, WeakSet([])) self.assertEqual(len(self.s), 0) def test_copy(self): dup = self.s.copy() self.assertEqual(self.s, dup) self.assertNotEqual(id(self.s), id(dup)) def test_add(self): x = SomeClass('Q') self.s.add(x) self.assertIn(x, self.s) dup = self.s.copy() self.s.add(x) self.assertEqual(self.s, dup) if not test_support.is_jython: # Jython/JVM can weakly reference list and other objects self.assertRaises(TypeError, self.s.add, []) self.fs.add(Foo()) test_support.gc_collect() # CPython assumes Foo() went out of scope and was collected, so ensure the same self.assertEqual(len(list(self.fs)), 1) self.fs.add(self.obj) self.assertEqual(len(list(self.fs)), 1) def test_remove(self): x = SomeClass('a') self.s.remove(x) self.assertNotIn(x, self.s) self.assertRaises(KeyError, self.s.remove, x) if not test_support.is_jython: # Jython/JVM can weakly reference list and other objects self.assertRaises(TypeError, self.s.remove, []) def test_discard(self): 
a, q = SomeClass('a'), SomeClass('Q') self.s.discard(a) self.assertNotIn(a, self.s) self.s.discard(q) if not test_support.is_jython: # Jython/JVM can weakly reference list and other objects self.assertRaises(TypeError, self.s.discard, []) def test_pop(self): for i in range(len(self.s)): elem = self.s.pop() self.assertNotIn(elem, self.s) self.assertRaises(KeyError, self.s.pop) def test_update(self): retval = self.s.update(self.items2) self.assertEqual(retval, None) for c in (self.items + self.items2): self.assertIn(c, self.s) self.assertRaises(TypeError, self.s.update, [[]]) def test_update_set(self): self.s.update(set(self.items2)) for c in (self.items + self.items2): self.assertIn(c, self.s) def test_ior(self): self.s |= set(self.items2) for c in (self.items + self.items2): self.assertIn(c, self.s) def test_intersection_update(self): retval = self.s.intersection_update(self.items2) self.assertEqual(retval, None) for c in (self.items + self.items2): if c in self.items2 and c in self.items: self.assertIn(c, self.s) else: self.assertNotIn(c, self.s) self.assertRaises(TypeError, self.s.intersection_update, [[]]) def test_iand(self): self.s &= set(self.items2) for c in (self.items + self.items2): if c in self.items2 and c in self.items: self.assertIn(c, self.s) else: self.assertNotIn(c, self.s) def test_difference_update(self): retval = self.s.difference_update(self.items2) self.assertEqual(retval, None) for c in (self.items + self.items2): if c in self.items and c not in self.items2: self.assertIn(c, self.s) else: self.assertNotIn(c, self.s) if not test_support.is_jython: # Jython/JVM can weakly reference list and other objects self.assertRaises(TypeError, self.s.difference_update, [[]]) self.assertRaises(TypeError, self.s.symmetric_difference_update, [[]]) def test_isub(self): self.s -= set(self.items2) for c in (self.items + self.items2): if c in self.items and c not in self.items2: self.assertIn(c, self.s) else: self.assertNotIn(c, self.s) def test_symmetric_difference_update(self): retval = self.s.symmetric_difference_update(self.items2) self.assertEqual(retval, None) for c in (self.items + self.items2): if (c in self.items) ^ (c in self.items2): self.assertIn(c, self.s) else: self.assertNotIn(c, self.s) self.assertRaises(TypeError, self.s.symmetric_difference_update, [[]]) def test_ixor(self): self.s ^= set(self.items2) for c in (self.items + self.items2): if (c in self.items) ^ (c in self.items2): self.assertIn(c, self.s) else: self.assertNotIn(c, self.s) def test_inplace_on_self(self): t = self.s.copy() t |= t self.assertEqual(t, self.s) t &= t self.assertEqual(t, self.s) t -= t self.assertEqual(t, WeakSet()) t = self.s.copy() t ^= t self.assertEqual(t, WeakSet()) def test_eq(self): # issue 5964 (http://bugs.python.org/issue5964) self.assertEqual(self.s, self.s) self.assertEqual(self.s, WeakSet(self.items)) # Jython diverges here in the next test because it constructs # WeakSet as a subclass of set; this seems to be the proper # thing to do given what is the typical comparison self.assertEqual(self.s, set(self.items)) self.assertNotEqual(self.s, list(self.items)) self.assertNotEqual(self.s, tuple(self.items)) self.assertNotEqual(self.s, 1) def test_weak_destroy_while_iterating(self): # Issue #7105: iterators shouldn't crash when a key is implicitly removed # Create new items to be sure no-one else holds a reference items = [SomeClass(c) for c in ('a', 'b', 'c')] s = WeakSet(items) it = iter(s) next(it) # Trigger internal iteration # Destroy an item del items[-1] 
test_support.gc_collect() # just in case # We have removed either the first consumed items, or another one self.assertIn(len(list(it)), [len(items), len(items) - 1]) del it extra_collect() # The removal has been committed self.assertEqual(len(s), len(items)) def test_weak_destroy_and_mutate_while_iterating(self): # Issue #7105: iterators shouldn't crash when a key is implicitly removed items = [SomeClass(c) for c in string.ascii_letters] s = WeakSet(items) @contextlib.contextmanager def testcontext(): try: it = iter(s) next(it) # Schedule an item for removal and recreate it u = SomeClass(str(items.pop())) test_support.gc_collect() # just in case yield u finally: it = None # should commit all removals test_support.gc_collect() with testcontext() as u: self.assertNotIn(u, s) with testcontext() as u: self.assertRaises(KeyError, s.remove, u) self.assertNotIn(u, s) with testcontext() as u: s.add(u) self.assertIn(u, s) t = s.copy() with testcontext() as u: s.update(t) self.assertEqual(len(s), len(t)) with testcontext() as u: s.clear() self.assertEqual(len(s), 0) def test_len_cycles(self): N = 20 items = [RefCycle() for i in range(N)] s = WeakSet(items) del items # do some gc test_support.gc_collect() it = iter(s) try: next(it) except StopIteration: pass # do some gc test_support.gc_collect() n1 = len(s) del it # do some gc test_support.gc_collect() n2 = len(s) # one item may be kept alive inside the iterator self.assertIn(n1, (0, 1)) self.assertEqual(n2, 0) @unittest.skipIf(test_support.is_jython, "GarbageCollection not deterministic in Jython") def test_len_race(self): # Extended sanity checks for len() in the face of cyclic collection self.addCleanup(gc.set_threshold, *gc.get_threshold()) for th in range(1, 100): N = 20 gc.collect(0) gc.set_threshold(th, th, th) items = [RefCycle() for i in range(N)] s = WeakSet(items) del items # All items will be collected at next garbage collection pass it = iter(s) try: next(it) except StopIteration: pass n1 = len(s) del it n2 = len(s) self.assertGreaterEqual(n1, 0) self.assertLessEqual(n1, N) self.assertGreaterEqual(n2, 0) self.assertLessEqual(n2, n1)
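This second TestWeakSet is the Jython port of the same suite: it routes collection through test_support.gc_collect, materialises weak sets with list() before measuring them, and loosens exact-length assertions because weak collections on the JVM are only eventually consistent (a reaper thread removes cleared references at its own pace). A rough sketch of the tolerant helper such a port relies on; gc_collect here is a hypothetical stand-in for test_support.gc_collect, not its actual implementation.

import gc
import time


def gc_collect(attempts=3, pause=0.01):
    # Run several passes and yield the CPU briefly so a background reaper
    # thread (if the runtime has one) can process cleared references.
    for _ in range(attempts):
        gc.collect()
        time.sleep(pause)

# Typical use in a test body:
#   del obj; gc_collect(); self.assertIn(len(weak_set), (0, 1))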
class Privacy(LineOfSight): _PRIVACY_SURFACE_BLOCKING_FOOTPRINT_COST = 100000 _PRIVACY_DISCOURAGEMENT_COST = routing.get_default_discouragement_cost() _SHOO_CONSTRAINT_RADIUS = Tunable(description='\n The radius of the constraint a Shooed Sim will attempt to route to.\n ', tunable_type=float, default=2.5) _UNAVAILABLE_TOOLTIP = TunableLocalizedStringFactory(description='\n Tooltip displayed when an object is not accessible due to being inside\n a privacy region.\n ') _EMBARRASSED_AFFORDANCE = TunableReference(description='\n The affordance a Sim will play when getting embarrassed by walking in\n on a privacy situation.\n ', manager=services.get_instance_manager(Types.INTERACTION)) def __init__(self, *, interaction=None, tests=None, shoo_exempt_tests=None, max_line_of_sight_radius=None, map_divisions=None, simplification_ratio=None, boundary_epsilon=None, facing_offset=None, routing_surface_only=None, shoo_constraint_radius=None, unavailable_tooltip=None, embarrassed_affordance=None, reserved_surface_space=None, vehicle_tests=None, central_object=None, post_route_affordance=None, add_to_privacy_service=True, privacy_cost_override=None, additional_exit_offsets=None, persistent_instance=False, privacy_violators=None): super().__init__(max_line_of_sight_radius, map_divisions, simplification_ratio, boundary_epsilon) logger.assert_raise(bool(interaction) != bool(central_object), 'Privacy must define either one of interaction or central object, and never both.') self._max_line_of_sight_radius = max_line_of_sight_radius self._interaction = interaction self._tests = tests self._shoo_exempt_tests = shoo_exempt_tests self._privacy_constraints = [] self._allowed_sims = WeakSet() self._disallowed_sims = WeakSet() self._violators = WeakSet() self._late_violators = WeakSet() self._exempt_sims = WeakSet() self.is_active = False self.has_shooed = False self.central_object = central_object self.additional_exit_offsets = additional_exit_offsets self._multi_surface = True self.persistent_instance = persistent_instance self._routing_surface_only = routing_surface_only self._shoo_constraint_radius = shoo_constraint_radius self._unavailable_tooltip = unavailable_tooltip self._embarrassed_affordance = embarrassed_affordance self._reserved_surface_space = reserved_surface_space self._post_route_affordance = post_route_affordance self._privacy_cost_override = privacy_cost_override self.privacy_violators = privacy_violators self._vehicle_tests = vehicle_tests self._pushed_interactions = [] if add_to_privacy_service: self.add_privacy() @property def shoo_constraint_radius(self): return self._shoo_constraint_radius or self._SHOO_CONSTRAINT_RADIUS @property def unavailable_tooltip(self): return self._unavailable_tooltip or self._UNAVAILABLE_TOOLTIP @property def embarrassed_affordance(self): return self._embarrassed_affordance or self._EMBARRASSED_AFFORDANCE @property def privacy_discouragement_cost(self): return self._privacy_cost_override or self._PRIVACY_DISCOURAGEMENT_COST @property def interaction(self): return self._interaction @property def is_active(self) -> bool: return self._is_active @is_active.setter def is_active(self, value): self._is_active = value def _is_sim_allowed(self, sim): if self._tests: resolver = SingleSimResolver(sim.sim_info) if self._interaction is None else self._interaction.get_resolver(target=sim) if self._tests and self._tests.run_tests(resolver): return True elif self._interaction is not None and self._interaction.can_sim_violate_privacy(sim): return True if self._interaction is not None 
and self._interaction.can_sim_violate_privacy(sim): return True return False def evaluate_sim(self, sim): if self._is_sim_allowed(sim): self._allowed_sims.add(sim) return True self._disallowed_sims.add(sim) return False def build_privacy(self, target=None): self.is_active = True if self.central_object is None: target_object = self._interaction.get_participant(ParticipantType.Object) target_object = None if target_object.is_sim else target_object self.central_object = target_object or (target or self._interaction.sim) if self._routing_surface_only: allow_object_routing_surface = True routing_surface = self.central_object.provided_routing_surface if routing_surface is None: return False else: allow_object_routing_surface = False routing_surface = self.central_object.routing_surface self.generate(self.central_object.position, routing_surface, allow_object_routing_surface=allow_object_routing_surface) for poly in self.constraint.geometry.polygon: self._privacy_constraints.append(PolygonFootprint(poly, routing_surface=routing_surface, cost=self.privacy_discouragement_cost, footprint_type=FootprintType.FOOTPRINT_TYPE_PATH, enabled=True)) if self._reserved_surface_space is not None and target is not None: reserved_space = self._reserved_surface_space.reserved_space try: polygon = _generate_single_poly_rectangle_points(target.position, target.part_owner.orientation.transform_vector(sims4.math.Vector3.Z_AXIS()), target.part_owner.orientation.transform_vector(sims4.math.Vector3.X_AXIS()), reserved_space.left, reserved_space.right, reserved_space.front, reserved_space.back) except AttributeError as exc: raise AttributeError('Interaction: {} is trying to reserve surface space with sim as target. Exception:{}'.format(self._interaction, exc)) routing_surface = self.central_object.provided_routing_surface if routing_surface is None: routing_surface = target.routing_surface footprint_cost = self.privacy_discouragement_cost if self._reserved_surface_space.allow_routing else self._PRIVACY_SURFACE_BLOCKING_FOOTPRINT_COST self._privacy_constraints.append(PolygonFootprint(polygon, routing_surface=routing_surface, cost=footprint_cost, footprint_type=FootprintType.FOOTPRINT_TYPE_PATH, enabled=True)) if self.privacy_violators & PrivacyViolators.SIM: if self._interaction is not None: self._allowed_sims.update(self._interaction.get_participants(ParticipantType.AllSims)) for sim in services.sim_info_manager().instanced_sims_gen(): if sim not in self._allowed_sims: self.evaluate_sim(sim) violating_sims = self.find_violating_sims() self._exempt_sims = set([s for s in violating_sims if self.is_sim_shoo_exempt(s)]) self._cancel_unavailable_interactions(violating_sims) self._add_overrides_and_constraints_if_needed(violating_sims) if self.privacy_violators & PrivacyViolators.VEHICLES: violating_vehicles = self.find_violating_vehicles() for vehicle in violating_vehicles: vehicle.objectrouting_component.handle_privacy_violation(self) return True def cleanup_privacy_instance(self): if self.is_active: self.is_active = False for sim in self._allowed_sims: self.remove_override_for_sim(sim) for sim in self._late_violators: self.remove_override_for_sim(sim) del self._privacy_constraints[:] self._allowed_sims.clear() self._disallowed_sims.clear() self._violators.clear() self._late_violators.clear() self._exempt_sims.clear() self._cancel_pushed_interactions() def add_privacy(self): services.privacy_service().add_instance(self) def remove_privacy(self): self.cleanup_privacy_instance() services.privacy_service().remove_instance(self) 
def intersects_with_object(self, obj): if obj.routing_surface != self.central_object.routing_surface: return False delta = obj.position - self.central_object.position distance = delta.magnitude_2d_squared() if distance > self.max_line_of_sight_radius*self.max_line_of_sight_radius: return False object_footprint = obj.footprint_polygon if object_footprint is None: object_footprint = sims4.geometry.CompoundPolygon([sims4.geometry.Polygon([obj.position])]) return self.constraint.geometry.polygon.intersects(object_footprint) def vehicle_violates_privacy(self, vehicle): if vehicle.objectrouting_component is None: return False if self._vehicle_tests is not None: resolver = SingleObjectResolver(vehicle) if self._vehicle_tests.run_tests(resolver): return False elif not self.intersects_with_object(vehicle): return False elif not self.intersects_with_object(vehicle): return False return True def find_violating_vehicles(self): violators = [] privacy_service = services.privacy_service() for vehicle in privacy_service.get_potential_vehicle_violators(): if self.vehicle_violates_privacy(vehicle): violators.append(vehicle) return violators def find_violating_sims(self, consider_exempt=True): if not self.is_active: return [] if not self.privacy_violators & PrivacyViolators.SIM: return [] check_all_surfaces_on_level = not self._routing_surface_only nearby_sims = placement.get_nearby_sims_gen(self.central_object.position, self._routing_surface, radius=self.max_line_of_sight_radius, exclude=self._allowed_sims, only_sim_position=True, check_all_surfaces_on_level=check_all_surfaces_on_level) violators = [] for sim in nearby_sims: if consider_exempt and sim in self._exempt_sims: continue if any(sim_primitive.is_traversing_portal() for sim_primitive in sim.primitives if isinstance(sim_primitive, FollowPath)): continue if sim not in self._disallowed_sims and self.evaluate_sim(sim): continue if sims4.geometry.test_point_in_compound_polygon(sim.position, self.constraint.geometry.polygon): violators.append(sim) return violators def is_sim_shoo_exempt(self, sim): if sim in self._exempt_sims: return True if self.central_object.provided_routing_surface == sim.location.routing_surface: return False elif self._shoo_exempt_tests: resolver = SingleSimResolver(sim.sim_info) if self._shoo_exempt_tests.run_tests(resolver): return True return False def add_exempt_sim(self, sim): self._exempt_sims.add(sim) def _add_overrides_and_constraints_if_needed(self, violating_sims): for sim in self._allowed_sims: self.add_override_for_sim(sim) for sim in violating_sims: self._violators.add(sim) if sim in self._exempt_sims: continue liabilities = ((SHOO_LIABILITY, ShooLiability(self, sim)),) result = self._route_sim_away(sim, liabilities=liabilities) if result: self._pushed_interactions.append(result.interaction) def _cancel_unavailable_interactions(self, violating_sims): for sim in violating_sims: if sim in self._exempt_sims: continue interactions_to_cancel = set() if sim.queue.running is not None: interactions_to_cancel.add(sim.queue.running) for interaction in sim.si_state: if interaction.is_super: if interaction.target is not None: if sim.locked_from_obj_by_privacy(interaction.target): interactions_to_cancel.add(interaction) for interaction in sim.queue: if interaction.target is not None and sim.locked_from_obj_by_privacy(interaction.target): interactions_to_cancel.add(interaction) elif interaction.target is not None: break for interaction in interactions_to_cancel: interaction.cancel(FinishingType.INTERACTION_INCOMPATIBILITY, 
cancel_reason_msg='Canceled due to incompatibility with privacy instance.') def _route_sim_away(self, sim, liabilities=()): context = InteractionContext(sim, InteractionContext.SOURCE_SCRIPT, Priority.High, insert_strategy=QueueInsertStrategy.NEXT) from interactions.utils.satisfy_constraint_interaction import BuildAndForceSatisfyShooConstraintInteraction result = sim.push_super_affordance(BuildAndForceSatisfyShooConstraintInteraction, None, context, liabilities=liabilities, privacy_inst=self, name_override='BuildShooFromPrivacy') if result: if self._post_route_affordance is not None: def route_away_callback(_): post_route_context = context.clone_for_continuation(result.interaction) sim.push_super_affordance(self._post_route_affordance, None, post_route_context) result.interaction.register_on_finishing_callback(route_away_callback) else: logger.debug('Failed to push BuildAndForceSatisfyShooConstraintInteraction on Sim {} to route them out of a privacy area. Result: {}', sim, result, owner='tastle') if self.interaction is not None: self.interaction.cancel(FinishingType.TRANSITION_FAILURE, cancel_reason_msg='Failed to shoo Sims away.') return result def _cancel_pushed_interactions(self): for interaction in self._pushed_interactions: interaction.cancel(FinishingType.AUTO_EXIT, cancel_reason_msg='Privacy finished and is cleaning up.') self._pushed_interactions.clear() def handle_late_violator(self, sim): self._cancel_unavailable_interactions((sim,)) self.add_override_for_sim(sim) liabilities = ((LATE_SHOO_LIABILITY, LateShooLiability(self, sim)),) result = self._route_sim_away(sim, liabilities=liabilities) if not result: return if not self._violators: context = InteractionContext(sim, InteractionContext.SOURCE_SCRIPT, Priority.High, insert_strategy=QueueInsertStrategy.NEXT) if self.interaction is None: result = sim.push_super_affordance(self.embarrassed_affordance, sim, context) else: result = sim.push_super_affordance(self.embarrassed_affordance, self.interaction.get_participant(ParticipantType.Actor), context) if not result and not services.sim_spawner_service().sim_is_leaving(sim): logger.warn('Failed to push the embarrassed affordance on Sim {}. Interaction {}. Result {}. Context {} ', sim, self.interaction, result, context, owner='tastle') return self._late_violators.add(sim) def add_override_for_sim(self, sim): for footprint in self._privacy_constraints: sim.routing_context.ignore_footprint_contour(footprint.footprint_id) def remove_override_for_sim(self, sim): for footprint in self._privacy_constraints: sim.routing_context.remove_footprint_contour_override(footprint.footprint_id) @property def allowed_sims(self): return self._allowed_sims @property def disallowed_sims(self): return self._disallowed_sims def remove_sim_from_allowed_disallowed(self, sim): if sim in self._allowed_sims: self._allowed_sims.remove(sim) if sim in self._disallowed_sims: self._disallowed_sims.remove(sim) @property def violators(self): return self._violators def remove_violator(self, sim): self.remove_override_for_sim(sim) self._violators.discard(sim) @property def late_violators(self): return self._late_violators def remove_late_violator(self, sim): self.remove_override_for_sim(sim) self._late_violators.discard(sim)
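The privacy instance above keeps its allowed, disallowed, violating, late-violating and exempt Sims in WeakSets, so a Sim that despawns while the region is active simply drops out of the bookkeeping instead of leaving a stale reference for cleanup_privacy_instance to chase. A minimal sketch of that categorisation pattern with hypothetical stand-in objects; FakeSim and FakeRegion are not part of the game API.

import gc
from weakref import WeakSet


class FakeSim:
    def __init__(self, name, allowed):
        self.name = name
        self.allowed = allowed


class FakeRegion:
    def __init__(self):
        # Both categories hold Sims weakly, mirroring _allowed_sims/_violators.
        self._allowed = WeakSet()
        self._violators = WeakSet()

    def evaluate(self, sim):
        (self._allowed if sim.allowed else self._violators).add(sim)

    def counts(self):
        return len(self._allowed), len(self._violators)


region = FakeRegion()
sims = [FakeSim('a', True), FakeSim('b', False), FakeSim('c', False)]
for sim in sims:
    region.evaluate(sim)
assert region.counts() == (1, 2)

del sims[2]          # "despawn" one violator
gc.collect()
assert region.counts() == (1, 1)   # no stale entry left behind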