Example 1
class Signal(object):
    """
    A Signal is callable. When called, it calls all the callables in its slots.
    """
    def __init__(self):
        self._slots = WeakValueDictionary()

    def __call__(self, *args, **kargs):
        for key in self._slots:
            func, _ = key
            func(self._slots[key], *args, **kargs)

    def connect(self, slot):
        """
        Slots must call this to register a callback method.
        :param slot: callable
        """
        key = (slot.im_func, id(slot.im_self))
        self._slots[key] = slot.im_self

    def disconnect(self, slot):
        """
        They can also unregister their callbacks here.
        :param slot: callable
        """
        key = (slot.im_func, id(slot.im_self))
        if key in self._slots:
            self._slots.pop(key)

    def clear(self):
        """
        Clears all slots
        """
        self._slots.clear()
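All of the Signal recipes in this listing rest on one property of WeakValueDictionary: an entry disappears as soon as the last strong reference to its value is dropped, so a receiver that gets garbage-collected is disconnected automatically. A minimal, self-contained sketch of that behaviour (kept independent of the Signal class above, which uses the Python 2 im_func/im_self attributes):

from weakref import WeakValueDictionary

class Receiver(object):
    def on_event(self):
        print("got event")

slots = WeakValueDictionary()
r = Receiver()
# Roughly the same key scheme as connect() above:
# (underlying function, id of instance), with the instance stored weakly.
slots[(Receiver.on_event, id(r))] = r
print(len(slots))   # 1
del r               # last strong reference to the receiver is gone...
print(len(slots))   # 0 -- the slot vanished, i.e. automatic disconnect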
Example 2
class LRUCache:
	def __init__(self, max_size):
		self.LRU = [Node(time(), "none%s"%i) for i in range(max_size)]
		self.search = WeakValueDictionary()
		for i in self.LRU:
			self.search[i.name] = i
		
	def __setitem__(self, name, value):
		q = self.search.get(name, None)
		if q:
			q.data = value
			q.time = time()
		else:
			lru = self.LRU[0]
			self.search.pop(lru.name)
			lru.data = value
			lru.time = time()
			lru.name = name
			self.search[lru.name] = lru
		self.LRU.sort()
		
	def get(self, name, default=None):
		try:
			pos = self.search[name]
			pos.time = time()
			return pos.data
		except KeyError:
			if default is not None:
				return default
			else:
				raise
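The cache above relies on a Node class and a time() function that are not shown in the snippet. A hypothetical minimal Node, inferred from the usage above: a mutable record with time/name/data attributes that orders by access time, so that LRU.sort() keeps the least recently used node at index 0. Note that self.LRU holds strong references to every node, so the WeakValueDictionary never drops entries on its own here; it only provides lookup by name.

from time import time                    # also needed by the LRUCache above
from weakref import WeakValueDictionary  # also needed by the LRUCache above

class Node(object):
    """Hypothetical node: a mutable record ordered by access time."""
    def __init__(self, time, name, data=None):
        self.time = time
        self.name = name
        self.data = data

    def __lt__(self, other):
        return self.time < other.time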
Example 3
class Signal(object):
    def __init__(self):
        self.__slots = WeakValueDictionary()

    def __call__(self, *args, **kargs):
        for key in self.__slots:
            func, selfid = key
            if selfid is not None:
                func(self.__slots[key], *args, **kargs)
            else:
                func(*args, **kargs)


    def __get_key(self, slot):
        if hasattr(slot, 'im_func'):
            return (slot.im_func, id(slot.im_self))
        else:
            return (slot, None)

    def connect(self, slot):
        key = self.__get_key(slot)
        if hasattr(slot, 'im_func'):
            self.__slots[key] = slot.im_self
        else:
            self.__slots[key] = slot

    def disconnect(self, slot):
        key = self.__get_key(slot)
        if key in self.__slots:
            self.__slots.pop(key)

    def clear(self):
        self.__slots.clear()
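A short usage sketch for this variant, assuming Python 2 (where bound methods expose the im_func/im_self attributes the code checks for): a bound method is held only through a weak reference to its instance and disconnects itself when the instance dies, whereas a plain function is kept alive by its own key tuple and stays connected until disconnect() or clear() is called.

def report(*args, **kargs):
    print('function slot fired')

class Listener(object):
    def on_event(self, *args, **kargs):
        print('method slot fired')

sig = Signal()
sig.connect(report)          # stored under (report, None)
obj = Listener()
sig.connect(obj.on_event)    # stored under (im_func, id(obj)) -> obj, weakly
sig()                        # both slots fire
del obj                      # the weakly held instance is collected...
sig()                        # ...only the plain function still fires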
Example 6
class Signal(object):
    def __init__(self):
        self.__slots = WeakValueDictionary()

    def __call__(self, *args, **kargs):
        for key in self.__slots:
            func, _ = key
            func(self.__slots[key], *args, **kargs)

    def connect(self, slot):
        key = (slot.__func__, id(slot.__self__))
        self.__slots[key] = slot.__self__


    def disconnect(self, slot):
        key = (slot.__func__, id(slot.__self__))
        if key in self.__slots:
            self.__slots.pop(key)

    def clear(self):
        self.__slots.clear()

        ## Sample usage:
        #class Model(object):
        #  def __init__(self, value):
        #    self.__value = value
        #    self.changed = Signal()
        #
        #  def set_value(self, value):
        #    self.__value = value
        #    self.changed() # Emit signal
        #
        #  def get_value(self):
        #    return self.__value
        #
        #
        #class View(object):
        #  def __init__(self, model):
        #    self.model = model
        #    model.changed.connect(self.model_changed)
        #
        #  def model_changed(self):
        #    print "New value:", self.model.get_value()
        #
        #
        #model = Model(10)
        #view1 = View(model)
        #view2 = View(model)
        #view3 = View(model)
        #
        #model.set_value(20)
        #
        #del view1
        #model.set_value(30)
        #
        #model.changed.clear()
        #model.set_value(40)
        ### end of http://code.activestate.com/recipes/576477/ }}}
Example 7
class Signal(object):
    
    def __init__(self,sender,max_connections=0, exc_catch=True):
        self._maxconn=max_connections
        self._sender=sender
        self._exc_catch=exc_catch
        self._slots = WeakValueDictionary()
        self._lock = threading.Lock()

    @property
    def connected(self):
        return len(self._slots)

    def connect(self, slot):
        if self._maxconn>0 and len(self._slots)>=self._maxconn:
            raise SignalError("Maximum number of connections was exceeded")
        assert callable(slot), "Signal slots must be callable."
        # Check for **kwargs
        try:
            argspec = inspect.getargspec(slot)
        except TypeError:
            try:
                argspec = inspect.getargspec(slot.__call__)
            except (TypeError, AttributeError):
                argspec = None
        if argspec:
            assert argspec[2] is not None,  \
                "Signal receivers must accept keyword arguments (**kwargs)."
        self._lock.acquire()
        try:
            key = (slot.im_func, id(slot.im_self))
            self._slots[key] = slot.im_self
        finally:
            self._lock.release()

    def disconnect(self, slot):
        self._lock.acquire()
        try:
            key = (slot.im_func, id(slot.im_self))
            if key in self._slots: self._slots.pop(key)
        finally:
            self._lock.release()

    def __call__(self,*args,**kwargs):
        assert not kwargs.has_key("sender"),  \
                "'sender' keyword argument is occupied"
        responses = []
        kwargs["sender"]=self._sender
        for key in self._slots:
            func, _ = key
            try:
                response=func(self._slots[key], *args, **kwargs)
                responses.append((func,response))
            except Exception, err:
                if self._exc_catch:
                    # assumes a logging mixin provides self.exception()
                    self.exception("Slot {0} exception: {1}".format(str(func), err))
                else:
                    raise Exception(traceback.format_exc())
        return responses    
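The **kwargs check in connect() uses inspect.getargspec, which was removed in Python 3.11. A sketch of the same check against the current inspect API (the helper name is mine, not from the original):

import inspect

def accepts_kwargs(slot):
    """Return True if slot (or its __call__) declares a **kwargs parameter."""
    try:
        spec = inspect.getfullargspec(slot)
    except TypeError:
        try:
            spec = inspect.getfullargspec(slot.__call__)
        except (TypeError, AttributeError):
            return True   # not introspectable; accept it, as the original does
    return spec.varkw is not None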
Example 8
class Monitor(QObject):
	"""File monitor

	This monitor can be used to track single files
	"""

	def __init__(self, **kwargs):
		super(Monitor, self).__init__(**kwargs)

		self.watched = WeakValueDictionary()
		self.delMapper = QSignalMapper(self)
		self.delMapper.mapped[str].connect(self.unmonitorFile)

		self.watcher = MonitorWithRename(parent=self)
		self.watcher.fileChanged.connect(self._onFileChanged)

	def monitorFile(self, path):
		"""Monitor a file and return an object that tracks only `path`

		:rtype: SingleFileWatcher
		:return: an object tracking `path`, the same object is returned if the method is called
		         with the same path.
		"""
		path = os.path.abspath(path)

		self.watcher.addPath(path)

		proxy = self.watched.get(path)
		if not proxy:
			proxy = SingleFileWatcher(path)
			proxy.destroyed.connect(self.delMapper.map)
			self.delMapper.setMapping(proxy, path)
			self.watched[path] = proxy

		return proxy

	@Slot(str)
	def unmonitorFile(self, path):
		"""Stop monitoring a file

		Since there is only one :any:`SingleFileWatcher` object per path, all objects monitoring
		`path` will not receive notifications anymore.
		To let only one object stop monitoring the file, simply disconnect its `modified` signal.
		When the :any:`SingleFileWatcher` object returned by method :any:`monitorFile` is
		destroyed, the file is automatically un-monitored.
		"""
		path = os.path.abspath(path)

		self.watcher.removePath(path)
		self.watched.pop(path, None)

	@Slot(str)
	def _onFileChanged(self, path):
		proxy = self.watched.get(path)
		if proxy:
			proxy.modified.emit()
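A hypothetical usage sketch, assuming (as _onFileChanged above implies) that SingleFileWatcher exposes a modified Qt signal; the path is illustrative only:

monitor = Monitor()
watcher = monitor.monitorFile('/tmp/settings.ini')
watcher.modified.connect(lambda: print('file changed'))

# Monitor.watched holds the proxy only weakly, so dropping the last
# reference destroys it; its destroyed signal is routed through delMapper
# to unmonitorFile(), and the path is un-watched automatically.
del watcher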
Example 10
class Subscriber(Link, MutableMapping):
    def __init__(self, connect):
        super(Subscriber, self).__init__()
        self._dict = WeakValueDictionary()
        self._names = WeakKeyDictionary()
        self.connect = connect
        self.call()

    def subscribe(self, obj):
        self['_'] = obj

    def unsubscribe(self, obj):
        wr = get_wrapper(obj)
        self._unsubscribe(wr)
        keys = wr._unsubscribe(self)
        keys.append(self._names[wr])
        for key in keys:
            self._dict.pop(key, None)

    def __setitem__(self, key, obj):
        wr = get_wrapper(obj)
        self._names[wr] = key
        self._subscribe(wr)
        keys = wr._subscribe(self)
        keys.append(key)
        assert not(key != '_' and key in self._dict), 'same name'
        for key in keys:
            self._dict[key] = wr.obj
        return obj

    def __getitem__(self, key):
        return self._dict[key]

    def __delitem__(self, key):
        self.unsubscribe(self[key])

    def __hash__(self):
        return Link.__hash__(self)

    def kill(self):
        for obj in set(self.links):
            self.unsubscribe(obj)
        super(Subscriber, self).kill()

    def call(self):
        pass

    def send(self, data):
        self.connect.send(data)

    def receive(self, data):
        receive(self, data)
Example 11
class DictRegistryBase(RegistryBase, MappingRegistryMixin):
    """
    A registry class with dict-based item storage.

    Note that the registry only holds weak references to the items.
    """

    key_error_class = KeyError

    def __init__(self):
        self.mapping = WeakValueDictionary()

    def perform_cache_clear(self):
        self.mapping.clear()

    def register(self, key, value):
        """
        Registration.

        If the given key already exists, the function will raise a
        KeyError.
        """

        if key in self.mapping:
            raise self.key_error_class(key)

        self.mapping[key] = value
        self.perform_register(key, value)

    def perform_register(self, key, value):
        pass

    def unregister(self, key):
        """
        Unregistration.

        If the given key does not exist, the function will fail silently.
        """
        self.mapping.pop(key, None)

        self.perform_unregister(key)

    def perform_unregister(self, key):
        pass

    def __getitem__(self, key):
        """
        Shortcuts to access registered items.
        """
        return self.mapping[key]
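A minimal sketch of the intended contract, assuming the RegistryBase/MappingRegistryMixin bases (not shown here) impose no extra construction requirements: items are held weakly, duplicate keys are rejected, and an entry disappears once nothing else references its value.

registry = DictRegistryBase()

class Plugin(object):
    pass

p = Plugin()
registry.register('csv', p)
assert registry['csv'] is p

try:
    registry.register('csv', Plugin())   # duplicate key is rejected
except KeyError:
    pass

del p                                    # last strong reference gone
assert 'csv' not in registry.mapping     # weakly held entry was dropped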
Example 12
class Signal(object):
    def __init__(self):
        self.__slots = WeakValueDictionary()
    def __call__(self, *args, **kargs):
        for key in self.__slots:
            func, _ = key
            func(self.__slots[key], *args, **kargs)
    def connect(self, slot):
        key = (slot.__func__, id(slot.__self__))
        self.__slots[key] = slot.__self__
    def disconnect(self, slot):
        key = (slot.__func__, id(slot.__self__))
        if key in self.__slots:
            self.__slots.pop(key)
    def clear(self):
        self.__slots.clear()
Example 14
class WSBroadcaster:
    def __init__(self):
        self.wsid_iter = count()
        self.websockets = WeakValueDictionary()

    def add_ws(self, ws, username):
        """Add a websocket to the pool, return its id"""
        wsid = next(self.wsid_iter)
        self.websockets[(username, wsid)] = ws
        self.broadcast_connected_users()
        return wsid

    def remove_ws(self, username, wsid):
        self.websockets.pop((username, wsid))
        self.broadcast_connected_users()

    def list_connected_users(self):
        return sorted(set(username for username, wsid in self.websockets))

    def broadcast_connected_users(self):
        self.broadcast({
            'type': 'connected_users',
            'users': self.list_connected_users(),
        })

    def broadcast(self, message):
        if message['type'] == 'preview':
            fn = self.send_bytes
            data = BSON.encode(message)
        else:
            fn = self.send_str
            data = json.dumps(message)
        for ws in self.websockets.values():
            create_task(fn(ws, data))

    async def send_str(self, ws, data):
        await ws.send_str(data)

    async def send_bytes(self, ws, data):
        await ws.send_bytes(data)
Example 15
class Signal(object):
    """A lightweight Signal class when Qt is not installed."""
    def __init__(self, *types):
        """Create the Signal object. The type signatures are ignored."""
        self.__slots = WeakValueDictionary()

    def emit(self, *args, **kwargs):
        """Emit the signal, call all slots that are connected."""
        for key in self.__slots:
            func, _ = key
            func(self.__slots[key], *args, **kwargs)

    def connect(self, slot):
        """Connect this signal to a slot."""
        key = (slot.im_func, id(slot.im_self))
        self.__slots[key] = slot.im_self

    def disconnect(self, slot):
        """Disconnect this signal from a slot."""
        key = (slot.im_func, id(slot.im_self))
        if key in self.__slots:
            self.__slots.pop(key)
Example 16
class Station(object):
    def __init__(self):
        self._id_2_protocol = WeakValueDictionary()
        self._unique_id_factory = UniqueIDFactory()

    def generate_id(self):
        return self._unique_id_factory.generate_id()

    def register(self, protocol):
        self._id_2_protocol[protocol.id] = protocol

    def unregister(self, protocol):
        self._id_2_protocol.pop(protocol.id, None)

    def outgoing_wcml_message(self, message):
        raise NotImplementedError

    def incoming_wcml_message(self, message):
        raise NotImplementedError

    def start_service(self):
        raise NotImplementedError
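UniqueIDFactory is not shown in this snippet; a minimal stand-in consistent with the generate_id() call above (hypothetical, any scheme producing unique ids would do):

from itertools import count

class UniqueIDFactory(object):
    """Hypothetical stand-in: hands out monotonically increasing ids."""
    def __init__(self):
        self._counter = count(1)

    def generate_id(self):
        return next(self._counter)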
Example 17
class Signal(object):
    """
    Signal slot object, used for handling events passed to the objects in the gui.
    """
    def __init__(self):
        self.__slots = WeakValueDictionary()

    def __call__(self, *args, **kargs):
        for key in self.__slots:
            func, _ = key
            func(self.__slots[key], *args, **kargs)

    def connect(self, slot):
        key = (slot.im_func, id(slot.im_self))
        self.__slots[key] = slot.im_self

    def disconnect(self, slot):
        key = (slot.im_func, id(slot.im_self))
        if key in self.__slots:
            self.__slots.pop(key)

    def clear(self):
        self.__slots.clear()
Example 18
class Signal(object):

    def __init__(self):
        self.__slots = WeakValueDictionary()

        # For keeping references to _FuncHost objects.
        self.__funchosts = {}

    def __call__(self, *args, **kargs):
        for key in self.__slots:
            func, _ = key
            func(self.__slots[key], *args, **kargs)

    def connect(self, slot):
        if inspect.ismethod(slot):
            key = (slot.im_func, id(slot.im_self))
            self.__slots[key] = slot.im_self
        else:
            host = _FuncHost(slot)
            self.connect(host.meth)
            # We stick a copy in here just to keep the instance alive.
            self.__funchosts[slot] = host

    def disconnect(self, slot):
        if inspect.ismethod(slot):
            key = (slot.im_func, id(slot.im_self))
            if key in self.__slots:
                self.__slots.pop(key)
        else:
            if slot in self.__funchosts:
                self.disconnect(self.__funchosts[slot].meth)
                self.__funchosts.pop(slot)

    def clear(self):
        self.__slots.clear()
        self.__funchosts.clear()
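_FuncHost is not defined in this snippet; a minimal sketch of what the code above expects from it: an object that owns the wrapped function and exposes a bound method (meth) the signal can register, so the host instance is what ends up weakly referenced (and __funchosts keeps it alive):

class _FuncHost(object):
    """Hypothetical wrapper turning a plain function into a bound-method slot."""
    def __init__(self, func):
        self._func = func

    def meth(self, *args, **kargs):
        self._func(*args, **kargs)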
Example 19
class Layout(object):
    def __init__(self):
        self.solver = Solver()
        self.introns = WeakValueDictionary()

    def update(self, element):
        self.solver.autosolve = False
        for constraint in element.constraints[1]:
            self.solver.remove_constraint(constraint)
        for constraint in element.constraints[0]:
            self.solver.add_constraint(constraint)
        element.constraints = [], element.constraints[0]
        self.solver.autosolve = True

    def add_intron(self, source, intron):
        self.introns[source] = intron

    def pop_intron(self, source):
        return self.introns.pop(source)

    def get_intron(self, source, otherwise=None):
        return self.introns.get(source, otherwise)
Example 20
class Boss:

    def __init__(self, os_window_id, opts, args):
        self.window_id_map = WeakValueDictionary()
        self.os_window_map = {}
        self.cursor_blinking = True
        self.shutting_down = False
        talk_fd = getattr(single_instance, 'socket', None)
        talk_fd = -1 if talk_fd is None else talk_fd.fileno()
        self.child_monitor = ChildMonitor(
            self.on_child_death,
            DumpCommands(args) if args.dump_commands or args.dump_bytes else None,
            talk_fd
        )
        set_boss(self)
        self.current_font_size = opts.font_size
        set_font_family(opts)
        self.opts, self.args = opts, args
        initialize_renderer()
        startup_session = create_session(opts, args)
        self.add_os_window(startup_session, os_window_id=os_window_id)

    def add_os_window(self, startup_session, os_window_id=None, wclass=None, wname=None, size=None, startup_id=None):
        dpi_changed = False
        if os_window_id is None:
            w, h = initial_window_size(self.opts) if size is None else size
            cls = wclass or self.args.cls or appname
            os_window_id = create_os_window(w, h, appname, wname or self.args.name or cls, cls)
            if startup_id:
                ctx = init_startup_notification(os_window_id, startup_id)
            dpi_changed = show_window(os_window_id)
            if startup_id:
                end_startup_notification(ctx)
        tm = TabManager(os_window_id, self.opts, self.args, startup_session)
        self.os_window_map[os_window_id] = tm
        if dpi_changed:
            self.on_dpi_change(os_window_id)

    def list_os_windows(self):
        for os_window_id, tm in self.os_window_map.items():
            yield {
                'id': os_window_id,
                'tabs': list(tm.list_tabs()),
            }

    def match_windows(self, match):
        field, exp = match.split(':', 1)
        pat = re.compile(exp)
        for tm in self.os_window_map.values():
            for tab in tm:
                for window in tab:
                    if window.matches(field, pat):
                        yield window

    def tab_for_window(self, window):
        for tm in self.os_window_map.values():
            for tab in tm:
                for w in tab:
                    if w.id == window.id:
                        return tab

    def match_tabs(self, match):
        field, exp = match.split(':', 1)
        pat = re.compile(exp)
        tms = tuple(self.os_window_map.values())
        found = False
        if field in ('title', 'id'):
            for tm in tms:
                for tab in tm:
                    if tab.matches(field, pat):
                        yield tab
                        found = True
        if not found:
            tabs = {self.tab_for_window(w) for w in self.match_windows(match)}
            for tab in tabs:
                if tab:
                    yield tab

    def set_active_window(self, window):
        for tm in self.os_window_map.values():
            for tab in tm:
                for w in tab:
                    if w.id == window.id:
                        if tab is not self.active_tab:
                            tm.set_active_tab(tab)
                        tab.set_active_window(w)
                        return

    def _new_os_window(self, args, cwd_from=None):
        sw = self.args_to_special_window(args, cwd_from) if args else None
        startup_session = create_session(self.opts, special_window=sw, cwd_from=cwd_from)
        self.add_os_window(startup_session)

    def new_os_window(self, *args):
        self._new_os_window(args)

    def new_os_window_with_cwd(self, *args):
        w = self.active_window
        cwd_from = w.child.pid if w is not None else None
        self._new_os_window(args, cwd_from)

    def add_child(self, window):
        self.child_monitor.add_child(window.id, window.child.pid, window.child.child_fd, window.screen)
        self.window_id_map[window.id] = window

    def peer_messages_received(self, messages):
        import json
        for msg in messages:
            msg = json.loads(msg.decode('utf-8'))
            if isinstance(msg, dict) and msg.get('cmd') == 'new_instance':
                startup_id = msg.get('startup_id')
                args, rest = parse_args(msg['args'][1:])
                args.args = rest
                opts = create_opts(args)
                session = create_session(opts, args)
                self.add_os_window(session, wclass=args.cls, wname=args.name, size=initial_window_size(opts), startup_id=startup_id)
            else:
                safe_print('Unknown message received from peer, ignoring')

    def handle_remote_cmd(self, cmd, window=None):
        response = None
        if self.opts.allow_remote_control:
            try:
                response = handle_cmd(self, window, cmd)
            except Exception as err:
                import traceback
                response = {'ok': False, 'error': str(err), 'tb': traceback.format_exc()}
        else:
            response = {'ok': False, 'error': 'Remote control is disabled. Add allow_remote_control yes to your kitty.conf'}
        if response is not None:
            if window is not None:
                window.send_cmd_response(response)

    def on_child_death(self, window_id):
        window = self.window_id_map.pop(window_id, None)
        if window is None:
            return
        os_window_id = window.os_window_id
        window.destroy()
        tm = self.os_window_map.get(os_window_id)
        if tm is None:
            return
        for tab in tm:
            if window in tab:
                break
        else:
            return
        tab.remove_window(window)
        if len(tab) == 0:
            tm.remove(tab)
            tab.destroy()
            if len(tm) == 0:
                if not self.shutting_down:
                    mark_os_window_for_close(os_window_id)
                    glfw_post_empty_event()

    def close_window(self, window=None):
        if window is None:
            window = self.active_window
        self.child_monitor.mark_for_close(window.id)

    def close_tab(self, tab=None):
        if tab is None:
            tab = self.active_tab
        for window in tab:
            self.close_window(window)

    def toggle_fullscreen(self):
        toggle_fullscreen()

    def start(self):
        if not getattr(self, 'io_thread_started', False):
            self.child_monitor.start()
            self.io_thread_started = True

    def activate_tab_at(self, os_window_id, x):
        tm = self.os_window_map.get(os_window_id)
        if tm is not None:
            tm.activate_tab_at(x)

    def on_window_resize(self, os_window_id, w, h, dpi_changed):
        tm = self.os_window_map.get(os_window_id)
        if tm is not None:
            if dpi_changed:
                if set_dpi_from_os_window(os_window_id):
                    self.on_dpi_change(os_window_id)
                else:
                    tm.resize()
            else:
                tm.resize()

    def increase_font_size(self):
        self.change_font_size(
            min(
                self.opts.font_size * 5, self.current_font_size +
                self.opts.font_size_delta))

    def decrease_font_size(self):
        self.change_font_size(
            max(
                MINIMUM_FONT_SIZE, self.current_font_size -
                self.opts.font_size_delta))

    def restore_font_size(self):
        self.change_font_size(self.opts.font_size)

    def _change_font_size(self, new_size=None):
        if new_size is not None:
            self.current_font_size = new_size
        old_cell_width, old_cell_height = viewport_for_window()[-2:]
        windows = tuple(filter(None, self.window_id_map.values()))
        resize_fonts(self.current_font_size)
        layout_sprite_map()
        prerender()
        for window in windows:
            window.screen.rescale_images(old_cell_width, old_cell_height)
            window.screen.refresh_sprite_positions()
        for tm in self.os_window_map.values():
            tm.resize()
            tm.refresh_sprite_positions()
        glfw_post_empty_event()

    def change_font_size(self, new_size):
        if new_size == self.current_font_size:
            return
        self._change_font_size(new_size)

    def on_dpi_change(self, os_window_id):
        self._change_font_size()

    @property
    def active_tab_manager(self):
        os_window_id = current_os_window()
        return self.os_window_map.get(os_window_id)

    @property
    def active_tab(self):
        tm = self.active_tab_manager
        if tm is not None:
            return tm.active_tab

    @property
    def active_window(self):
        t = self.active_tab
        if t is not None:
            return t.active_window

    def dispatch_special_key(self, key, scancode, action, mods):
        # Handles shortcuts, return True if the key was consumed
        key_action = get_shortcut(self.opts.keymap, mods, key, scancode)
        self.current_key_press_info = key, scancode, action, mods
        return self.dispatch_action(key_action)

    def dispatch_action(self, key_action):
        if key_action is not None:
            f = getattr(self, key_action.func, None)
            if f is not None:
                passthrough = f(*key_action.args)
                if passthrough is not True:
                    return True
        tab = self.active_tab
        if tab is None:
            return False
        window = self.active_window
        if window is None:
            return False
        if key_action is not None:
            f = getattr(tab, key_action.func, getattr(window, key_action.func, None))
            if f is not None:
                passthrough = f(*key_action.args)
                if passthrough is not True:
                    return True
        return False

    def combine(self, *actions):
        for key_action in actions:
            self.dispatch_action(key_action)

    def on_focus(self, os_window_id, focused):
        tm = self.os_window_map.get(os_window_id)
        if tm is not None:
            w = tm.active_window
            if w is not None:
                w.focus_changed(focused)

    def on_drop(self, os_window_id, paths):
        tm = self.os_window_map.get(os_window_id)
        if tm is not None:
            w = tm.active_window
            if w is not None:
                w.paste('\n'.join(paths))

    def on_os_window_closed(self, os_window_id, viewport_width, viewport_height):
        cached_values['window-size'] = viewport_width, viewport_height
        tm = self.os_window_map.pop(os_window_id, None)
        if tm is not None:
            tm.destroy()
        for window_id in tuple(w.id for w in self.window_id_map.values() if getattr(w, 'os_window_id', None) == os_window_id):
            self.window_id_map.pop(window_id, None)

    def display_scrollback(self, data):
        if self.opts.scrollback_in_new_tab:
            self.display_scrollback_in_new_tab(data)
        else:
            tab = self.active_tab
            if tab is not None:
                tab.new_special_window(
                    SpecialWindow(
                        self.opts.scrollback_pager, data, _('History')))

    def switch_focus_to(self, window_idx):
        tab = self.active_tab
        tab.set_active_window_idx(window_idx)
        old_focus = tab.active_window
        if not old_focus.destroyed:
            old_focus.focus_changed(False)
        tab.active_window.focus_changed(True)

    def open_url(self, url):
        if url:
            open_url(url, self.opts.open_url_with)

    def open_url_lines(self, lines):
        self.open_url(''.join(lines))

    def destroy(self):
        self.shutting_down = True
        self.child_monitor.shutdown_monitor()
        wakeup()
        self.child_monitor.join()
        del self.child_monitor
        for tm in self.os_window_map.values():
            tm.destroy()
        self.os_window_map = {}
        destroy_sprite_map()
        destroy_global_data()

    def paste_to_active_window(self, text):
        if text:
            w = self.active_window
            if w is not None:
                w.paste(text)

    def paste_from_clipboard(self):
        text = get_clipboard_string()
        self.paste_to_active_window(text)

    def paste_from_selection(self):
        text = get_primary_selection()
        self.paste_to_active_window(text)

    def set_primary_selection(self):
        w = self.active_window
        if w is not None and not w.destroyed:
            text = w.text_for_selection()
            if text:
                set_primary_selection(text)
                if self.opts.copy_on_select:
                    set_clipboard_string(text)

    def goto_tab(self, tab_num):
        tm = self.active_tab_manager
        if tm is not None:
            tm.goto_tab(tab_num - 1)

    def next_tab(self):
        tm = self.active_tab_manager
        if tm is not None:
            tm.next_tab()

    def previous_tab(self):
        tm = self.active_tab_manager
        if tm is not None:
            tm.next_tab(-1)

    def args_to_special_window(self, args, cwd_from=None):
        args = list(args)
        stdin = None
        w = self.active_window

        def data_for_at(arg):
            if arg == '@selection':
                return w.text_for_selection()
            if arg == '@ansi':
                return w.buffer_as_ansi()
            if arg == '@text':
                return w.buffer_as_text()

        if args[0].startswith('@'):
            stdin = data_for_at(args[0]) or None
            if stdin is not None:
                stdin = stdin.encode('utf-8')
            del args[0]

        cmd = []
        for arg in args:
            if arg == '@selection':
                arg = data_for_at(arg)
                if not arg:
                    continue
            cmd.append(arg)
        return SpecialWindow(cmd, stdin, cwd_from=cwd_from)

    def _new_tab(self, args, cwd_from=None):
        special_window = None
        if args:
            if isinstance(args, SpecialWindowInstance):
                special_window = args
            else:
                special_window = self.args_to_special_window(args, cwd_from=cwd_from)
        tm = self.active_tab_manager
        if tm is not None:
            tm.new_tab(special_window=special_window, cwd_from=cwd_from)

    def new_tab(self, *args):
        self._new_tab(args)

    def new_tab_with_cwd(self, *args):
        w = self.active_window
        cwd_from = w.child.pid if w is not None else None
        self._new_tab(args, cwd_from=cwd_from)

    def _new_window(self, args, cwd_from=None):
        tab = self.active_tab
        if tab is not None:
            if args:
                tab.new_special_window(self.args_to_special_window(args, cwd_from=cwd_from))
            else:
                tab.new_window(cwd_from=cwd_from)

    def new_window(self, *args):
        self._new_window(args)

    def new_window_with_cwd(self, *args):
        w = self.active_window
        cwd_from = w.child.pid if w is not None else None
        self._new_window(args, cwd_from=cwd_from)

    def move_tab_forward(self):
        tm = self.active_tab_manager
        if tm is not None:
            tm.move_tab(1)

    def move_tab_backward(self):
        tm = self.active_tab_manager
        if tm is not None:
            tm.move_tab(-1)

    def display_scrollback_in_new_tab(self, data):
        tm = self.active_tab_manager
        if tm is not None:
            tm.new_tab(special_window=SpecialWindow(
                self.opts.scrollback_pager, data, _('History')))
Example 21
class Agent(AgentT, Service):
    """Agent.

    This is the type of object returned by the ``@app.agent`` decorator.
    """
    supervisor: SupervisorStrategyT = None
    instances: MutableSequence[ActorRefT]

    # channel is loaded lazily on .channel property access
    # to make sure configuration is not accessed when agent created
    # at module-scope.
    _channel: Optional[ChannelT] = None
    _channel_arg: Optional[Union[str, ChannelT]]
    _channel_kwargs: Dict[str, Any]
    _channel_iterator: Optional[AsyncIterator] = None
    _sinks: List[SinkT]

    _actors: MutableSet[ActorRefT]
    _actor_by_partition: MutableMapping[TP, ActorRefT]

    #: This mutable set is used by the first agent we start,
    #: so that we can update its active_partitions later
    #: (in on_partitions_assigned, when we know what partitions we get).
    _pending_active_partitions: Optional[Set[TP]] = None

    _first_assignment_done: bool = False

    def __init__(self,
                 fun: AgentFun,
                 *,
                 app: AppT,
                 name: str = None,
                 channel: Union[str, ChannelT] = None,
                 concurrency: int = 1,
                 sink: Iterable[SinkT] = None,
                 on_error: AgentErrorHandler = None,
                 supervisor_strategy: Type[SupervisorStrategyT] = None,
                 help: str = None,
                 key_type: ModelArg = None,
                 value_type: ModelArg = None,
                 isolated_partitions: bool = False,
                 **kwargs: Any) -> None:
        self.app = app
        self.fun: AgentFun = fun
        self.name = name or canonshortname(self.fun)
        # key-type/value_type arguments only apply when a channel
        # is not set
        if key_type is not None:
            assert channel is None or isinstance(channel, str)
        self._key_type = key_type
        if value_type is not None:
            assert channel is None or isinstance(channel, str)
        self._value_type = value_type
        self._channel_arg = channel
        self._channel_kwargs = kwargs
        self.concurrency = concurrency or 1
        self.isolated_partitions = isolated_partitions
        self.help = help or ''
        self._sinks = list(sink) if sink is not None else []
        self._on_error: Optional[AgentErrorHandler] = on_error
        self.supervisor_strategy = supervisor_strategy
        self._actors = WeakSet()
        self._actor_by_partition = WeakValueDictionary()
        if self.isolated_partitions and self.concurrency > 1:
            raise ImproperlyConfigured(
                'Agent concurrency must be 1 when using isolated partitions')
        Service.__init__(self)

    async def _start_one(self,
                         *,
                         index: Optional[int] = None,
                         active_partitions: Optional[Set[TP]] = None,
                         stream: StreamT = None,
                         channel: ChannelT = None) -> ActorT:
        # an index of None means there's only one instance,
        # and `index is None` is used as a test by functions that
        # disallow concurrency.
        index = index if self.concurrency > 1 else None
        return await self._start_task(
            index=index,
            active_partitions=active_partitions,
            stream=stream,
            channel=channel,
            beacon=self.beacon,
        )

    async def _start_one_supervised(
            self,
            index: Optional[int] = None,
            active_partitions: Optional[Set[TP]] = None,
            stream: StreamT = None) -> ActorT:
        aref = await self._start_one(
            index=index,
            active_partitions=active_partitions,
            stream=stream,
        )
        self.supervisor.add(aref)
        await aref.maybe_start()
        return aref

    async def _start_for_partitions(self,
                                    active_partitions: Set[TP]) -> ActorT:
        assert active_partitions
        self.log.info('Starting actor for partitions %s', active_partitions)
        return await self._start_one_supervised(None, active_partitions)

    async def on_start(self) -> None:
        self.supervisor = self._new_supervisor()
        await self._on_start_supervisor()

    def _new_supervisor(self) -> SupervisorStrategyT:
        return self._get_supervisor_strategy()(
            max_restarts=100.0,
            over=1.0,
            replacement=self._replace_actor,
            loop=self.loop,
            beacon=self.beacon,
        )

    async def _replace_actor(self, service: ServiceT, index: int) -> ServiceT:
        aref = cast(ActorRefT, service)
        return await self._start_one(
            index=index,
            active_partitions=aref.active_partitions,
            stream=aref.stream,
            channel=cast(ChannelT, aref.stream.channel),
        )

    def _get_supervisor_strategy(self) -> Type[SupervisorStrategyT]:
        SupervisorStrategy = self.supervisor_strategy
        if SupervisorStrategy is None:
            SupervisorStrategy = self.app.conf.agent_supervisor
        return SupervisorStrategy

    async def _on_start_supervisor(self) -> None:
        active_partitions = self._get_active_partitions()
        channel: ChannelT = cast(ChannelT, None)
        for i in range(self.concurrency):
            res = await self._start_one(
                index=i,
                active_partitions=active_partitions,
                channel=channel,
            )
            if channel is None:
                # First concurrency actor creates channel,
                # then we reuse it for --concurrency=n.
                # This way they share the same queue.
                channel = res.stream.channel
            self.supervisor.add(res)
        await self.supervisor.start()

    def _get_active_partitions(self) -> Optional[Set[TP]]:
        active_partitions: Optional[Set[TP]] = None
        if self.isolated_partitions:
            # when we start our first agent, we create the set of
            # partitions early, and save it in ._pending_active_partitions.
            # That way we can update the set once partitions are assigned,
            # and the actor we started may be assigned one of the partitions.
            active_partitions = self._pending_active_partitions = set()
        return active_partitions

    async def on_stop(self) -> None:
        # Agents iterate over infinite streams, so we cannot wait for them
        # to stop.
        # Instead we cancel it and this forces the stream to ack the
        # last message processed (but not the message causing the error
        # to be raised).
        await self._stop_supervisor()

    async def _stop_supervisor(self) -> None:
        if self.supervisor:
            await self.supervisor.stop()
            self.supervisor = None

    def cancel(self) -> None:
        for actor in self._actors:
            actor.cancel()

    async def on_partitions_revoked(self, revoked: Set[TP]) -> None:
        T = self.app.traced
        if self.isolated_partitions:
            # isolated: start/stop actors for each partition
            await T(self.on_isolated_partitions_revoked)(revoked)
        else:
            await T(self.on_shared_partitions_revoked)(revoked)

    async def on_partitions_assigned(self, assigned: Set[TP]) -> None:
        T = self.app.traced
        if self.isolated_partitions:
            await T(self.on_isolated_partitions_assigned)(assigned)
        else:
            await T(self.on_shared_partitions_assigned)(assigned)

    async def on_isolated_partitions_revoked(self, revoked: Set[TP]) -> None:
        self.log.dev('Partitions revoked')
        T = self.app.traced
        for tp in revoked:
            aref: Optional[ActorRefT] = self._actor_by_partition.pop(tp, None)
            if aref is not None:
                await T(aref.on_isolated_partition_revoked)(tp)

    async def on_isolated_partitions_assigned(self, assigned: Set[TP]) -> None:
        T = self.app.traced
        for tp in sorted(assigned):
            await T(self._assign_isolated_partition)(tp)

    async def _assign_isolated_partition(self, tp: TP) -> None:
        T = self.app.traced
        if (not self._first_assignment_done and
                not self._actor_by_partition):
            self._first_assignment_done = True
            # if this is the first time we are assigned
            # we need to reassign the agent we started at boot to
            # one of the partitions.
            T(self._on_first_isolated_partition_assigned)(tp)
        await T(self._maybe_start_isolated)(tp)

    def _on_first_isolated_partition_assigned(self, tp: TP) -> None:
        assert self._actors
        assert len(self._actors) == 1
        self._actor_by_partition[tp] = next(iter(self._actors))
        if self._pending_active_partitions is not None:
            assert not self._pending_active_partitions
            self._pending_active_partitions.add(tp)

    async def _maybe_start_isolated(self, tp: TP) -> None:
        try:
            aref = self._actor_by_partition[tp]
        except KeyError:
            aref = await self._start_isolated(tp)
            self._actor_by_partition[tp] = aref
        await aref.on_isolated_partition_assigned(tp)

    async def _start_isolated(self, tp: TP) -> ActorT:
        return await self._start_for_partitions({tp})

    async def on_shared_partitions_revoked(self, revoked: Set[TP]) -> None:
        ...

    async def on_shared_partitions_assigned(self, assigned: Set[TP]) -> None:
        ...

    def info(self) -> Mapping:
        return {
            'app': self.app,
            'fun': self.fun,
            'name': self.name,
            'channel': self.channel,
            'concurrency': self.concurrency,
            'help': self.help,
            'sinks': self._sinks,
            'on_error': self._on_error,
            'supervisor_strategy': self.supervisor_strategy,
            'isolated_partitions': self.isolated_partitions,
        }

    def clone(self, *, cls: Type[AgentT] = None, **kwargs: Any) -> AgentT:
        return (cls or type(self))(**{**self.info(), **kwargs})

    def test_context(self,
                     channel: ChannelT = None,
                     supervisor_strategy: SupervisorStrategyT = None,
                     on_error: AgentErrorHandler = None,
                     **kwargs: Any) -> AgentTestWrapperT:  # pragma: no cover
        # flow control into channel queues is disabled at startup,
        # so we need to resume it here.
        self.app.flow_control.resume()

        async def on_agent_error(agent: AgentT, exc: BaseException) -> None:
            if on_error is not None:
                await on_error(agent, exc)
            await agent.crash_test_agent(exc)

        return self.clone(
            cls=AgentTestWrapper,
            channel=channel if channel is not None else self.app.channel(),
            supervisor_strategy=supervisor_strategy or CrashingSupervisor,
            original_channel=self.channel,
            on_error=on_agent_error,
            **kwargs)

    def _prepare_channel(self,
                         channel: Union[str, ChannelT] = None,
                         internal: bool = True,
                         key_type: ModelArg = None,
                         value_type: ModelArg = None,
                         **kwargs: Any) -> ChannelT:
        app = self.app
        channel = f'{app.conf.id}-{self.name}' if channel is None else channel
        if isinstance(channel, ChannelT):
            return cast(ChannelT, channel)
        elif isinstance(channel, str):
            return app.topic(
                channel,
                internal=internal,
                key_type=key_type,
                value_type=value_type,
                **kwargs)
        raise TypeError(
            f'Channel must be channel, topic, or str; not {type(channel)}')

    def __call__(self, *,
                 index: int = None,
                 active_partitions: Set[TP] = None,
                 stream: StreamT = None,
                 channel: ChannelT = None) -> ActorRefT:
        # The agent function can be reused by other agents/tasks.
        # For example:
        #
        #   @app.agent(logs_topic, through='other-topic')
        #   async def filter_log_errors(stream):
        #       async for event in stream:
        #           if event.severity == 'error':
        #               yield event
        #
        #   @app.agent(logs_topic)
        #   async def alert_on_log_error(stream):
        #       async for event in filter_log_errors(stream):
        #            alert(f'Error occurred: {event!r}')
        #
        # Calling `res = filter_log_errors(it)` will end you up with
        # an AsyncIterable that you can reuse (but only if the agent
        # function is an `async def` function that yields)
        if stream is None:
            stream = self.stream(
                channel=channel,
                concurrency_index=index,
                active_partitions=active_partitions,
            )
        else:
            # reusing actor stream after agent restart
            assert stream.concurrency_index == index
            assert stream.active_partitions == active_partitions
        return self.actor_from_stream(stream)

    def actor_from_stream(self, stream: StreamT) -> ActorRefT:
        res = self.fun(stream)
        typ = cast(Type[Actor],
                   (AwaitableActor
                    if isinstance(res, Awaitable) else AsyncIterableActor))
        return typ(
            self,
            stream,
            res,
            index=stream.concurrency_index,
            active_partitions=stream.active_partitions,
            loop=self.loop,
            beacon=self.beacon,
        )

    def add_sink(self, sink: SinkT) -> None:
        if sink not in self._sinks:
            self._sinks.append(sink)

    def stream(self,
               channel: ChannelT = None,
               active_partitions: Set[TP] = None,
               **kwargs: Any) -> StreamT:
        if channel is None:
            channel = cast(TopicT, self.channel_iterator).clone(
                is_iterator=False,
                active_partitions=active_partitions,
            )
        if active_partitions is not None:
            assert channel.active_partitions == active_partitions
        s = self.app.stream(
            channel,
            loop=self.loop,
            active_partitions=active_partitions,
            **kwargs)
        s.add_processor(self._maybe_unwrap_reply_request)
        return s

    def _maybe_unwrap_reply_request(self, value: V) -> Any:
        if isinstance(value, ReqRepRequest):
            return value.value
        return value

    async def _start_task(self,
                          *,
                          index: Optional[int],
                          active_partitions: Optional[Set[TP]] = None,
                          stream: StreamT = None,
                          channel: ChannelT = None,
                          beacon: NodeT = None) -> ActorRefT:
        # If the agent is an async function we simply start it,
        # if it returns an AsyncIterable/AsyncGenerator we start a task
        # that will consume it.
        actor = self(
            index=index,
            active_partitions=active_partitions,
            stream=stream,
            channel=channel,
        )
        return await self._prepare_actor(actor, beacon)

    async def _prepare_actor(self, aref: ActorRefT,
                             beacon: NodeT) -> ActorRefT:
        coro: Any
        if isinstance(aref, Awaitable):
            # agent does not yield
            coro = aref
            if self._sinks:
                raise ImproperlyConfigured('Agent must yield to use sinks')
        else:
            # agent yields and is an AsyncIterator so we have to consume it.
            coro = self._slurp(aref, aiter(aref))
        task = asyncio.Task(self._execute_task(coro, aref), loop=self.loop)
        task._beacon = beacon  # type: ignore
        aref.actor_task = task
        self._actors.add(aref)
        return aref

    async def _execute_task(self, coro: Awaitable, aref: ActorRefT) -> None:
        # This executes the agent task itself, and does exception handling.
        _current_agent.set(self)
        try:
            await coro
        except asyncio.CancelledError:
            if self.should_stop:
                raise
        except Exception as exc:
            if self._on_error is not None:
                await self._on_error(self, exc)

            # Mark ActorRef as dead, so that supervisor thread
            # can start a new one.
            await aref.crash(exc)
            self.supervisor.wakeup()
            raise

    async def _slurp(self, res: ActorRefT, it: AsyncIterator) -> None:
        # this is used when the agent returns an AsyncIterator,
        # and simply consumes that async iterator.
        stream: Optional[StreamT] = None
        async for value in it:
            self.log.debug('%r yielded: %r', self.fun, value)
            if stream is None:
                stream = res.stream.get_active_stream()
            event = stream.current_event
            if event is not None and isinstance(event.value, ReqRepRequest):
                await self._reply(event.key, value, event.value)
            await self._delegate_to_sinks(value)

    async def _delegate_to_sinks(self, value: Any) -> None:
        for sink in self._sinks:
            if isinstance(sink, AgentT):
                await cast(AgentT, sink).send(value=value)
            elif isinstance(sink, ChannelT):
                await cast(TopicT, sink).send(value=value)
            else:
                await maybe_async(cast(Callable, sink)(value))

    async def _reply(self, key: Any, value: Any, req: ReqRepRequest) -> None:
        assert req.reply_to
        response = self._response_class(value)(
            key=key,
            value=value,
            correlation_id=req.correlation_id,
        )
        await self.app.send(
            req.reply_to,
            key=None,
            value=response,
        )

    def _response_class(self, value: Any) -> Type[ReqRepResponse]:
        if isinstance(value, ModelT):
            return ModelReqRepResponse
        return ReqRepResponse

    async def cast(self,
                   value: V = None,
                   *,
                   key: K = None,
                   partition: int = None,
                   timestamp: float = None) -> None:
        await self.send(
            key=key,
            value=value,
            partition=partition,
            timestamp=timestamp,
        )

    async def ask(self,
                  value: V = None,
                  *,
                  key: K = None,
                  partition: int = None,
                  timestamp: float = None,
                  reply_to: ReplyToArg = None,
                  correlation_id: str = None) -> Any:
        p = await self.ask_nowait(
            value,
            key=key,
            partition=partition,
            timestamp=timestamp,
            reply_to=reply_to or self.app.conf.reply_to,
            correlation_id=correlation_id,
            force=True,  # Send immediately, since we are waiting for result.
        )
        app = cast(App, self.app)
        await app._reply_consumer.add(p.correlation_id, p)
        await app.maybe_start_client()
        return await p

    async def ask_nowait(self,
                         value: V = None,
                         *,
                         key: K = None,
                         partition: int = None,
                         timestamp: float = None,
                         reply_to: ReplyToArg = None,
                         correlation_id: str = None,
                         force: bool = False) -> ReplyPromise:
        req = self._create_req(key, value, reply_to, correlation_id)
        await self.channel.send(
            key=key,
            value=req,
            partition=partition,
            timestamp=timestamp,
            force=force,
        )
        return ReplyPromise(req.reply_to, req.correlation_id)

    def _create_req(self,
                    key: K = None,
                    value: V = None,
                    reply_to: ReplyToArg = None,
                    correlation_id: str = None) -> ReqRepRequest:
        if reply_to is None:
            raise TypeError('Missing reply_to argument')
        topic_name = self._get_strtopic(reply_to)
        correlation_id = correlation_id or str(uuid4())
        return self._request_class(value)(
            value=value,
            reply_to=topic_name,
            correlation_id=correlation_id,
        )

    def _request_class(self, value: V) -> Type[ReqRepRequest]:
        if isinstance(value, ModelT):
            return ModelReqRepRequest
        return ReqRepRequest

    async def send(self,
                   *,
                   key: K = None,
                   value: V = None,
                   partition: int = None,
                   timestamp: float = None,
                   key_serializer: CodecArg = None,
                   value_serializer: CodecArg = None,
                   callback: MessageSentCallback = None,
                   reply_to: ReplyToArg = None,
                   correlation_id: str = None,
                   force: bool = False) -> Awaitable[RecordMetadata]:
        """Send message to topic used by agent."""
        if reply_to:
            value = self._create_req(key, value, reply_to, correlation_id)
        return await self.channel.send(
            key=key,
            value=value,
            partition=partition,
            timestamp=timestamp,
            key_serializer=key_serializer,
            value_serializer=value_serializer,
            force=force,
        )

    def _get_strtopic(self,
                      topic: Union[str, ChannelT, TopicT, AgentT]) -> str:
        if isinstance(topic, AgentT):
            return self._get_strtopic(cast(AgentT, topic).channel)
        if isinstance(topic, TopicT):
            return cast(TopicT, topic).get_topic_name()
        if isinstance(topic, ChannelT):
            raise ValueError('Channels are unnamed topics')
        return cast(str, topic)

    async def map(self,
                  values: Union[AsyncIterable, Iterable],
                  key: K = None,
                  reply_to: ReplyToArg = None,
                  ) -> AsyncIterator:  # pragma: no cover
        # Map takes only values, but can provide one key that is used for all.
        async for value in self.kvmap(
                ((key, v) async for v in aiter(values)), reply_to):
            yield value

    async def kvmap(
            self,
            items: Union[AsyncIterable[Tuple[K, V]], Iterable[Tuple[K, V]]],
            reply_to: ReplyToArg = None,
    ) -> AsyncIterator[str]:  # pragma: no cover
        # kvmap takes (key, value) pairs.
        reply_to = self._get_strtopic(reply_to or self.app.conf.reply_to)

        # BarrierState is the promise that keeps track of pending results.
        # It contains a list of individual ReplyPromises.
        barrier = BarrierState(reply_to)

        async for _ in self._barrier_send(barrier, items, reply_to):
            # Now that we've sent a message, try to see if we have any
            # replies.
            try:
                _, val = barrier.get_nowait()
            except asyncio.QueueEmpty:
                pass
            else:
                yield val
        # All the messages have been sent so finalize the barrier.
        barrier.finalize()

        # Then iterate over the results in the group.
        async for _, value in barrier.iterate():
            yield value

    async def join(self,
                   values: Union[AsyncIterable[V], Iterable[V]],
                   key: K = None,
                   reply_to: ReplyToArg = None,
                   ) -> List[Any]:  # pragma: no cover
        return await self.kvjoin(
            ((key, value) async for value in aiter(values)),
            reply_to=reply_to,
        )

    async def kvjoin(
            self,
            items: Union[AsyncIterable[Tuple[K, V]], Iterable[Tuple[K, V]]],
            reply_to: ReplyToArg = None) -> List[Any]:  # pragma: no cover
        reply_to = self._get_strtopic(reply_to or self.app.conf.reply_to)
        barrier = BarrierState(reply_to)

        # Map correlation_id -> index
        posindex: MutableMapping[str, int] = {
            cid: i
            async for i, cid in aenumerate(
                self._barrier_send(barrier, items, reply_to))
        }

        # All the messages have been sent so finalize the barrier.
        barrier.finalize()

        # wait until all replies received
        await barrier
        # then construct a list in the correct order.
        values: List = [None] * barrier.total
        async for correlation_id, value in barrier.iterate():
            values[posindex[correlation_id]] = value
        return values

    async def _barrier_send(
            self, barrier: BarrierState,
            items: Union[AsyncIterable[Tuple[K, V]], Iterable[Tuple[K, V]]],
            reply_to: ReplyToArg) -> AsyncIterator[str]:  # pragma: no cover
        # map: send many tasks to agents
        # while trying to pop incoming results off.
        async for key, value in aiter(items):
            correlation_id = str(uuid4())
            p = await self.ask_nowait(
                key=key,
                value=value,
                reply_to=reply_to,
                correlation_id=correlation_id)
            # add reply promise to the barrier
            barrier.add(p)

            # the ReplyConsumer will call the barrier whenever a new
            # result comes in.
            app = cast(App, self.app)
            await app.maybe_start_client()
            await app._reply_consumer.add(p.correlation_id, barrier)

            yield correlation_id

    def _repr_info(self) -> str:
        return shorten_fqdn(self.name)

    def get_topic_names(self) -> Iterable[str]:
        channel = self.channel
        if isinstance(channel, TopicT):
            return channel.topics
        return []

    @property
    def channel(self) -> ChannelT:
        if self._channel is None:
            self._channel = self._prepare_channel(
                self._channel_arg,
                key_type=self._key_type,
                value_type=self._value_type,
                **self._channel_kwargs,
            )
        return self._channel

    @channel.setter
    def channel(self, channel: ChannelT) -> None:
        self._channel = channel

    @property
    def channel_iterator(self) -> AsyncIterator:
        # The channel iterator is "memoized" here, so subsequent access to
        # instance.channel_iterator will return the same value.
        if self._channel_iterator is None:
            # we do not use aiter(channel) here, because
            # that will also add it to the topic conductor too early.
            self._channel_iterator = self.channel.clone(is_iterator=True)
        return self._channel_iterator

    @channel_iterator.setter
    def channel_iterator(self, it: AsyncIterator) -> None:
        self._channel_iterator = it

    @property
    def label(self) -> str:
        return self._agent_label()

    def _agent_label(self, name_suffix: str = '') -> str:
        s = f'{type(self).__name__}{name_suffix}: '
        s += f'{shorten_fqdn(qualname(self.fun))}'
        return s

    @property
    def shortlabel(self) -> str:
        return self._agent_label()
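
A minimal usage sketch of the request/reply helpers above. The app id, broker URL and the echo agent are illustrative assumptions, not part of the code shown here:

import faust

app = faust.App('example-app', broker='kafka://localhost:9092')

@app.agent()
async def echo(stream):
    async for value in stream:
        yield value.upper()      # the yielded value becomes the reply for ask()

async def demo():
    await echo.cast('fire-and-forget')      # plain send(); no reply expected
    reply = await echo.ask('please reply')  # ReqRepRequest/ReqRepResponse round trip
    print(reply)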
Example No. 22
class Boss:
    def __init__(self, glfw_window, opts, args):
        self.window_id_map = WeakValueDictionary()
        startup_session = create_session(opts, args)
        self.cursor_blinking = True
        self.window_is_focused = True
        self.glfw_window_title = None
        self.shutting_down = False
        self.child_monitor = ChildMonitor(
            glfw_window.window_id(), self.on_child_death,
            DumpCommands(args)
            if args.dump_commands or args.dump_bytes else None)
        set_boss(self)
        self.current_font_size = opts.font_size
        cell_size.width, cell_size.height = set_font_family(opts)
        self.opts, self.args = opts, args
        self.glfw_window = glfw_window
        glfw_window.framebuffer_size_callback = self.on_window_resize
        glfw_window.window_focus_callback = self.on_focus
        load_shader_programs()
        self.tab_manager = TabManager(opts, args)
        self.tab_manager.init(startup_session)
        self.activate_tab_at = self.tab_manager.activate_tab_at
        layout_sprite_map(cell_size.width, cell_size.height,
                          render_cell_wrapper)

    @property
    def current_tab_bar_height(self):
        return self.tab_manager.tab_bar_height

    def __iter__(self):
        return iter(self.tab_manager)

    def iterwindows(self):
        for t in self:
            yield from t

    def add_child(self, window):
        self.child_monitor.add_child(window.id, window.child.pid,
                                     window.child.child_fd, window.screen)
        self.window_id_map[window.id] = window

    def on_child_death(self, window_id):
        w = self.window_id_map.pop(window_id, None)
        if w is not None:
            w.on_child_death()

    def close_window(self, window=None):
        if window is None:
            window = self.active_window
        self.child_monitor.mark_for_close(window.id)

    def close_tab(self, tab=None):
        if tab is None:
            tab = self.active_tab
        for window in tab:
            self.close_window(window)

    def start(self):
        if not getattr(self, 'io_thread_started', False):
            self.child_monitor.start()
            self.io_thread_started = True

    def on_window_resize(self, window, w, h):
        viewport_size.width, viewport_size.height = w, h
        self.tab_manager.resize()

    def increase_font_size(self):
        self.change_font_size(
            min(self.opts.font_size * 5,
                self.current_font_size + self.opts.font_size_delta))

    def decrease_font_size(self):
        self.change_font_size(
            max(MINIMUM_FONT_SIZE,
                self.current_font_size - self.opts.font_size_delta))

    def restore_font_size(self):
        self.change_font_size(self.opts.font_size)

    def change_font_size(self, new_size):
        if new_size == self.current_font_size:
            return
        self.current_font_size = new_size
        w, h = cell_size.width, cell_size.height
        windows = tuple(filter(None, self.window_id_map.values()))
        cell_size.width, cell_size.height = set_font_family(
            self.opts, override_font_size=self.current_font_size)
        layout_sprite_map(cell_size.width, cell_size.height,
                          render_cell_wrapper)
        for window in windows:
            window.screen.rescale_images(w, h)
        self.resize_windows_after_font_size_change()
        for window in windows:
            window.screen.refresh_sprite_positions()
        self.tab_manager.refresh_sprite_positions()

    def resize_windows_after_font_size_change(self):
        self.tab_manager.resize()
        glfw_post_empty_event()

    def tabbar_visibility_changed(self):
        self.tab_manager.resize(only_tabs=True)
        glfw_post_empty_event()

    @property
    def active_tab(self):
        return self.tab_manager.active_tab

    def is_tab_visible(self, tab):
        return self.active_tab is tab

    @property
    def active_window(self):
        t = self.active_tab
        if t is not None:
            return t.active_window

    def dispatch_special_key(self, key, scancode, action, mods):
        # Handles shortcuts, return True if the key was consumed
        key_action = get_shortcut(self.opts.keymap, mods, key, scancode)
        self.current_key_press_info = key, scancode, action, mods
        return self.dispatch_action(key_action)

    def dispatch_action(self, key_action):
        if key_action is not None:
            f = getattr(self, key_action.func, None)
            if f is not None:
                passthrough = f(*key_action.args)
                if passthrough is not True:
                    return True
        tab = self.active_tab
        if tab is None:
            return False
        window = self.active_window
        if window is None:
            return False
        if key_action is not None:
            f = getattr(tab, key_action.func,
                        getattr(window, key_action.func, None))
            if f is not None:
                passthrough = f(*key_action.args)
                if passthrough is not True:
                    return True
        key, scancode, action, mods = self.current_key_press_info
        data = get_sent_data(self.opts.send_text_map, key, scancode, mods,
                             window, action)
        if data:
            window.write_to_child(data)
            return True
        return False

    def combine(self, *actions):
        for key_action in actions:
            self.dispatch_action(key_action)

    def on_focus(self, window, focused):
        self.window_is_focused = focused
        w = self.active_window
        if w is not None:
            w.focus_changed(focused)

    def display_scrollback(self, data):
        if self.opts.scrollback_in_new_tab:
            self.display_scrollback_in_new_tab(data)
        else:
            tab = self.active_tab
            if tab is not None:
                tab.new_special_window(
                    SpecialWindow(self.opts.scrollback_pager, data,
                                  _('History')))

    def switch_focus_to(self, window_idx):
        tab = self.active_tab
        tab.set_active_window_idx(window_idx)
        old_focus = tab.active_window
        if not old_focus.destroyed:
            old_focus.focus_changed(False)
        tab.active_window.focus_changed(True)

    def send_fake_scroll(self, window_idx, amt, upwards):
        tab = self.active_tab
        w = tab.windows[window_idx]
        k = get_key_map(w.screen)[GLFW_KEY_UP if upwards else GLFW_KEY_DOWN]
        w.write_to_child(k * amt)

    def open_url(self, url):
        if url:
            open_url(url, self.opts.open_url_with)

    def gui_close_window(self, window):
        window.destroy()
        for tab in self.tab_manager:
            if window in tab:
                break
        else:
            return
        tab.remove_window(window)
        if len(tab) == 0:
            self.tab_manager.remove(tab)
            tab.destroy()
            if len(self.tab_manager) == 0:
                if not self.shutting_down:
                    self.glfw_window.set_should_close(True)
                    glfw_post_empty_event()

    def destroy(self):
        self.shutting_down = True
        self.child_monitor.shutdown()
        wakeup()
        self.child_monitor.join()
        for t in self.tab_manager:
            t.destroy()
        del self.tab_manager
        destroy_sprite_map()
        destroy_global_data()
        del self.glfw_window

    def paste_to_active_window(self, text):
        if text:
            w = self.active_window
            if w is not None:
                w.paste(text)

    def paste_from_clipboard(self):
        text = self.glfw_window.get_clipboard_string()
        self.paste_to_active_window(text)

    def paste_from_selection(self):
        text = get_primary_selection()
        self.paste_to_active_window(text)

    def set_primary_selection(self):
        w = self.active_window
        if w is not None and not w.destroyed:
            text = w.text_for_selection()
            if text:
                set_primary_selection(text)

    def next_tab(self):
        self.tab_manager.next_tab()

    def previous_tab(self):
        self.tab_manager.next_tab(-1)

    def new_tab(self):
        self.tab_manager.new_tab()

    def move_tab_forward(self):
        self.tab_manager.move_tab(1)

    def move_tab_backward(self):
        self.tab_manager.move_tab(-1)

    def display_scrollback_in_new_tab(self, data):
        self.tab_manager.new_tab(special_window=SpecialWindow(
            self.opts.scrollback_pager, data, _('History')))
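
The window_id_map above relies on WeakValueDictionary semantics: as soon as nothing else holds a window object, its entry disappears by itself, which is why on_child_death simply pops and tolerates a missing key. A standalone sketch of that behaviour with a toy window class (not kitty's):

import gc
from weakref import WeakValueDictionary

class ToyWindow(object):
    def __init__(self, wid):
        self.id = wid

window_id_map = WeakValueDictionary()
w = ToyWindow(1)
window_id_map[w.id] = w
assert window_id_map.get(1) is w      # present while a strong reference exists
del w
gc.collect()
assert window_id_map.get(1) is None   # the entry vanished with the window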
Example No. 23
class AsyncTornadoRPCClient(AsyncRabbitConnectionBase):

    """
    Wrap `pika.adapters.tornado_connection.TornadoConnection` 
    to provide a simple RPC client powered by ``tornado.gen.engine``
    semantics.

    """

    def __init__(self, *args, **kwargs):
        self.declare_rpc_queue_lock = Lock()
        
        self.connection_open_callbacks = []
        self.rpc_queue_callbacks = []
        
        self.rpc_queue = None
        
        self.futures = WeakValueDictionary()

        super(AsyncTornadoRPCClient, self).__init__(*args, **kwargs)

    def on_closed(self, connection):
        logger.warning('AsyncRabbitClient.on_close: closed!')
        self.rpc_queue = None

    @gen.engine
    def rpc_queue_declare(self, callback, **kwargs):
        if not self.declare_rpc_queue_lock.acquire(False):
            logger.info('RPC Queue is already in the process of '
                        'being declared (declare_rpc_queue_lock '
                        'could not be acquired).')
            callback()
            return

        try:
            self.rpc_queue = yield gen.Task(self.queue_declare,
                                            exclusive=True,
                                            auto_delete=True)
            yield gen.Task(self.basic_consume, queue=self.rpc_queue)

            logger.info('Adding callbacks that are waiting for an RPC '
                        'queue to the tornado IOLoop.')
            while self.rpc_queue_callbacks:
                cb = self.rpc_queue_callbacks.pop()
                self.io_loop.add_callback(cb)
            logger.info('Done adding callbacks.')

        finally:
            self.declare_rpc_queue_lock.release()
            callback()

    @gen.engine
    def ensure_rpc_queue(self, callback):
        logger.info('Ensuring that an RPC queue has been declared.')
        yield Task(self.ensure_connection)
        if self.rpc_queue:
            logger.info('The RPC queue is already open.')
            callback()
        else:
            logger.info('Adding callback to list of callbacks '
                        'waiting for the RPC queue to be open.')
            callback = stack_context.wrap(callback)
            self.rpc_queue_callbacks.append(callback)

            logger.info('Calling rpc_queue_declare().')
            rpc_queue = yield gen.Task(self.rpc_queue_declare)
            logger.info('rpc_queue_declare has been called.')

    @gen.engine
    def rpc(self, rpc_request, properties=None, callback=None):
        '''
        Publish an RPC request.  Returns a :class:`RPCResponseFuture`.

        :param rpc_request: An instance of :class:`RPCRequest`.

        '''

        callback = stack_context.wrap(callback)

        yield Task(self.ensure_connection)
        yield Task(self.ensure_rpc_queue)
        
        if not properties:
            correlation_id = str(uuid.uuid4())
            properties = pika.BasicProperties(reply_to=self.rpc_queue,
                                              correlation_id=correlation_id)
        else:
            # Keep correlation_id defined when the caller supplies properties.
            correlation_id = properties.correlation_id
        
        logger.info('Publishing RPC request with key: %s' %
                    rpc_request.routing_key)

        self.channel.basic_publish(exchange=rpc_request.exchange,
                                   routing_key=rpc_request.routing_key,
                                   body=rpc_request.json_params,
                                   properties=properties)

        logger.info('Constructing RPC response future with cid: %s' %
                    correlation_id)
        future = RPCResponseFuture(correlation_id,
                                   timeout=rpc_request.timeout,
                                   io_loop=self.io_loop)
        self.futures[correlation_id] = future

        callback(future)

    @gen.engine
    def basic_publish(self, rpc_request, properties=None, callback=None):
        yield Task(self.ensure_connection)
        yield Task(self.ensure_rpc_queue)
        
        if not properties:
            properties = pika.BasicProperties()
        
        logger.info('Publishing message request with key: %s' %
                    rpc_request.routing_key)

        self.channel.basic_publish(exchange=rpc_request.exchange,
                                   routing_key=rpc_request.routing_key,
                                   body=rpc_request.json_params,
                                   properties=properties)
        logger.info('channel.basic_publish finished.')
        callback()
    
    def consume_message(self, channel, method, header, body):
        logger.info('RPC response consumed')
        cid = header.correlation_id

        try:
            future = self.futures.pop(cid)
            response = RPCResponse(channel, method, header, body)
            cb = partial(future.response_callback, response)
            self.io_loop.add_callback(cb)
        except KeyError:
            logger.warning('AsyncRabbitClient.consume_message received an'
                           ' unrecognized correlation_id: %s.  Maybe the'
                           ' RPC took too long and was timed out, or maybe'
                           ' the response was sent more than once.' % cid)
Example No. 24
class EntityCache(object):
    """
    Cache for entities.

    Supports add and remove operations as well as lookup by ID and
    by slug.
    """
    def __init__(self, entities=None, allow_none_id=True):
        """
        :param bool allow_none_id: Flag specifying if calling :meth:`add`
            with an entity that does not have an ID is allowed.
        """
        # Flag indicating if None IDs are allowed in this cache.
        self.__allow_none_id = allow_none_id
        # List of cached entities. This is the only place we are holding a
        # real reference to the entity.
        if entities is None:
            entities = []
        self.__entities = entities
        # Dictionary mapping entity IDs to entities for fast lookup by ID.
        self.__id_map = WeakValueDictionary()
        # Dictionary mapping entity slugs to entities for fast lookup by slug.
        self.__slug_map = {}

    def get_by_id(self, entity_id):
        """
        Performs a lookup of an entity by its ID.

        :param int entity_id: entity ID.
        :return: entity found or ``None``.
        """
        return self.__id_map.get(entity_id)

    def has_id(self, entity_id):
        """
        Checks if this entity cache holds an entity with the given ID.

        :return: Boolean result of the check.
        """
        return entity_id in self.__id_map

    def get_by_slug(self, entity_slug):
        """
        Performs a lookup of an entity by its slug.

        :param str entity_slug: entity slug.
        :return: entity found or ``None``.
        """
        return self.__slug_map.get(entity_slug)

    def has_slug(self, entity_slug):
        return entity_slug in self.__slug_map

    def add(self, entity):
        """
        Adds the given entity to this cache.

        :param entity: Entity to add.
        :type entity: Object implementing :class:`everest.interfaces.IEntity`.
        :raises ValueError: If the ID of the entity to add is ``None``
          (unless the `allow_none_id` constructor argument was set).
        """
        do_append = self.__check_new(entity)
        if do_append:
            self.__entities.append(entity)

    def remove(self, entity):
        """
        Removes the given entity from this cache.

        :param entity: Entity to remove.
        :type entity: Object implementing :class:`everest.interfaces.IEntity`.
        :raises KeyError: If the given entity is not in this cache.
        :raises ValueError: If the ID of the given entity is `None`.
        """
        self.__id_map.pop(entity.id, None)
        self.__slug_map.pop(entity.slug, None)
        self.__entities.remove(entity)

    def update(self, source_data, target_entity):
        """
        Updates the state of the target entity with the given source data.

        :param target_entity: Entity to update.
        :type target_entity: Object implementing
          :class:`everest.interfaces.IEntity`.
        """
        EntityState.set_state_data(target_entity, source_data)

    def get_all(self):
        """
        Returns the list of all entities in this cache in the order they
        were added.
        """
        return self.__entities

    def retrieve(self, filter_expression=None,
                 order_expression=None, slice_key=None):
        """
        Retrieve entities from this cache, possibly after filtering, ordering
        and slicing.
        """
        ents = iter(self.__entities)
        if not filter_expression is None:
            ents = filter_expression(ents)
        if not order_expression is None:
            # Ordering always involves a copy and conversion to a list, so
            # we have to wrap in an iterator.
            ents = iter(order_expression(ents))
        if not slice_key is None:
            ents = islice(ents, slice_key.start, slice_key.stop)
        return ents

    def rebuild(self, entities):
        """
        Rebuilds the ID and slug maps of this cache.

        This can be necessary when entities obtain their IDs only after
        they have been flushed to the backend.
        """
        for ent in entities:
            self.__check_new(ent)

    def __contains__(self, entity):
        if not entity.id is None:
            is_contained = entity.id in self.__id_map
        else:
            is_contained = entity in self.__entities
        return is_contained

    def __check_new(self, entity):
        # For certain use cases (e.g., staging), we do not want the entity to
        # be added to have an ID yet.
        do_append = True
        if not entity.id is None:
            if entity.id in self.__id_map:
                if not self.__id_map[entity.id] is entity:
                    raise ValueError('Duplicate entity ID "%s". %s'
                                     % (entity.id, entity))
                else:
                    do_append = False
            else:
                self.__id_map[entity.id] = entity
        elif not self.__allow_none_id:
            raise ValueError('Entity ID must not be None.')
        # The slug can be a lazy attribute depending on the
        # value of other (possibly not yet initialized) attributes which is
        # why we can not always assume it is available at this point.
        if do_append and hasattr(entity, 'slug') and not entity.slug is None:
            ents = self.__slug_map.get(entity.slug)
            if not ents is None:
                ents.append(entity)
            else:
                self.__slug_map[entity.slug] = WeakList([entity])
        return do_append
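
A small sketch of the weak/strong split above: the id and slug maps only index entities, while the entity list is what keeps them alive. ToyEntity is a stand-in for an object implementing everest.interfaces.IEntity (id and slug attributes):

class ToyEntity(object):
    def __init__(self, id, slug):
        self.id = id
        self.slug = slug

cache = EntityCache()
entity = ToyEntity(1, 'first')
cache.add(entity)
assert cache.get_by_id(1) is entity    # fast lookup through the weak id map
cache.remove(entity)
assert not cache.has_id(1)             # maps and the entity list are cleaned up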
Example No. 25
class HeapManager(threading.Thread):
    """
    @summary: This class is intended to manage all dataClay objects in runtime's memory.
    """
    """ Logger """
    logger = None

    def __init__(self, theruntime):
        """
        @postcondition: Constructor of the object called from sub-class
        @param theruntime: Runtime being managed 
        """
        """ Memory objects. This dictionary must contain all objects in runtime memory (client or server), as weakrefs. """
        self.inmemory_objects = WeakValueDictionary()
        threading.Thread.__init__(self)
        self._finished = threading.Event()
        """ Runtime being monitorized. Java uses abstract functions to get the field in the proper type (EE or client) due to type-check. Not needed here. """
        self.runtime = theruntime
        self.logger = logging.getLogger(__name__)
        self.daemon = True
        self.logger.debug("HEAP MANAGER created.")

    def get_heap(self):
        return self.inmemory_objects

    def shutdown(self):
        """Stop this thread"""
        self.logger.debug("HEAP MANAGER shutdown request received.")
        self._finished.set()

    def run(self):
        """
        @postcondition: Overrides run function 
        """
        gc_check_time_interval_seconds = Configuration.MEMMGMT_CHECK_TIME_INTERVAL / 1000.0
        while 1:
            self.logger.trace("HEAP MANAGER THREAD is awake...")
            if self._finished.isSet(): break
            self.run_task()

            # sleep for interval or until shutdown
            self.logger.trace("HEAP MANAGER THREAD is going to sleep...")
            self._finished.wait(gc_check_time_interval_seconds)

        self.logger.debug("HEAP MANAGER THREAD Finished.")

    def _add_to_inmemory_map(self, dc_object):
        """
        @postcondition: the object is added to inmemory map
        @param dc_object: object to add
        """
        oid = dc_object.get_object_id()
        self.inmemory_objects[oid] = dc_object

    def remove_from_heap(self, object_id):
        """
        @postcondition: Remove reference from Heap. Even if we remove it from the heap,
        the object won't be garbage-collected until HeapManager flushes the object and releases it.
        @param object_id: id of object to remove from heap
        """
        self.inmemory_objects.pop(object_id)

    def get_from_heap(self, object_id):
        """
        @postcondition: Get from heap. 
        @param object_id: id of object to get from heap
        @return Object with id provided in heap or None if not found.
        """
        try:
            obj = self.inmemory_objects[object_id]
            self.logger.debug("Hit in Heap object %s" % str(object_id))
            return obj
        except KeyError:
            self.logger.debug("Miss in Heap object %s" % str(object_id))
            return None

    def exists_in_heap(self, object_id):
        """
        @postcondition: Exists from heap. 
        @param object_id: id of object to get from heap
        @return True if exists. False otherwise.
        """
        try:
            if self.inmemory_objects[object_id] is None:
                return False
            else:
                return True
        except KeyError:
            return False

    def heap_size(self):
        """
        @postcondition: Get heap size. 
        @return Heap size
        """
        return len(self.inmemory_objects)

    def count_loaded_objs(self):
        num_loaded_objs = 0
        for obj in self.inmemory_objects.values():
            if obj.is_loaded():
                num_loaded_objs = num_loaded_objs + 1
        return num_loaded_objs

    @abstractmethod
    def flush_all(self):
        pass

    @abstractmethod
    def run_task(self):
        pass

    def cleanReferencesAndLockers(self):
        """
        @postcondition: Clean references and lockers not being used.
        """
        self.runtime.locker_pool.cleanLockers()
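
A minimal sketch of a concrete subclass filling in the two abstract hooks; the logging-only run_task and the no-op flush_all are placeholders, not dataClay's real managers:

class LoggingHeapManager(HeapManager):
    """Toy subclass: only reports heap statistics on every management cycle."""

    def run_task(self):
        # Called periodically by run(), every MEMMGMT_CHECK_TIME_INTERVAL ms.
        self.logger.debug("heap size=%d loaded=%d",
                          self.heap_size(), self.count_loaded_objs())

    def flush_all(self):
        # Placeholder: a real manager would serialize and release loaded objects.
        pass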
Example No. 26
class RPC(RPC_):
    """RPC (without underscore) extends RPC_ with the possibility of
    sending (and receiving) responses.

    RPCs differ from normal messages in that they generate an
    additional "response" message.

    It adds:
    * An additional wrapper (built over the one in RPC_) for callbacks
      that serializes the return (or any exception that is raised) and
      sends it to a "response" queue.
    * A new endpoint type "rpc"
    * A low level publish_rpc method that works similarly to Queue.publish
      except that it does the preparations so that a response can be
      received.
    * A high level `rpc` method for calling remote procedures in a
      friendly way.
    * The constructor takes an additional "loop" argument.

    The rpc interface is based on Futures. Each call yields a Future
    object that can be awaited.

    To use the RPC client, you must first call the start_client()
    coroutine-method to create the response queue and register the
    receiver callback.

    Implementation details:
    The Futures for unanswered rpcs are kept in a WeakValueDictionary.
    If the application loses all references to a Future then it is
    deleted and can't be recovered. When the response arrives, it will
    be discarded.
    """
    def __init__(self, loop, *args, **kwargs):
        """loop must be a asycio.BaseEventLoop-derived object.
        The rest of the argument are passed on to the superclass."""
        super().__init__(*args, **kwargs)
        self._loop = loop
        self._correlation_gen = itertools.count()
        self._open_responses = WeakValueDictionary()
        self._client_started = False

    # RPC server: Methods dealing with receiving, processing and
    # responding to RPCs

    async def _rpc_receive_wrapper(self, f, channel, body, envelope,
                                   properties, ack_mode, **kwargs):
        try:
            r = await self._rpc_noret_receive_wrapper(f,
                                                      channel,
                                                      body,
                                                      envelope,
                                                      properties,
                                                      ack_mode=ack_mode,
                                                      **kwargs)
        except Exception as e:
            rdata, ctype, cenc = self.format_exception(e)
        else:
            rdata, ctype, cenc = self.format_response(r)

        cid = properties.correlation_id

        new_props = {
            'timestamp': _tstamp(),
            'content_type': ctype,
            'content_encoding': cenc,
            'correlation_id': cid,
            'reply_to': properties.reply_to
        }

        logger.debug("Sending response: %s", cid)

        await self.channel.publish(rdata,
                                   '',
                                   properties.reply_to,
                                   properties=new_props)

    def register_rpc(self,
                     queue_name,
                     routing_key,
                     function,
                     ack_mode=ACK.receive,
                     extended=False,
                     **kwargs):
        wrapped = functools.partial(self._rpc_receive_wrapper,
                                    function,
                                    ack_mode=ack_mode,
                                    extended=extended)
        return self.register_queue(queue_name, routing_key, wrapped, **kwargs)

    @staticmethod
    def format_response(obj):
        data = json.dumps({'return': obj}).encode("utf-8")

        return data, "application/json", "utf-8"

    @staticmethod
    def format_exception(exception):
        """Convert `exception` to a dict and serialize it"""
        err_info = {
            'exc_type': type(exception).__name__,
            'exc_message': str(exception),
            'traceback': traceback.format_tb(exception.__traceback__)
        }

        data = json.dumps(err_info).encode("utf-8")

        return data, "application/json", "utf-8"

    # RPC client: Methods dealing with sending RPC requests and
    #    receiving the results.

    async def start_client(self):
        """Declare the response queue.
        You should call this only once. After the first call, further
        calls to start_client() will do nothing.
        """
        if self._client_started:
            return

        r = await self.channel.queue_declare(exclusive=True, auto_delete=True)
        self.response_queue = r['queue']

        logger.debug("Return queue is: %s", self.response_queue)

        await self.channel.basic_consume(self._response_handler,
                                         self.response_queue,
                                         no_ack=True)

        self._client_started = True

    async def _response_handler(self, channel, body, envelope, properties):
        d = self.decode_response(body)
        cid = properties.correlation_id

        try:
            pending_future = self._open_responses.pop(cid)
        except KeyError:
            logger.debug("Discarded response: %s", cid)
            return

        if "return" in d:
            pending_future.set_result(d["return"])
        else:
            pending_future.set_exception(
                RemoteException(d["exc_message"], d["exc_type"],
                                d["traceback"]))

    async def publish_rpc(self, data, routing_key, **kwargs):
        """Publish a message along with the necessary steps to allow for
        a reply:

        * Create a future and register it.
        * Set the reply_to field.
        * Generate a new correlation id.

        Returns:
            future: A new asyncio.Future object associated with the
                    reply to this message. The future is registered in
                    self._open_responses.
        """
        # The correlation_id MUST be a string (why????)
        cid = str(next(self._correlation_gen))
        future = _create_future(self._loop)

        self._open_responses[cid] = future

        rpc_properties = {
            'reply_to': self.response_queue,
            'correlation_id': cid
        }
        rpc_properties.update(kwargs.pop("properties", ()))

        await self.publish(data,
                           routing_key,
                           properties=rpc_properties,
                           **kwargs)

        return future

    def rpc(self, proc_name, mandatory=False, immediate=False, **properties):
        """Return a callable:
            f(*args, **kwargs)

        that can be used to do remote procedure calls that send a reply.

        The callable returns a coroutine that does the remote call and
        yields an asyncio.Future object.
        The result (or error) of the Future is set when a reply is
        received.

        The RPC object keeps weak references to all Futures created.
        If the Future is destroyed, the reply to that RPC will be
        silently ignored.

        Example:

        Assuming client is an RPC object
        ... f = client.rpc("some_function")
        ... x = await f(1, 2, r = 0)
        ... result = await x
        """

        return functools.partial(self._call, proc_name, self.publish_rpc,
                                 properties, mandatory, immediate)

    @staticmethod
    def decode_response(body):
        d = json.loads(body.decode("utf-8"))

        return d
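
A usage sketch following the class docstring above; "some_function" is an assumed routing key, and the caller must keep a reference to the returned future, since the weak registry otherwise drops the reply:

async def call_some_function(client):
    # client is an already connected RPC instance.
    await client.start_client()      # declare the response queue (no-op after the first call)
    f = client.rpc("some_function")
    pending = await f(1, 2, r=0)     # publishes the request, returns an asyncio.Future
    return await pending             # resolves, or raises RemoteException, on reply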
Example No. 27
class Storage(object):
	def __init__(self, cache=None):
		# {(InstanceInfo(instance).model_info.model, instance.pk): InstanceInfo, ...}
		self._alive = WeakValueDictionary()

		# {InstanceInfo(instance): instance, ...}
		self._dirty = {}

		if cache is not None:
			self._cache = cache
		else:
			self._cache = MRUCache(settings.MAX_CACHE)

		signals.register(
			(
				"instance-deleted",
	
				"start-tracking-changes",
				"stop-tracking-changes",
	
				"model-pre-init",
				"model-post-init",
				"model-pre-save",
				"model-post-save",
				"model-pre-delete",
				"model-post-delete",
				"model-pre-update",
				"model-post-update",
				
				# "model-history-reset",
				# "model-history-redo",
				# "model-history-undo",
				
				"relation-pre-get",
				"relation-post-get",
				"relation-pre-set",
				"relation-post-set",
				"relation-pre-delete",
				"relation-post-delete",
				"relation-pre-add",
				"relation-post-add",
				"relation-pre-remove",
				"relation-post-remove",

				"model-do-cache",
				"model-do-not-cache",
			),
			self
		)
		signals.register_with_callback("cache-rollback", self, "rollback")

	def get(self, query):
		return QuerySetIterator(query, self)

	def clear(self):
		self._dirty.clear()
		self._alive.clear()
		self._cache.clear()

	def set_dirty(self, inst_info):
		self._dirty[inst_info] = inst_info.get_inst()

	def cache(self, inf):
		self._alive.pop((inf.model_info.model, inf._lazypkval), None)
		self._alive[(inf.model_info.model, inf.get_pk_as_key())] = inf
		self._cache.add(inf)

	def uncache(self, inf):
		if inf in self._cache:
			self._cache.remove(inf)

		self._dirty.pop(inf, None)
		self._alive.pop((inf.model_info.model, inf.get_pk_as_key()), None)
		self._alive.pop((inf.model_info.model, inf._lazypkval), None)

	### signals ###

	#??? what is this used for?
	def instance_deleted(self, inst_info):
		print "instance_deleted", inst_info

	def model_pre_init(self, instance, **kwargs):
		signals.fire("stop-tracking-changes", instance=instance)

	def model_post_init(self, instance, **kwargs):
		inf = get_inst_info(instance)
		signals.fire("start-tracking-changes", instance=instance)
		self._alive[(inf.model_info.model, inf.get_pk_as_key())] = inf

		# If instance was not initialized with a value for primary key,
		# then it has not been saved yet and goes into self._dirty.
		if instance.pk is None:
			self.set_dirty(inf)

	def start_tracking_changes(self, instance):
		get_inst_info(instance)._meta["track-changes"] = True

	def stop_tracking_changes(self, instance):
		get_inst_info(instance)._meta["track-changes"] = False

	def model_pre_save(self, instance):
		pass

	def model_post_save(self, instance, created):
		inf = get_inst_info(instance)
		if not inf._meta["do-cache"]:
			return

		self.cache(inf)
		self._dirty.pop(inf, None)

		# On calling Model.delete, tracking of changes is stopped, so start
		# tracking now.
		signals.fire("start-tracking-changes", instance=instance)

	def model_pre_update(self, instance, value, fieldname):
		# instance._inst_info.record_change(fieldname, value)
		pass

	def model_post_update(self, instance, value, fieldname):
		inf = get_inst_info(instance)

		if not inf in self._dirty:
			self.set_dirty(inf)

	def model_pre_delete(self, instance):
		signals.fire("stop-tracking-changes", instance=instance)

	def model_post_delete(self, instance, deleted):
		if not deleted:
			return

		inf = get_inst_info(instance)

		self.uncache(inf)

		instance.id = None

	def model_do_cache(self, instance):
		inf = get_inst_info(instance)
		self.cache(inf)
		self.set_dirty(inf)
		inf._meta["do-cache"] = True
		signals.fire("start-tracking-changes", instance=instance)

	def model_do_not_cache(self, instance):
		inf = get_inst_info(instance)
		self.uncache(inf)
		inf._meta["do-cache"] = False
		signals.fire("stop-tracking-changes", instance=instance)

	def model_history_reset(self, instance, **kwargs):
		raise NotImplementedError

	def model_history_undo(self, instance, fieldname, **kwargs):
		raise NotImplementedError

	def model_history_redo(self, instance, fieldname, **kwargs):
		raise NotImplementedError

	def relation_pre_get(self, manager, **kwargs):
		pass
	
	def relation_post_get(self, manager, **kwargs):
		pass

	def relation_pre_set(self, manager, values, **kwargs):
		pass
	
	def relation_post_set(self, manager, values, **kwargs):
		pass

	def relation_pre_delete(self, manager, **kwargs):
		pass

	def relation_post_delete(self, manager, **kwargs):
		pass

	def relation_pre_add(self, manager, values, **kwargs):
		pass

	def relation_post_add(self, manager, values, **kwargs):
		pass

	def relation_pre_remove(self, manager, values, **kwargs):
		pass

	def relation_post_remove(self, manager, values, **kwargs):
		pass
Example No. 28
class LRUCache:
    """A Least-Recently-Used cache.

    The cache also includes a 'graveyard' of items that have been evicted from
    the cache. These are stored as weak references through a
    `weakref.WeakValueDictionary`. Evicted items can be resurrected if they are
    retrieved before gc.

    I purposely chose to *not* implement __getitem__ and __setitem__ to distance
    this class from builtin dicts and similar objects.

    Based on:
    https://docs.python.org/3.9/library/collections.html#ordereddict-examples-and-recipes

    :param maxsize: The maximum number of items to store in the cache.

    >>> import gc
    >>> class A:
    ...     def __init__(self, x):
    ...         self.x = x
    ...     def __repr__(self):
    ...         return 'ALIVE ' + str(self.x)
    >>> c = LRUCache(maxsize=2)
    >>> c.set(1, A(1))
    >>> c.set(2, A(2))
    >>> c.set(3, A(3))
    >>> _ = gc.collect()
    >>> c.get(1, 'DEAD')
    'DEAD'
    >>> c.get(2)
    ALIVE 2
    >>> c.get(3)
    ALIVE 3
    >>> c.set(4, A(4))
    >>> _ = gc.collect()
    >>> c.get(2, 'DEAD')
    'DEAD'
    >>> c.hits
    2
    >>> c.misses
    2
    """
    def __init__(self, maxsize):
        self.maxsize = maxsize
        self.lru = OrderedDict()
        self.grave = WeakValueDictionary()
        self.hits = 0
        self.misses = 0
        self.resurrections = 0
        self.rwlock = ReadWriteLock()

    def get(self, key, default=None):
        """Retrieve an item from the cache if it exists.

        :param key: The key to retrieve.
        :param default: The default value to return if the item doesn't exist.
        :return: The value associated with key.
        """

        with self.rwlock.read_access:
            try:
                value = self.lru[key]
                self.hits += 1
            except KeyError:
                # try to resurrect
                value = self.grave.pop(key, None)
                if value is not None:
                    self.lru[key] = value
                    self.resurrections += 1
                else:
                    self.misses += 1
                    return default

            self.lru.move_to_end(key)
        return value

    def set(self, key, value):
        """Add a value to the cache.

        :param key: The key for the item.
        :param value: The value to store.
        """
        with self.rwlock.write_access:
            if key in self.grave:
                del self.grave[key]

            if key in self.lru:
                self.lru.move_to_end(key)
            self.lru[key] = value

            while len(self.lru) > self.maxsize:
                # remove old items from the cache
                # send them to live with the dead
                (k, v) = self.lru.popitem(last=False)
                self.grave[k] = v

    def delete(self, key):
        """Remove a value from the cache.

        If there is no item associated with key, do nothing.

        :param key: The key to delete.
        """
        with self.rwlock.write_access:
            if key in self.grave:
                del self.grave[key]
            if key in self.lru:
                del self.lru[key]
Example No. 29
class ConfCache(QObject):
    def __init__(self, weak=True):
        super(ConfCache, self).__init__()

        if weak:
            self.cache = WeakValueDictionary()
        else:
            self.cache = {}  # pylint: disable=redefined-variable-type

        self.monitor = MonitorWithRename(parent=self)
        self.monitor.fileChanged.connect(self.onFileChanged)

    @Slot(str)
    def onFileChanged(self, path):
        """Method called when a monitored file changes

		This method should be reimplemented by subclasses to reload a configuration object.

		When the cache stores weak references, config objects may be deleted at any time, but the associated
		file will stay monitored.
		A method reimplementation should check if the config still exists in cache before performing a costly
		file reload. See :any:`unmonitorCollected`.
		"""
        pass

    def unmonitorCollected(self, path=None):
        """Stop monitoring file if the config object was garbage-collected.

		:param path: if None, verifies all monitored files
		:returns: True if `path` was unmonitored, else False. Meaningless if `path` is None.
		"""

        if path is None:
            for f in self.monitor.files():
                self.unmonitorCollected(f)
            return False
        else:
            if path in self.cache:
                return False
            self.monitor.removePath(path)
            return True

    def addConf(self, path, conf):
        """Add a config object to cache

		The config file `path` will be monitored for changes and the `conf` object will be added to cache.

		:param path: path of the config
		:param conf: the config object to cache
		"""
        self.cache[path] = conf
        self.monitor.addPath(path)

    def delConf(self, path):
        """Remove a config from cache.

		The config file at `path` is also unmonitored.
		"""
        self.monitor.removePath(path)
        self.cache.pop(path, None)

    def get(self, path):
        """Get the config object for path

		:returns: the config object in cache, or None if not found or the object was garbage-collected.
		"""
        return self.cache.get(path)
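
A sketch of the subclassing pattern the onFileChanged docstring describes; JsonConf, JsonConfCache and the JSON reload are illustrative, not part of the original API:

import json

class JsonConf(dict):
    """Toy config object (a dict subclass, so it can be weakly referenced)."""

class JsonConfCache(ConfCache):
    @Slot(str)
    def onFileChanged(self, path):
        if self.unmonitorCollected(path):
            return                    # config was garbage-collected; nothing to reload
        conf = self.cache.get(path)
        if conf is not None:
            with open(path) as fp:
                conf.clear()
                conf.update(json.load(fp))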
Example No. 30
class UDPDemux(object):
    """Explicitly routing UDP demux

    This class implements a demux that forwards packets from the root
    socket to sockets belonging to connections. It does this whenever its
    service method is invoked.

    Methods:

      remove_connection -- remove an existing connection
      service -- distribute datagrams from the root socket to connections
      forward -- forward a stored datagram to a connection
    """

    _forwarding_socket = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
    _forwarding_socket.bind(('127.0.0.1', 0))

    def __init__(self, datagram_socket):
        """Constructor

        Arguments:
        datagram_socket -- the root socket; this must be a bound, unconnected
                           datagram socket
        """

        if datagram_socket.type != socket.SOCK_DGRAM:
            raise InvalidSocketError("datagram_socket is not of " +
                                     "type SOCK_DGRAM")
        try:
            datagram_socket.getsockname()
        except:
            raise InvalidSocketError("datagram_socket is unbound")
        try:
            datagram_socket.getpeername()
        except:
            pass
        else:
            raise InvalidSocketError("datagram_socket is connected")

        self.datagram_socket = datagram_socket
        self.payload = ""
        self.payload_peer_address = None
        self.connections = WeakValueDictionary()

    def get_connection(self, address):
        """Create or retrieve a muxed connection

        Arguments:
        address -- a peer endpoint in IPv4/v6 address format; None refers
                   to the connection for unknown peers

        Return:
        a bound, connected datagram socket instance
        """

        if address in self.connections:
            return self.connections[address]
        
        # We need a new datagram socket on a dynamically assigned ephemeral port
        conn = socket.socket(self._forwarding_socket.family,
                             self._forwarding_socket.type,
                             self._forwarding_socket.proto)
        conn.bind((self._forwarding_socket.getsockname()[0], 0))
        conn.connect(self._forwarding_socket.getsockname())
        if not address:
            conn.setblocking(0)
        self.connections[address] = conn
        _logger.debug("Created new connection for address: %s", address)
        return conn

    def remove_connection(self, address):
        """Remove a muxed connection

        Arguments:
        address -- an address that was previously returned by the service
                   method and whose connection has not yet been removed

        Return:
        the socket object whose connection has been removed
        """

        return self.connections.pop(address)

    def service(self):
        """Service the root socket

        Read from the root socket and forward one datagram to a
        connection. The call will return without forwarding data
        if any of the following occurs:

          * An error is encountered while reading from the root socket
          * Reading from the root socket times out
          * The root socket is non-blocking and has no data available
          * An empty payload is received
          * A non-empty payload is received from an unknown peer (a peer
            for which get_connection has not yet been called); in this case,
            the payload is held by this instance and will be forwarded when
            the forward method is called

        Return:
        if the datagram received was from a new peer, then the peer's
        address; otherwise None
        """

        self.payload, self.payload_peer_address = \
          self.datagram_socket.recvfrom(UDP_MAX_DGRAM_LENGTH)
        _logger.debug("Received datagram from peer: %s",
                      self.payload_peer_address)
        if not self.payload:
            self.payload_peer_address = None
            return
        if self.payload_peer_address in self.connections:
            self.forward()
        else:
            return self.payload_peer_address

    def forward(self):
        """Forward a stored datagram

        When the service method returns the address of a new peer, it holds
        the datagram from that peer in this instance. In this case, this
        method will perform the forwarding step. The target connection is the
        one associated with address None if get_connection has not been called
        since the service method returned the new peer's address, and the
        connection associated with the new peer's address if it has.
        """

        assert self.payload
        assert self.payload_peer_address
        if self.payload_peer_address in self.connections:
            conn = self.connections[self.payload_peer_address]
            default = False
        else:
            conn = self.connections[None]  # propagate exception if not created
            default = True
        _logger.debug("Forwarding datagram from peer: %s, default: %s",
                      self.payload_peer_address, default)
        self._forwarding_socket.sendto(self.payload, conn.getsockname())
        self.payload = ""
        self.payload_peer_address = None
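
A sketch of the dispatch loop that the service and forward docstrings describe, assuming root_socket is a bound, unconnected SOCK_DGRAM socket:

demux = UDPDemux(root_socket)
while True:
    new_peer = demux.service()          # reads one datagram from the root socket
    if new_peer is not None:
        demux.get_connection(new_peer)  # create the muxed socket for this peer
        demux.forward()                 # deliver the datagram held by service()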
Example No. 31
class Worker(GearmanProtocolMixin, asyncio.Protocol):
    def __init__(self,
                 *functions,
                 loop=None,
                 grab_type=Type.GRAB_JOB,
                 timeout=None):
        super(Worker, self).__init__(loop=loop)
        self.transport = None
        self.main_task = None
        self.functions = OrderedDict()
        self.running = WeakValueDictionary()
        self.waiters = []
        self.shutting_down = False
        self.timeout = timeout

        grab_mapping = {
            Type.GRAB_JOB: self.grab_job,
            Type.GRAB_JOB_UNIQ: self.grab_job_uniq,
            Type.GRAB_JOB_ALL: self.grab_job_all,
        }

        try:
            self.grab = grab_mapping[grab_type]
        except KeyError:
            raise RuntimeError(
                'Grab type must be one of GRAB_JOB, GRAB_JOB_UNIQ or GRAB_JOB_ALL'
            )

        for func_arg in functions:
            try:
                func, name = func_arg
                self.functions[name] = func
            except TypeError:
                name = func_arg.__name__
                self.functions[name] = func_arg

    def connection_made(self, transport):
        logger.info('Connection is made to %r',
                    transport.get_extra_info('peername'))
        self.transport = transport

        if self.timeout is not None:
            can_do = partial(self.can_do_timeout, timeout=self.timeout)
        else:
            can_do = self.can_do

        for fname in self.functions.keys():
            logger.debug('Registering function %s', fname)
            can_do(fname)
        self.main_task = self.get_task(self.run())

    def connection_lost(self, exc):
        self.transport = None

    def get_task(self, coro):
        return asyncio.ensure_future(coro, loop=self.loop)

    async def run(self):
        no_job = NoJob()
        while not self.shutting_down:
            self.pre_sleep()
            await self.wait_for(Type.NOOP)
            response = await self.grab()
            if response == no_job:
                continue

            try:
                job_info = self._to_job_info(response)
                func = self.functions.get(job_info.function)
                if not func:
                    logger.warning('Failed to find function %s in %s',
                                   job_info.function,
                                   ', '.join(self.functions.keys()))
                    self.work_fail(job_info.handle)
                    continue

                try:
                    result_or_coro = func(job_info)
                    if asyncio.iscoroutine(result_or_coro):
                        task = self.get_task(result_or_coro)
                        self.running[job_info.handle] = task
                        result = await task
                    else:
                        result = result_or_coro
                    self.work_complete(job_info.handle, result)
                except Exception as ex:
                    logger.exception('Job (handle %s) resulted with exception',
                                     job_info.handle)
                    self.work_exception(job_info.handle, str(ex))
                finally:
                    self.running.pop(job_info.handle, None)

            except AttributeError:
                logger.error('Unexpected GRAB_JOB response %r', response)

    async def shutdown(self, graceful=False):
        logger.debug('Shutting down worker {}gracefully...'.format(
            '' if graceful else 'un'))
        self.shutting_down = True
        sub_tasks = list(self.running.values())
        if graceful:
            if sub_tasks:
                await asyncio.wait(sub_tasks, loop=self.loop)
        else:

            async def cancel_and_wait(tasks):
                for task in tasks:
                    task.cancel()
                try:
                    await asyncio.wait(tasks, loop=self.loop)
                except asyncio.CancelledError:
                    pass

            if sub_tasks:
                await cancel_and_wait(sub_tasks)
        self.main_task.cancel()

        if self.transport:
            self.transport.close()

    @staticmethod
    def _to_job_info(job_assign):
        attrs = ['handle', 'function', 'uuid', 'reducer', 'workload']
        values = [getattr(job_assign, attr, None) for attr in attrs]
        return JobInfo(*values)

    def register_function(self, func, name=''):
        if not self.transport:
            raise RuntimeError('Worker must be connected to the daemon')
        name = name or func.__name__
        self.functions[name] = func
        return self.can_do(name)

    def grab_job_all(self):
        self.send(Type.GRAB_JOB_ALL)
        return self.wait_for(Type.NO_JOB, Type.JOB_ASSIGN_ALL)

    def grab_job_uniq(self):
        self.send(Type.GRAB_JOB_UNIQ)
        return self.wait_for(Type.NO_JOB, Type.JOB_ASSIGN_UNIQ)

    def grab_job(self):
        self.send(Type.GRAB_JOB)
        return self.wait_for(Type.NO_JOB, Type.JOB_ASSIGN)

    def pre_sleep(self):
        self.send(Type.PRE_SLEEP)

    def can_do(self, function):
        self.send(Type.CAN_DO, function)

    def can_do_timeout(self, function, timeout):
        self.send(Type.CAN_DO_TIMEOUT, function, timeout)

    def work_fail(self, handle):
        self.send(Type.WORK_FAIL, handle)

    def work_exception(self, handle, data):
        self.send(Type.WORK_EXCEPTION, handle, data)

    def work_complete(self, handle, result):
        if result is None:
            result = ''
        self.send(Type.WORK_COMPLETE, handle, result)

    def set_client_id(self, client_id):
        self.send(Type.SET_CLIENT_ID, client_id)
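
The worker above keeps the asyncio.Task of every coroutine job currently running in a WeakValueDictionary keyed by job handle, so shutdown() can cancel or await whatever is still alive without pinning finished tasks in memory. A minimal connection sketch, assuming the Worker class above is in scope, that a Gearman server is listening on localhost:4730, and that the function name "reverse" is purely illustrative:

import asyncio

async def main():
    loop = asyncio.get_running_loop()

    def reverse(job_info):
        # job_info.workload is the payload the client submitted
        return job_info.workload[::-1]

    # create_connection wires a fresh Worker protocol to the server socket;
    # connection_made() then registers "reverse" and starts the job loop.
    _, worker = await loop.create_connection(
        lambda: Worker(reverse, loop=loop), 'localhost', 4730)
    await asyncio.sleep(60)              # serve jobs for a minute
    await worker.shutdown(graceful=True)

asyncio.run(main())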
Esempio n. 32
0
class ConfCache(QObject):
	def __init__(self, weak=True):
		super(ConfCache, self).__init__()

		if weak:
			self.cache = WeakValueDictionary()
		else:
			self.cache = {}  # pylint: disable=redefined-variable-type

		self.monitor = MonitorWithRename(parent=self)
		self.monitor.fileChanged.connect(self.onFileChanged)

	@Slot(str)
	def onFileChanged(self, path):
		"""Method called when a monitored file changes

		This method should be reimplemented by subclasses to reload a configuration object.

		When the cache stores weak references, config objects may be deleted at any time, but the associated
		file will stay monitored.
		A reimplementation should check whether the config still exists in the cache before performing a
		costly file reload. See :any:`unmonitorCollected`.
		"""
		pass

	def unmonitorCollected(self, path=None):
		"""Stop monitoring file if the config object was garbage-collected.

		:param path: if None, verifies all monitored files
		:returns: True if `path` was unmonitored, else False. Meaningless if `path` is None.
		"""

		if path is None:
			for f in self.monitor.files():
				self.unmonitorCollected(f)
			return False
		else:
			if path in self.cache:
				return False
			self.monitor.removePath(path)
			return True

	def addConf(self, path, conf):
		"""Add a config object to cache

		The config file `path` will be monitored for changes and the `conf` object will be added to cache.

		:param path: path of the config
		:param conf: the config object to cache
		"""
		self.cache[path] = conf
		self.monitor.addPath(path)

	def delConf(self, path):
		"""Remove a config from cache.

		The config file at `path` is also unmonitored.
		"""
		self.monitor.removePath(path)
		self.cache.pop(path, None)

	def get(self, path):
		"""Get the config object for path

		:returns: the config object in cache, or None if not found or the object was garbage-collected.
		"""
		return self.cache.get(path)
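
The docstrings above expect subclasses to reimplement onFileChanged and to check the weak cache before doing a costly reload. A minimal sketch under those assumptions, using a JSON file format; JsonConf and JsonConfCache are hypothetical, while ConfCache and the Qt Slot decorator come from the snippet above:

import json

class JsonConf(dict):
	"""Plain dicts cannot be weakly referenced, but a dict subclass can."""

class JsonConfCache(ConfCache):
	@Slot(str)
	def onFileChanged(self, path):
		conf = self.get(path)
		if conf is None:
			# Config object was garbage-collected: drop the file watch
			# instead of reloading for nobody.
			self.unmonitorCollected(path)
			return
		with open(path) as fp:
			conf.clear()
			conf.update(json.load(fp))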
Esempio n. 33
0
class RedisStore:
    def __init__(self, db_host, db_port, db_num, db_pw):
        self.pool = ConnectionPool(max_connections=2,
                                   db=db_num,
                                   host=db_host,
                                   port=db_port,
                                   password=db_pw,
                                   decode_responses=True)
        self.redis = StrictRedis(connection_pool=self.pool)
        self.redis.ping()
        self._object_map = WeakValueDictionary()

    def create_object(self, dbo_class, dbo_dict, update_timestamp=True):
        dbo_class = get_dbo_class(getattr(dbo_class, 'dbo_key_type',
                                          dbo_class))
        if not dbo_class:
            return
        try:
            dbo_id = dbo_dict['dbo_id']
        except KeyError:
            dbo_id, dbo_dict = dbo_dict, {}
        if dbo_id is None or dbo_id == '':
            log.warn("create_object called with empty dbo_id")
            return
        dbo_id = str(dbo_id).lower()
        if self.object_exists(dbo_class.dbo_key_type, dbo_id):
            raise ObjectExistsError(dbo_id)
        dbo = dbo_class()
        dbo.dbo_id = dbo_id
        dbo.hydrate(dbo_dict)
        dbo.db_created()
        if dbo.dbo_set_key:
            self.redis.sadd(dbo.dbo_set_key, dbo.dbo_id)
        self.save_object(dbo, update_timestamp)
        return dbo

    def load_object(self, dbo_key, key_type=None, silent=False):
        if key_type:
            try:
                key_type = key_type.dbo_key_type
            except AttributeError:
                pass
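            # join() below raises TypeError when dbo_key is not a string
            # (e.g. None), which is reported as an invalid key.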
            try:
                dbo_key, dbo_id = ':'.join((key_type, dbo_key)), dbo_key
            except TypeError:
                if not silent:
                    log.exception("Invalid dbo_key passed to load_object",
                                  stack_info=True)
                return
        else:
            key_type, _, dbo_id = dbo_key.partition(':')
        cached_dbo = self._object_map.get(dbo_key)
        if cached_dbo:
            return cached_dbo
        json_str = self.redis.get(dbo_key)
        if not json_str:
            if not silent:
                log.warn("Failed to find {} in database", dbo_key)
            return
        return self._json_to_obj(json_str, key_type, dbo_id)

    def save_object(self, dbo, update_timestamp=False, autosave=False):
        if update_timestamp:
            dbo.dbo_ts = int(time.time())
        if dbo.dbo_indexes:
            self._update_indexes(dbo)
        self._clear_old_refs(dbo)
        save_root, new_refs = dbo.to_db_value()
        self.redis.set(dbo.dbo_key, json_encode(save_root))
        if new_refs:
            self._set_new_refs(dbo, new_refs)
        log.debug("db object {} {}saved", dbo.dbo_key,
                  "auto" if autosave else "")
        self._object_map[dbo.dbo_key] = dbo
        return dbo

    def update_object(self, dbo, dbo_dict):
        dbo.hydrate(dbo_dict)
        return self.save_object(dbo, True)

    def delete_object(self, dbo):
        key = dbo.dbo_key
        dbo.db_deleted()
        self.delete_key(key)
        self._clear_old_refs(dbo)
        if dbo.dbo_set_key:
            self.redis.srem(dbo.dbo_set_key, dbo.dbo_id)
        for children_type in dbo.dbo_children_types:
            self.delete_object_set(
                get_dbo_class(children_type),
                "{}_{}s:{}".format(dbo.dbo_key_type, children_type,
                                   dbo.dbo_id))
        for ix_name in dbo.dbo_indexes:
            ix_value = getattr(dbo, ix_name, None)
            if ix_value is not None and ix_value != '':
                self.delete_index('ix:{}:{}'.format(dbo.dbo_key_type, ix_name),
                                  ix_value)
        log.debug("object deleted: {}", key)
        self.evict_object(dbo)

    def load_cached(self, dbo_key):
        return self._object_map.get(dbo_key)

    def object_exists(self, obj_type, obj_id):
        return self.redis.exists('{}:{}'.format(obj_type, obj_id))

    def load_object_set(self, dbo_class, set_key=None):
        dbo_class = get_dbo_class(getattr(dbo_class, 'dbo_key_type',
                                          dbo_class))
        key_type = dbo_class.dbo_key_type
        if not set_key:
            set_key = dbo_class.dbo_set_key
        results = set()
        keys = deque()
        pipeline = self.redis.pipeline()
        for key in self.fetch_set_keys(set_key):
            dbo_key = ':'.join([key_type, key])
            try:
                results.add(self._object_map[dbo_key])
            except KeyError:
                keys.append(key)
                pipeline.get(dbo_key)
        for dbo_id, json_str in zip(keys, pipeline.execute()):
            if json_str:
                obj = self._json_to_obj(json_str, key_type, dbo_id)
                if obj:
                    results.add(obj)
                continue
            log.warn("Removing missing object from set {}", set_key)
            self.delete_set_key(set_key, dbo_id)
        return results

    def delete_object_set(self, dbo_class, set_key=None):
        if not set_key:
            set_key = dbo_class.dbo_set_key
        for dbo in self.load_object_set(dbo_class, set_key):
            self.delete_object(dbo)
        self.delete_key(set_key)

    def reload_object(self, dbo_key):
        dbo = self._object_map.get(dbo_key)
        if dbo:
            json_str = self.redis.get(dbo_key)
            if not json_str:
                log.warn("Failed to find {} in database for reload", dbo_key)
                return None
            return self.update_object(dbo, json_decode(json_str))
        return self.load_object(dbo_key)

    def evict_object(self, dbo):
        self._object_map.pop(dbo.dbo_key, None)

    def load_value(self, key, default=None):
        json = self.redis.get(key)
        if json:
            return json_decode(json)
        return default

    def save_value(self, key, value):
        self.redis.set(key, json_encode(value))

    def fetch_set_keys(self, set_key):
        return self.redis.smembers(set_key)

    def add_set_key(self, set_key, *values):
        self.redis.sadd(set_key, *values)

    def delete_set_key(self, set_key, value):
        self.redis.srem(set_key, value)

    def set_key_exists(self, set_key, value):
        return self.redis.sismember(set_key, value)

    def db_counter(self, counter_id, inc=1):
        return self.redis.incr("counter:{}".format(counter_id), inc)

    def delete_key(self, key):
        self.redis.delete(key)

    def set_index(self, index_name, key, value):
        return self.redis.hset(index_name, key, value)

    def get_index(self, index_name, key):
        return self.redis.hget(index_name, key)

    def get_full_index(self, index_name):
        return self.redis.hgetall(index_name)

    def delete_index(self, index_name, key):
        return self.redis.hdel(index_name, key)

    def get_all_hash(self, index_name):
        return {
            key: json_decode(value)
            for key, value in self.redis.hgetall(index_name).items()
        }

    def get_hash_keys(self, hash_id):
        return self.redis.hkeys(hash_id)

    def set_db_hash(self, hash_id, hash_key, value):
        return self.redis.hset(hash_id, hash_key, json_encode(value))

    def get_db_hash(self, hash_id, hash_key):
        return json_decode(self.redis.hget(hash_id, hash_key))

    def remove_db_hash(self, hash_id, hash_key):
        self.redis.hdel(hash_id, hash_key)

    def get_all_db_hash(self, hash_id):
        return [
            json_decode(value)
            for value in self.redis.hgetall(hash_id).values()
        ]

    def get_db_list(self, list_id, start=0, end=-1):
        return [
            json_decode(value)
            for value in self.redis.lrange(list_id, start, end)
        ]

    def add_db_list(self, list_id, value):
        self.redis.lpush(list_id, json_encode(value))

    def trim_db_list(self, list_id, start, end):
        return self.redis.ltrim(list_id, start, end)

    def dbo_holders(self, dbo_key, degrees=0):
        all_keys = set()

        def find(find_key, degree):
            holder_keys = self.fetch_set_keys('{}:holders'.format(find_key))
            for new_key in holder_keys:
                if new_key != dbo_key and new_key not in all_keys:
                    all_keys.add(new_key)
                    if degree < degrees:
                        find(new_key, degree + 1)

        find(dbo_key, 0)
        return all_keys

    def _json_to_obj(self, json_str, key_type, dbo_id):
        dbo_dict = json_decode(json_str)
        dbo = get_mixed_type(key_type, dbo_dict.get('mixins'))()
        dbo.dbo_id = dbo_id
        dbo.hydrate(dbo_dict)
        self._object_map[dbo.dbo_key] = dbo
        return dbo

    def _update_indexes(self, dbo):
        try:
            old_dbo = json_decode(self.redis.get(dbo.dbo_key))
        except TypeError:
            old_dbo = None

        for ix_name in dbo.dbo_indexes:
            new_val = getattr(dbo, ix_name, None)
            old_val = old_dbo.get(ix_name) if old_dbo else None
            if old_val == new_val:
                continue
            ix_key = 'ix:{}:{}'.format(dbo.dbo_key_type, ix_name)
            if old_val is not None:
                self.delete_index(ix_key, old_val)
            if new_val is not None and new_val != '':
                if self.get_index(ix_key, new_val):
                    raise NonUniqueError(ix_key, new_val)
                self.set_index(ix_key, new_val, dbo.dbo_id)

    def _clear_old_refs(self, dbo):
        dbo_key = dbo.dbo_key
        ref_key = '{}:refs'.format(dbo_key)
        for ref_id in self.fetch_set_keys(ref_key):
            holder_key = '{}:holders'.format(ref_id)
            self.delete_set_key(holder_key, dbo_key)
        self.delete_key(ref_key)

    def _set_new_refs(self, dbo, new_refs):
        dbo_key = dbo.dbo_key
        self.add_set_key("{}:refs".format(dbo_key), *new_refs)
        for ref_id in new_refs:
            holder_key = '{}:holders'.format(ref_id)
            self.add_set_key(holder_key, dbo_key)
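
RedisStore uses _object_map as a weak identity cache: while any part of the application still holds a loaded object, every load_object call for the same key returns that same instance, and once the last strong reference disappears the next load hits Redis again. A stripped-down, store-agnostic sketch of the same pattern (Record, IdentityCache and load_raw are hypothetical stand-ins for the dbo machinery):

from weakref import WeakValueDictionary

class Record:
    def __init__(self, key, data):
        self.key, self.data = key, data

class IdentityCache:
    def __init__(self, load_raw):
        self._load_raw = load_raw              # e.g. a Redis GET plus JSON decode
        self._object_map = WeakValueDictionary()

    def load(self, key):
        obj = self._object_map.get(key)
        if obj is not None:                    # still referenced somewhere: reuse it
            return obj
        obj = Record(key, self._load_raw(key))
        self._object_map[key] = obj            # cached only while referenced
        return obj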
Esempio n. 34
0
class BaseConnection(object):
    def __init__(self, response_queue_size=None, enquire_timeout=None, logger=None):
        self.proto = Proto(logger)
        self.sequence_number = 0

        self.last_enquire = time.time()
        self.enquire_timeout = enquire_timeout or 300
        self.enquire_response = None

        self.response_queue = deque([], response_queue_size or 1000)
        self.response_map = WeakValueDictionary()

    def next_sequence(self):
        self.sequence_number += 1
        return self.sequence_number

    def reply(self, cmd):
        return self.on_send(self.proto.send_bytes(cmd))

    def send(self, cmd, callback=None, notify=True):
        cmd.sequence_number = self.next_sequence()
        resp = Response(cmd, callback)
        if notify:
            self.response_map[cmd.sequence_number] = resp
            self.response_queue.append(resp)
        self.reply(cmd)
        return resp

    def on_send(self, data):  # pragma: no cover
        pass

    def on_close(self):  # pragma: no cover
        pass

    def on_deliver(self, req, resp, reply):  # pragma: no cover
        pass

    def _make_reply(self, resp):
        def reply():
            reply.called = True
            return self.reply(resp)
        reply.called = False
        return reply

    def _deliver(self, req, resp):
        reply = self._make_reply(resp)
        self.on_deliver(req, resp, reply)
        if not reply.called:
            self.reply(resp)

    def handle(self, cmd):
        self.last_enquire = time.time()
        cmd_type = type(cmd)
        seq = {'sequence_number': cmd.sequence_number}
        if cmd_type is command.EnquireLink:
            self.reply(command.EnquireLinkResp(**seq))
        elif cmd_type is command.Unbind:
            self.reply(command.UnbindResp(**seq))
            self.on_close()
        elif cmd_type.is_response:
            resp = self.response_map.pop(cmd.sequence_number, None)
            if resp:
                resp.resolve(cmd)
        else:
            resp = cmd_type.response(**seq)
            self._deliver(cmd, resp)

    def feed(self, data):
        for e in self.proto.receive_bytes(data):
            self.handle(e)

    def ping(self, response_timeout=10):
        now = time.time()
        if self.enquire_response:
            if self.enquire_response.ready:
                self.enquire_response = None
            elif self.enquire_response.expire < now:
                raise BrokenLink('SMPP link broken: no response from SMSC')
        elif self.last_enquire + self.enquire_timeout < now:
            self.enquire_response = self.send(command.EnquireLink())
            self.enquire_response.expire = now + response_timeout

    def unbind(self):
        return self.send(command.Unbind(), lambda _: self.on_close())
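
The pairing of a bounded deque with a WeakValueDictionary is what caps memory here: response_queue holds the only strong references, so once more than response_queue_size commands are outstanding the oldest Response falls out of both structures and a late reply for it is simply ignored in handle(). A self-contained illustration of that idiom (Pending is a hypothetical stand-in for Response):

from collections import deque
from weakref import WeakValueDictionary

class Pending:
    def __init__(self, seq):
        self.seq = seq

queue = deque([], 2)                  # strong references, at most 2 outstanding
by_seq = WeakValueDictionary()

for seq in (1, 2, 3):
    p = Pending(seq)
    by_seq[seq] = p
    queue.append(p)                   # appending 3 evicts 1 from the deque

# On CPython the evicted object is reclaimed immediately by reference counting.
print(sorted(by_seq.keys()))          # [2, 3]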
Esempio n. 35
0
class BaseConnection(object):
    def __init__(self,
                 response_queue_size=None,
                 enquire_timeout=None,
                 logger=None):
        self.proto = Proto(logger)
        self.sequence_number = 0

        self.last_enquire = time.time()
        self.enquire_timeout = enquire_timeout or 300
        self.enquire_response = None

        self.response_queue = deque([], response_queue_size or 1000)
        self.response_map = WeakValueDictionary()

    def next_sequence(self):
        self.sequence_number += 1
        return self.sequence_number

    def reply(self, cmd):
        return self.on_send(self.proto.send_bytes(cmd))

    def send(self, cmd, callback=None, notify=True):
        cmd.sequence_number = self.next_sequence()
        resp = Response(cmd, callback)
        if notify:
            self.response_map[cmd.sequence_number] = resp
            self.response_queue.append(resp)
        self.reply(cmd)
        return resp

    def on_send(self, data):  # pragma: no cover
        pass

    def on_close(self):  # pragma: no cover
        pass

    def on_deliver(self, req, resp, reply):  # pragma: no cover
        pass

    def _make_reply(self, resp):
        def reply():
            reply.called = True
            return self.reply(resp)

        reply.called = False
        return reply

    def _deliver(self, req, resp):
        reply = self._make_reply(resp)
        self.on_deliver(req, resp, reply)
        if not reply.called:
            self.reply(resp)

    def handle(self, cmd):
        self.last_enquire = time.time()
        cmd_type = type(cmd)
        seq = {'sequence_number': cmd.sequence_number}
        if cmd_type is command.EnquireLink:
            self.reply(command.EnquireLinkResp(**seq))
        elif cmd_type is command.Unbind:
            self.reply(command.UnbindResp(**seq))
            self.on_close()
        elif cmd_type.is_response:
            resp = self.response_map.pop(cmd.sequence_number, None)
            if resp:
                resp.resolve(cmd)
        else:
            resp = cmd_type.response(**seq)
            self._deliver(cmd, resp)

    def feed(self, data):
        for e in self.proto.receive_bytes(data):
            self.handle(e)

    def ping(self, response_timeout=10):
        now = time.time()
        if self.enquire_response:
            if self.enquire_response.ready:
                self.enquire_response = None
            elif self.enquire_response.expire < now:
                raise BrokenLink('SMPP link broken: no response from SMSC')
        elif self.last_enquire + self.enquire_timeout < now:
            self.enquire_response = self.send(command.EnquireLink())
            self.enquire_response.expire = now + response_timeout

    def unbind(self):
        return self.send(command.Unbind(), lambda _: self.on_close())
Esempio n. 36
0
File: memory.py Project: oeway/WorQ
class TaskQueue(AbstractTaskQueue):
    """Simple in-memory task queue implementation"""

    @classmethod
    def factory(cls, url, name=const.DEFAULT, *args, **kw):
        obj = _REFS.get((url, name))
        if obj is None:
            obj = _REFS[(url, name)] = cls(url, name, *args, **kw)
        return obj

    def __init__(self, *args, **kw):
        super(TaskQueue, self).__init__(*args, **kw)
        self.queue = Queue()
        self.results = WeakValueDictionary()
        self.results_lock = Lock()

    def _init_result(self, result, status, message):
        with self.results_lock:
            if result.id in self.results:
                return False
            self.results[result.id] = result
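        # The double-underscore attributes below are name-mangled to
        # _TaskQueue__status and friends, keeping this queue's bookkeeping
        # separate from the result object's own attributes.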
        result.__status = status
        result.__value = Queue()
        result.__task = message
        result.__args = {}
        result.__lock = Lock()
        result.__for = None
        return True

    def enqueue_task(self, result, message):
        if self._init_result(result, const.ENQUEUED, message):
            self.queue.put(result)
            return True
        return False

    def defer_task(self, result, message, args):
        if self._init_result(result, const.PENDING, message):
            results = self.results
            # keep references to results to prevent GC
            result.__refs = [results.get(arg) for arg in args]
            return True
        return False

    def undefer_task(self, task_id):
        result = self.results[task_id]
        self.queue.put(result)

    def get(self, timeout=None):
        try:
            result = self.queue.get(timeout=timeout)
        except Empty:
            return None
        result.__status = const.PROCESSING
        return result.id, result.__task

    def size(self):
        return len(self.results)

    def discard_pending(self):
        with self.results_lock:
            while True:
                try:
                    self.queue.get_nowait()
                except Empty:
                    break
            self.results.clear()

    def reserve_argument(self, argument_id, deferred_id):
        result = self.results.get(argument_id)
        if result is None:
            return (False, None)
        with result.__lock:
            if result.__for is not None:
                return (False, None)
            result.__for = deferred_id
            try:
                message = result.__value.get_nowait()
            except Empty:
                message = None
            if message is not None:
                with self.results_lock:
                    self.results.pop(argument_id, None)
            return (True, message)

    def set_argument(self, task_id, argument_id, message):
        result = self.results[task_id]
        with self.results_lock:
            self.results.pop(argument_id, None)
        with result.__lock:
            result.__args[argument_id] = message
            return len(result.__args) == len(result.__refs)

    def get_arguments(self, task_id):
        try:
            return self.results[task_id].__args
        except KeyError:
            return {}

    def set_task_timeout(self, task_id, timeout):
        pass

    def get_status(self, task_id):
        result = self.results.get(task_id)
        return None if result is None else result.__status

    def set_result(self, task_id, message, timeout):
        result = self.results.get(task_id)
        if result is not None:
            with result.__lock:
                result.__value.put(message)
                return result.__for

    def pop_result(self, task_id, timeout):
        result = self.results.get(task_id)
        if result is None:
            return const.TASK_EXPIRED
#        with result.__lock:
#            if result.__for is not None:
#                raise NotImplementedError
#                #return const.RESERVED
#            result.__for = task_id
        try:
            if timeout == 0:
                value = result.__value.get_nowait()
            else:
                value = result.__value.get(timeout=timeout)
        except Empty:
            value = None
        else:
            self.results.pop(task_id, None)
        return value

    def discard_result(self, task_id, task_expired_token):
        result = self.results.pop(task_id)
        if result is not None:
            result.__value.put(task_expired_token)
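
The factory classmethod relies on a module-level registry, presumably _REFS = WeakValueDictionary() (not shown in this excerpt), so every broker in a process that asks for the same url and name shares one in-memory queue for as long as any of them stays alive. A self-contained sketch of that sharing pattern (Broker is a hypothetical stand-in for TaskQueue):

from weakref import WeakValueDictionary

_REFS = WeakValueDictionary()

class Broker:
    def __init__(self, url, name):
        self.url, self.name = url, name

    @classmethod
    def factory(cls, url, name='default'):
        obj = _REFS.get((url, name))
        if obj is None:
            obj = _REFS[(url, name)] = cls(url, name)
        return obj

a = Broker.factory('memory://')
b = Broker.factory('memory://')
assert a is b          # shared while at least one strong reference exists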
Esempio n. 37
0
class UDPDemux(object):
    """Explicitly routing UDP demux

    This class implements a demux that forwards packets from the root
    socket to sockets belonging to connections. It does this whenever its
    service method is invoked.

    Methods:

      get_connection -- create or retrieve a muxed connection
      remove_connection -- remove an existing connection
      service -- distribute datagrams from the root socket to connections
      forward -- forward a stored datagram to a connection
    """

    _forwarding_socket = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
    _forwarding_socket.bind(('127.0.0.1', 0))

    def __init__(self, datagram_socket):
        """Constructor

        Arguments:
        datagram_socket -- the root socket; this must be a bound, unconnected
                           datagram socket
        """

        if (datagram_socket.type & socket.SOCK_DGRAM) != socket.SOCK_DGRAM:
            raise InvalidSocketError("datagram_socket is not of " +
                                     "type SOCK_DGRAM")
        try:
            datagram_socket.getsockname()
        except socket.error:
            raise InvalidSocketError("datagram_socket is unbound")
        try:
            datagram_socket.getpeername()
        except socket.error:
            pass
        else:
            raise InvalidSocketError("datagram_socket is connected")

        self.datagram_socket = datagram_socket
        self.payload = ""
        self.payload_peer_address = None
        self.connections = WeakValueDictionary()

    def get_connection(self, address):
        """Create or retrieve a muxed connection

        Arguments:
        address -- a peer endpoint in IPv4/v6 address format; None refers
                   to the connection for unknown peers

        Return:
        a bound, connected datagram socket instance
        """

        if address in self.connections:
            return self.connections[address]
        
        # We need a new datagram socket on a dynamically assigned ephemeral port
        conn = socket.socket(self._forwarding_socket.family,
                             self._forwarding_socket.type,
                             self._forwarding_socket.proto)
        conn.bind((self._forwarding_socket.getsockname()[0], 0))
        conn.connect(self._forwarding_socket.getsockname())
        if not address:
            conn.setblocking(0)
        self.connections[address] = conn
        _logger.debug("Created new connection for address: %s", address)
        return conn

    def remove_connection(self, address):
        """Remove a muxed connection

        Arguments:
        address -- an address that was previously returned by the service
                   method and whose connection has not yet been removed

        Return:
        the socket object whose connection has been removed
        """

        return self.connections.pop(address)

    def service(self):
        """Service the root socket

        Read from the root socket and forward one datagram to a
        connection. The call will return without forwarding data
        if any of the following occurs:

          * An error is encountered while reading from the root socket
          * Reading from the root socket times out
          * The root socket is non-blocking and has no data available
          * An empty payload is received
          * A non-empty payload is received from an unknown peer (a peer
            for which get_connection has not yet been called); in this case,
            the payload is held by this instance and will be forwarded when
            the forward method is called

        Return:
        if the datagram received was from a new peer, then the peer's
        address; otherwise None
        """

        self.payload, self.payload_peer_address = \
          self.datagram_socket.recvfrom(UDP_MAX_DGRAM_LENGTH)
        _logger.debug("Received datagram from peer: %s",
                      self.payload_peer_address)
        if not self.payload:
            self.payload_peer_address = None
            return
        if self.payload_peer_address in self.connections:
            self.forward()
        else:
            return self.payload_peer_address

    def forward(self):
        """Forward a stored datagram

        When the service method returns the address of a new peer, it holds
        the datagram from that peer in this instance. In this case, this
        method will perform the forwarding step. The target connection is the
        one associated with address None if get_connection has not been called
        since the service method returned the new peer's address, and the
        connection associated with the new peer's address if it has.
        """

        assert self.payload
        assert self.payload_peer_address
        if self.payload_peer_address in self.connections:
            conn = self.connections[self.payload_peer_address]
            default = False
        else:
            conn = self.connections[None]  # propagate exception if not created
            default = True
        _logger.debug("Forwarding datagram from peer: %s, default: %s",
                      self.payload_peer_address, default)
        self._forwarding_socket.sendto(self.payload, conn.getsockname())
        self.payload = ""
        self.payload_peer_address = None
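
A minimal service-loop sketch, assuming the UDPDemux class above together with its module-level constants and exceptions is importable, and that 0.0.0.0:5000 is where peers send their datagrams:

import socket

root = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
root.bind(('0.0.0.0', 5000))
demux = UDPDemux(root)

while True:
    new_peer = demux.service()       # forwards datagrams from known peers itself
    if new_peer is not None:
        # First datagram from an unknown peer: create its connection, then
        # forward the payload that service() is still holding.
        conn = demux.get_connection(new_peer)
        demux.forward()
        # conn is a connected datagram socket; hand it to a per-peer handler here.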
Esempio n. 38
0
class TrollReactor(CBDictInterface):
    """Base class for Omegle API.
    """
    def __init__(self, transmog=Transmogrifier(), listen=InteractiveViewport(),
                 n=2, refresh=2., debug=0):
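        # Note: these default arguments are evaluated once, when the class is
        # defined, so reactors built without explicit arguments share the same
        # Transmogrifier and InteractiveViewport instances.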
        # Independent setup
        super(TrollReactor, self).__init__()
        self.debug = debug

        self.listeners = WeakValueDictionary()
        # Argument assignment
        self.eventQueue = deque()
        self.connectTransmogrifier(transmog)
        self.addListeners(listen)
        self._n = n
        self.refresh = refresh

        self._allConnected = False
        self.reconnectWait = 2.
        self.idleTime = None
        self.initializeStrangers()  # Now we wait to receive idSet events

    def connectTransmogrifier(self, transmog):
        self.transmogrifier = transmog
        self.transmogrifier.connect(self.eventQueue)

    def initializeStrangers(self):
        self._volatile = dict((Stranger(reactor, self, HTTP), None) for _ in xrange(self._n))
        self._waiting = len(self._volatile.keys())
        self.strangers = {}
        self.idleTime = time()
        self._allConnected = False

    def multicastDisconnect(self, ids):
        """Announce disconnect for a group of strangers.

        ids : iterable
            id strings of strangers from whom to politely disconnect.
        """
        for i in ids:
            self.strangers[i].announceDisconnect()

    def restart(self):
        self.strangers.clear()
        self.eventQueue.clear()
        self._allConnected = False
        sleep(self.reconnectWait)  # blocking is OK here.  We are trying to *avoid* making connections.
        self.initializeStrangers()

    def pumpEvents(self):
        for id_ in self.strangers:
            self.strangers[id_].getEventsPage()

        reactor.callLater(self.refresh, self.pumpEvents)

    def on_idSet(self, ev):
        for s in self._volatile:
            if s.id == ev.id:  # we have the stranger that notified
                self.strangers[s.id] = s  # move to {id: stranger} dict
                self._waiting -= 1

        assert self._waiting >= 0, "Too many stranger IDs"
        if self._waiting == 0:
            self._allConnected = True
            self.idleTime = time()
            self.pumpEvents()

    def on_error(self, ev):
        # TODO: handle reCAPTCHA
        pass

    def addListeners(self, listeners):
        """Add a listener or group of listeners to the reactor.

        listeners : CBDictInterface instance or iterable
        """
        listeners = mkIterableSequence(listeners)

        for listen in listeners:
            self.listeners[listen] = listen  # weak-value dict

    def removeListener(self, listener):
        self.listeners.pop(listener)

    def _processEventQueue(self):
        while len(self.eventQueue):
            ev = self.eventQueue.popleft()
            for listener in self.listeners:
                listener.notify(ev)

            self.notify(ev)

    def deltaIdleTime(self):
        return time() - self.idleTime

    def idle(self):
        """Respond to idle state.

        This function is run whenever feed encounters a null event, and
        does nothing by default.  Override to define functionality.
        """
        pass

    def feed(self, events):
        """Notify the TrollReactor of event(s).
        """
        if not events or events is NULL_EVENT:
            events = (NULL_EVENT,)
            self.idle()
        else:
            self.idleTime = time()

        if isEvent(events):  # if events is a single event
            events = (events,)

        self.transmogrifier(events)
        self._processEventQueue()
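
A detail worth noting: each listener above is used as its own key, and WeakValueDictionary holds keys strongly (only the values are weak), so registered listeners are in fact kept alive until removeListener pops them. A self-contained illustration of the difference (Listener is hypothetical):

from weakref import WeakValueDictionary

class Listener:
    pass

self_keyed = WeakValueDictionary()
temp = Listener()
self_keyed[temp] = temp      # the key is a strong reference to the listener
del temp
print(len(self_keyed))       # 1 -- the key keeps the object alive

by_name = WeakValueDictionary()
by_name['temp'] = Listener() # only a weak reference to the value
print(len(by_name))          # 0 on CPython -- collected right away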
Esempio n. 39
0
class Signal(object):
    def __init__(self):
        self.__slots = WeakValueDictionary()

    def __call__(self, *args, **kargs):
        for key in self.__slots:
            func, _ = key
            func(self.__slots[key], *args, **kargs)

    def connect(self, slot):
        if PYTHON3:
            key = (slot.__func__, id(slot.__self__))
            self.__slots[key] = slot.__self__
        else:
            key = (slot.im_func, id(slot.im_self))
            self.__slots[key] = slot.im_self

    def disconnect(self, slot):
        if PYTHON3:
            key = (slot.__func__, id(slot.__self__))
            if key in self.__slots:
                self.__slots.pop(key)
        else:
            key = (slot.im_func, id(slot.im_self))
            if key in self.__slots:
                self.__slots.pop(key)

    def clear(self):
        self.__slots.clear()

        ## Sample usage:
        #class Model(object):
        #  def __init__(self, value):
        #    self.__value = value
        #    self.changed = Signal()
        #
        #  def set_value(self, value):
        #    self.__value = value
        #    self.changed() # Emit signal
        #
        #  def get_value(self):
        #    return self.__value
        #
        #
        #class View(object):
        #  def __init__(self, model):
        #    self.model = model
        #    model.changed.connect(self.model_changed)
        #
        #  def model_changed(self):
        #    print "New value:", self.model.get_value()
        #
        #
        #model = Model(10)
        #view1 = View(model)
        #view2 = View(model)
        #view3 = View(model)
        #
        #model.set_value(20)
        #
        #del view1
        #model.set_value(30)
        #
        #model.changed.clear()
        #model.set_value(40)
        ### end of http://code.activestate.com/recipes/576477/ }}}
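
The commented sample above is the original Python 2 recipe; on Python 3 the same flow goes through the __func__/__self__ branch. A short runnable sketch, assuming the Signal class above is in scope and the module's PYTHON3 flag matches the running interpreter:

class Model:
    def __init__(self, value):
        self.value = value
        self.changed = Signal()

class View:
    def __init__(self, model):
        self.model = model
        model.changed.connect(self.model_changed)

    def model_changed(self):
        print('New value:', self.model.value)

model = Model(10)
view = View(model)
model.changed()      # prints "New value: 10"
del view             # the View is collected, its weak slot entry disappears
model.changed()      # prints nothing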
Esempio n. 40
0
class Boss:
    def __init__(self, os_window_id, opts, args, cached_values):
        self.window_id_map = WeakValueDictionary()
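        # Windows are kept alive by their tab; this weak map only indexes them
        # by id, so an entry disappears as soon as the last strong reference to
        # a destroyed window goes away.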
        self.cached_values = cached_values
        self.os_window_map = {}
        self.cursor_blinking = True
        self.shutting_down = False
        talk_fd = getattr(single_instance, 'socket', None)
        talk_fd = -1 if talk_fd is None else talk_fd.fileno()
        listen_fd = -1
        if opts.allow_remote_control and args.listen_on:
            listen_fd = listen_on(args.listen_on)
        self.child_monitor = ChildMonitor(
            self.on_child_death,
            DumpCommands(args) if args.dump_commands or args.dump_bytes else
            None, talk_fd, listen_fd)
        set_boss(self)
        self.current_font_size = opts.font_size
        set_font_family(opts)
        self.opts, self.args = opts, args
        initialize_renderer()
        startup_session = create_session(opts, args)
        self.add_os_window(startup_session, os_window_id=os_window_id)

    def add_os_window(self,
                      startup_session,
                      os_window_id=None,
                      wclass=None,
                      wname=None,
                      size=None,
                      startup_id=None):
        dpi_changed = False
        if os_window_id is None:
            w, h = initial_window_size(
                self.opts, self.cached_values) if size is None else size
            cls = wclass or self.args.cls or appname
            os_window_id = create_os_window(w, h, appname, wname
                                            or self.args.name or cls, cls)
            if startup_id:
                ctx = init_startup_notification(os_window_id, startup_id)
            dpi_changed = show_window(os_window_id)
            if startup_id:
                end_startup_notification(ctx)
        tm = TabManager(os_window_id, self.opts, self.args, startup_session)
        self.os_window_map[os_window_id] = tm
        if dpi_changed:
            self.on_dpi_change(os_window_id)

    def list_os_windows(self):
        for os_window_id, tm in self.os_window_map.items():
            yield {
                'id': os_window_id,
                'tabs': list(tm.list_tabs()),
            }

    def match_windows(self, match):
        field, exp = match.split(':', 1)
        pat = re.compile(exp)
        for tm in self.os_window_map.values():
            for tab in tm:
                for window in tab:
                    if window.matches(field, pat):
                        yield window

    def tab_for_window(self, window):
        for tm in self.os_window_map.values():
            for tab in tm:
                for w in tab:
                    if w.id == window.id:
                        return tab

    def match_tabs(self, match):
        field, exp = match.split(':', 1)
        pat = re.compile(exp)
        tms = tuple(self.os_window_map.values())
        found = False
        if field in ('title', 'id'):
            for tm in tms:
                for tab in tm:
                    if tab.matches(field, pat):
                        yield tab
                        found = True
        if not found:
            tabs = {self.tab_for_window(w) for w in self.match_windows(match)}
            for tab in tabs:
                if tab:
                    yield tab

    def set_active_window(self, window):
        for tm in self.os_window_map.values():
            for tab in tm:
                for w in tab:
                    if w.id == window.id:
                        if tab is not self.active_tab:
                            tm.set_active_tab(tab)
                        tab.set_active_window(w)
                        return

    def _new_os_window(self, args, cwd_from=None):
        sw = self.args_to_special_window(args, cwd_from) if args else None
        startup_session = create_session(self.opts,
                                         special_window=sw,
                                         cwd_from=cwd_from)
        self.add_os_window(startup_session)

    def new_os_window(self, *args):
        self._new_os_window(args)

    def new_os_window_with_cwd(self, *args):
        w = self.active_window
        cwd_from = w.child.pid if w is not None else None
        self._new_os_window(args, cwd_from)

    def add_child(self, window):
        self.child_monitor.add_child(window.id, window.child.pid,
                                     window.child.child_fd, window.screen)
        self.window_id_map[window.id] = window

    def _handle_remote_command(self, cmd, window=None):
        response = None
        if self.opts.allow_remote_control:
            try:
                response = handle_cmd(self, window, cmd)
            except Exception as err:
                import traceback
                response = {
                    'ok': False,
                    'error': str(err),
                    'tb': traceback.format_exc()
                }
        else:
            response = {
                'ok':
                False,
                'error':
                'Remote control is disabled. Add allow_remote_control yes to your kitty.conf'
            }
        return response

    def peer_message_received(self, msg):
        import json
        msg = msg.decode('utf-8')
        cmd_prefix = '\x1bP@kitty-cmd'
        if msg.startswith(cmd_prefix):
            cmd = msg[len(cmd_prefix):-2]
            response = self._handle_remote_command(cmd)
            if response is not None:
                response = (cmd_prefix + json.dumps(response) +
                            '\x1b\\').encode('utf-8')
            return response
        else:
            msg = json.loads(msg)
            if isinstance(msg, dict) and msg.get('cmd') == 'new_instance':
                startup_id = msg.get('startup_id')
                args, rest = parse_args(msg['args'][1:])
                args.args = rest
                opts = create_opts(args)
                session = create_session(opts, args)
                self.add_os_window(session,
                                   wclass=args.cls,
                                   wname=args.name,
                                   size=initial_window_size(
                                       opts, self.cached_values),
                                   startup_id=startup_id)
            else:
                log_error('Unknown message received from peer, ignoring')

    def handle_remote_cmd(self, cmd, window=None):
        response = self._handle_remote_command(cmd, window)
        if response is not None:
            if window is not None:
                window.send_cmd_response(response)

    def on_child_death(self, window_id):
        window = self.window_id_map.pop(window_id, None)
        if window is None:
            return
        if window.action_on_close:
            try:
                window.action_on_close(window)
            except Exception:
                import traceback
                traceback.print_exc()
        os_window_id = window.os_window_id
        window.destroy()
        tm = self.os_window_map.get(os_window_id)
        if tm is None:
            return
        for tab in tm:
            if window in tab:
                break
        else:
            return
        tab.remove_window(window)
        if len(tab) == 0:
            tm.remove(tab)
            tab.destroy()
            if len(tm) == 0:
                if not self.shutting_down:
                    mark_os_window_for_close(os_window_id)
                    glfw_post_empty_event()

    def close_window(self, window=None):
        if window is None:
            window = self.active_window
        self.child_monitor.mark_for_close(window.id)

    def close_tab(self, tab=None):
        if tab is None:
            tab = self.active_tab
        for window in tab:
            self.close_window(window)

    def toggle_fullscreen(self):
        toggle_fullscreen()

    def start(self):
        if not getattr(self, 'io_thread_started', False):
            self.child_monitor.start()
            self.io_thread_started = True

    def activate_tab_at(self, os_window_id, x):
        tm = self.os_window_map.get(os_window_id)
        if tm is not None:
            tm.activate_tab_at(x)

    def on_window_resize(self, os_window_id, w, h, dpi_changed):
        tm = self.os_window_map.get(os_window_id)
        if tm is not None:
            if dpi_changed:
                if set_dpi_from_os_window(os_window_id):
                    self.on_dpi_change(os_window_id)
                else:
                    tm.resize()
            else:
                tm.resize()

    def increase_font_size(self):
        self.change_font_size(
            min(self.opts.font_size * 5,
                self.current_font_size + self.opts.font_size_delta))

    def decrease_font_size(self):
        self.change_font_size(
            max(MINIMUM_FONT_SIZE,
                self.current_font_size - self.opts.font_size_delta))

    def restore_font_size(self):
        self.change_font_size(self.opts.font_size)

    def _change_font_size(self, new_size=None, on_dpi_change=False):
        if new_size is not None:
            self.current_font_size = new_size
        old_cell_width, old_cell_height = viewport_for_window()[-2:]
        windows = tuple(filter(None, self.window_id_map.values()))
        resize_fonts(self.current_font_size, on_dpi_change=on_dpi_change)
        layout_sprite_map()
        prerender()
        for window in windows:
            window.screen.rescale_images(old_cell_width, old_cell_height)
            window.screen.refresh_sprite_positions()
        for tm in self.os_window_map.values():
            tm.resize()
            tm.refresh_sprite_positions()
        glfw_post_empty_event()

    def change_font_size(self, new_size):
        if new_size == self.current_font_size:
            return
        self._change_font_size(new_size)

    def on_dpi_change(self, os_window_id):
        self._change_font_size()

    @property
    def active_tab_manager(self):
        os_window_id = current_os_window()
        return self.os_window_map.get(os_window_id)

    @property
    def active_tab(self):
        tm = self.active_tab_manager
        if tm is not None:
            return tm.active_tab

    @property
    def active_window(self):
        t = self.active_tab
        if t is not None:
            return t.active_window

    def dispatch_special_key(self, key, scancode, action, mods):
        # Handles shortcuts; returns True if the key was consumed
        key_action = get_shortcut(self.opts.keymap, mods, key, scancode)
        self.current_key_press_info = key, scancode, action, mods
        return self.dispatch_action(key_action)

    def default_bg_changed_for(self, window_id):
        w = self.window_id_map.get(window_id)
        if w is not None:
            tm = self.os_window_map.get(w.os_window_id)
            if tm is not None:
                t = tm.tab_for_id(w.tab_id)
                if t is not None:
                    t.relayout_borders()

    def dispatch_action(self, key_action):
        if key_action is not None:
            f = getattr(self, key_action.func, None)
            if f is not None:
                passthrough = f(*key_action.args)
                if passthrough is not True:
                    return True
        tab = self.active_tab
        if tab is None:
            return False
        window = self.active_window
        if window is None:
            return False
        if key_action is not None:
            f = getattr(tab, key_action.func,
                        getattr(window, key_action.func, None))
            if f is not None:
                passthrough = f(*key_action.args)
                if passthrough is not True:
                    return True
        return False

    def combine(self, *actions):
        for key_action in actions:
            self.dispatch_action(key_action)

    def on_focus(self, os_window_id, focused):
        tm = self.os_window_map.get(os_window_id)
        if tm is not None:
            w = tm.active_window
            if w is not None:
                w.focus_changed(focused)

    def on_drop(self, os_window_id, paths):
        tm = self.os_window_map.get(os_window_id)
        if tm is not None:
            w = tm.active_window
            if w is not None:
                w.paste('\n'.join(paths))

    def on_os_window_closed(self, os_window_id, viewport_width,
                            viewport_height):
        self.cached_values['window-size'] = viewport_width, viewport_height
        tm = self.os_window_map.pop(os_window_id, None)
        if tm is not None:
            tm.destroy()
        for window_id in tuple(
                w.id for w in self.window_id_map.values()
                if getattr(w, 'os_window_id', None) == os_window_id):
            self.window_id_map.pop(window_id, None)

    def display_scrollback(self, window, data):
        tab = self.active_tab
        if tab is not None and window.overlay_for is None:
            tab.new_special_window(
                SpecialWindow(self.opts.scrollback_pager,
                              data,
                              _('History'),
                              overlay_for=window.id))

    def edit_config_file(self, *a):
        confpath = prepare_config_file_for_editing()
        # On macOS vim fails to handle SIGWINCH if it occurs early, so add a
        # small delay.
        cmd = [
            'kitty', '+runpy',
            'import os, sys, time; time.sleep(0.05); os.execvp(sys.argv[1], sys.argv[1:])'
        ] + editor + [confpath]
        self.new_os_window(*cmd)

    def input_unicode_character(self):
        w = self.active_window
        tab = self.active_tab
        if w is not None and tab is not None and w.overlay_for is None:
            overlay_window = tab.new_special_window(
                SpecialWindow([
                    'kitty', '+runpy',
                    'from kittens.unicode_input.main import main; main()'
                ],
                              overlay_for=w.id))
            overlay_window.action_on_close = partial(
                self.send_unicode_character, w.id)

    def send_unicode_character(self, target_window_id, source_window):
        w = self.window_id_map.get(target_window_id)
        if w is not None:
            output = str(source_window.screen.linebuf.line(0))
            if output.startswith('OK: '):
                try:
                    text = chr(int(output.partition(' ')[2], 16))
                except Exception:
                    import traceback
                    traceback.print_exc()
                else:
                    w.paste(text)

    def run_simple_kitten(self, type_of_input, kitten, *args):
        import shlex
        w = self.active_window
        tab = self.active_tab
        if w is not None and tab is not None and w.overlay_for is None:
            cmdline = args[0] if args else ''
            args = shlex.split(cmdline) if cmdline else []
            if '--program' not in cmdline:
                args.extend(('--program', self.opts.open_url_with))
            if type_of_input in ('text', 'history', 'ansi', 'ansi-history'):
                data = w.as_text(as_ansi='ansi' in type_of_input,
                                 add_history='history'
                                 in type_of_input).encode('utf-8')
            elif type_of_input == 'none':
                data = None
            else:
                raise ValueError(
                    'Unknown type_of_input: {}'.format(type_of_input))
            tab.new_special_window(
                SpecialWindow([
                    'kitty', '+runpy',
                    'from kittens.{}.main import main; main()'.format(kitten)
                ] + args,
                              stdin=data,
                              overlay_for=w.id))

    def switch_focus_to(self, window_idx):
        tab = self.active_tab
        tab.set_active_window_idx(window_idx)
        old_focus = tab.active_window
        if not old_focus.destroyed:
            old_focus.focus_changed(False)
        tab.active_window.focus_changed(True)

    def open_url(self, url):
        if url:
            open_url(url, self.opts.open_url_with)

    def open_url_lines(self, lines):
        self.open_url(''.join(lines))

    def destroy(self):
        self.shutting_down = True
        self.child_monitor.shutdown_monitor()
        del self.child_monitor
        for tm in self.os_window_map.values():
            tm.destroy()
        self.os_window_map = {}
        destroy_sprite_map()
        destroy_global_data()

    def paste_to_active_window(self, text):
        if text:
            w = self.active_window
            if w is not None:
                w.paste(text)

    def paste_from_clipboard(self):
        text = get_clipboard_string()
        self.paste_to_active_window(text)

    def paste_from_selection(self):
        text = get_primary_selection()
        self.paste_to_active_window(text)

    def set_primary_selection(self):
        w = self.active_window
        if w is not None and not w.destroyed:
            text = w.text_for_selection()
            if text:
                set_primary_selection(text)
                if self.opts.copy_on_select:
                    set_clipboard_string(text)

    def goto_tab(self, tab_num):
        tm = self.active_tab_manager
        if tm is not None:
            tm.goto_tab(tab_num - 1)

    def next_tab(self):
        tm = self.active_tab_manager
        if tm is not None:
            tm.next_tab()

    def previous_tab(self):
        tm = self.active_tab_manager
        if tm is not None:
            tm.next_tab(-1)

    def args_to_special_window(self, args, cwd_from=None):
        args = list(args)
        stdin = None
        w = self.active_window

        def data_for_at(arg):
            if arg == '@selection':
                return w.text_for_selection()
            if arg == '@ansi':
                return w.as_text(as_ansi=True, add_history=True)
            if arg == '@text':
                return w.as_text(add_history=True)
            if arg == '@screen':
                return w.as_text()
            if arg == '@ansi_screen':
                return w.as_text(as_ansi=True)

        if args[0].startswith('@'):
            stdin = data_for_at(args[0]) or None
            if stdin is not None:
                stdin = stdin.encode('utf-8')
            del args[0]

        cmd = []
        for arg in args:
            if arg == '@selection':
                arg = data_for_at(arg)
                if not arg:
                    continue
            cmd.append(arg)
        return SpecialWindow(cmd, stdin, cwd_from=cwd_from)

    def _new_tab(self, args, cwd_from=None):
        special_window = None
        if args:
            if isinstance(args, SpecialWindowInstance):
                special_window = args
            else:
                special_window = self.args_to_special_window(args,
                                                             cwd_from=cwd_from)
        tm = self.active_tab_manager
        if tm is not None:
            tm.new_tab(special_window=special_window, cwd_from=cwd_from)

    def new_tab(self, *args):
        self._new_tab(args)

    def new_tab_with_cwd(self, *args):
        w = self.active_window
        cwd_from = w.child.pid if w is not None else None
        self._new_tab(args, cwd_from=cwd_from)

    def _new_window(self, args, cwd_from=None):
        tab = self.active_tab
        if tab is not None:
            if args:
                tab.new_special_window(
                    self.args_to_special_window(args, cwd_from=cwd_from))
            else:
                tab.new_window(cwd_from=cwd_from)

    def new_window(self, *args):
        self._new_window(args)

    def new_window_with_cwd(self, *args):
        w = self.active_window
        if w is None:
            return self.new_window(*args)
        cwd_from = w.child.pid if w is not None else None
        self._new_window(args, cwd_from=cwd_from)

    def move_tab_forward(self):
        tm = self.active_tab_manager
        if tm is not None:
            tm.move_tab(1)

    def move_tab_backward(self):
        tm = self.active_tab_manager
        if tm is not None:
            tm.move_tab(-1)
Esempio n. 41
0
class Entity:
    
    
    def __init__(self, entType, entValue, entField):
        if isinstance(entField, Field):
            self.type = entType
            self.value = entValue
            self.field = entField
            self.group = None
            self.links = WeakValueDictionary() # dict of linked entities
            self.field.registerEntity(self) # update the entity registry
        else:
            raise TypeError("Invalid field argument, field instance expected!")
    
    
    def linkTo(self, eTwo):
        ''' Linking operation is bi-directional, affects both entities equally.'''
        # check if entities not already linked
        if Edge.linkId(self, eTwo) not in self.links.keys():
            # update both entities' list of links
            # create a new edge
            newlink = Edge(self, eTwo, self.field)
            self.links[newlink.id] = eTwo
            eTwo.links[newlink.id] = self
            # case when the first entity's group is not set
            if self.group is None:
                # assuming the second entity has already a group assigned
                try:
                    eTwo.group.addMember(self)
                # except the second entity has no group
                except AttributeError:
                    newGroup = Group(self.field)
                    newGroup.addMember(self)
                    newGroup.addMember(eTwo)
                    
            # case when the first entity's group is set, but the second entity's is not
            elif eTwo.group is None:
                self.group.addMember(eTwo)
            
            # case when both entities have groups set and they are different groups
            elif self.group.name != eTwo.group.name:
                if self.group.size > eTwo.group.size:
                    # first group wins
                    self.group.annexMembers(eTwo.group)
                else:
                    # second group wins
                    eTwo.group.annexMembers(self.group)
    
    
    def getLinks(self):
        ''' Return the entities directly linked to this one.'''
        return self.links.values()
    
    
    def removeLink(self, eTwo):
        ''' Remove linked entity.'''
        linkId = Edge.linkId(self, eTwo)
        self.links.pop(linkId)
    
    
    def __repr__(self):
        return repr(self.value)
    
    
    def __del__(self):
        ''' Delete itself from linked entities, and delete links.'''
        # removing the reverse link from the linked entity is not needed,
        # because the link maps hold only weak references
        # iterate over a snapshot of the keys in case edge removal mutates
        # the map while we loop
        for linkId in list(self.links.keys()):
            self.field.eliminateEdge(linkId)
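
The reason `links` is a `WeakValueDictionary` is that an entity's link map should never be what keeps a neighbour alive. A self-contained demonstration of that behaviour, using a throwaway `Node` class rather than the `Entity`/`Edge`/`Field` machinery above:

# Stand-alone illustration of the weak-link idea above (plain Node objects,
# not the Entity/Edge/Field classes): entries in a WeakValueDictionary
# disappear as soon as the last strong reference to the value is gone.
import gc
from weakref import WeakValueDictionary


class Node:
    def __init__(self, name):
        self.name = name
        self.links = WeakValueDictionary()

    def __repr__(self):
        return 'Node(%r)' % self.name


a, b = Node('a'), Node('b')
a.links['a-b'] = b
b.links['a-b'] = a

print(list(a.links.values()))   # [Node('b')]
del b                           # drop the only strong reference to b
gc.collect()                    # make collection deterministic off CPython
print(list(a.links.values()))   # [] -- the stale link simply vanished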
Example n. 42
0
class Boss:
    def __init__(self, os_window_id, opts, args, cached_values):
        self.window_id_map = WeakValueDictionary()
        self.startup_colors = {
            k: opts[k]
            for k in opts if isinstance(opts[k], Color)
        }
        self.pending_sequences = None
        self.cached_values = cached_values
        self.os_window_map = {}
        self.cursor_blinking = True
        self.shutting_down = False
        talk_fd = getattr(single_instance, 'socket', None)
        talk_fd = -1 if talk_fd is None else talk_fd.fileno()
        listen_fd = -1
        if opts.allow_remote_control and args.listen_on:
            listen_fd = listen_on(args.listen_on)
        self.child_monitor = ChildMonitor(
            self.on_child_death,
            DumpCommands(args) if args.dump_commands or args.dump_bytes else
            None, talk_fd, listen_fd)
        set_boss(self)
        self.opts, self.args = opts, args
        startup_session = create_session(opts, args)
        self.add_os_window(startup_session, os_window_id=os_window_id)

    def add_os_window(self,
                      startup_session,
                      os_window_id=None,
                      wclass=None,
                      wname=None,
                      opts_for_size=None,
                      startup_id=None):
        if os_window_id is None:
            opts_for_size = opts_for_size or self.opts
            cls = wclass or self.args.cls or appname
            with startup_notification_handler(
                    do_notify=startup_id is not None,
                    startup_id=startup_id) as pre_show_callback:
                os_window_id = create_os_window(
                    initial_window_size_func(opts_for_size,
                                             self.cached_values),
                    pre_show_callback, appname, wname or self.args.name or cls,
                    cls)
        tm = TabManager(os_window_id, self.opts, self.args, startup_session)
        self.os_window_map[os_window_id] = tm
        return os_window_id

    def list_os_windows(self):
        for os_window_id, tm in self.os_window_map.items():
            yield {
                'id': os_window_id,
                'tabs': list(tm.list_tabs()),
            }

    @property
    def all_tab_managers(self):
        yield from self.os_window_map.values()

    @property
    def all_tabs(self):
        for tm in self.all_tab_managers:
            yield from tm

    @property
    def all_windows(self):
        for tab in self.all_tabs:
            yield from tab

    def match_windows(self, match):
        try:
            field, exp = match.split(':', 1)
        except ValueError:
            return
        if field == 'num':
            tab = self.active_tab
            if tab is not None:
                try:
                    w = tab.get_nth_window(int(exp))
                except Exception:
                    return
                if w is not None:
                    yield w
        else:
            pat = re.compile(exp)
            for window in self.all_windows:
                if window.matches(field, pat):
                    yield window

    def tab_for_window(self, window):
        for tab in self.all_tabs:
            for w in tab:
                if w.id == window.id:
                    return tab

    def match_tabs(self, match):
        try:
            field, exp = match.split(':', 1)
        except ValueError:
            return
        pat = re.compile(exp)
        found = False
        if field in ('title', 'id'):
            for tab in self.all_tabs:
                if tab.matches(field, pat):
                    yield tab
                    found = True
        if not found:
            tabs = {self.tab_for_window(w) for w in self.match_windows(match)}
            for tab in tabs:
                if tab:
                    yield tab

    def set_active_window(self, window):
        for tm in self.os_window_map.values():
            for tab in tm:
                for w in tab:
                    if w.id == window.id:
                        if tab is not self.active_tab:
                            tm.set_active_tab(tab)
                        tab.set_active_window(w)
                        return

    def _new_os_window(self, args, cwd_from=None):
        sw = self.args_to_special_window(args, cwd_from) if args else None
        startup_session = create_session(self.opts,
                                         special_window=sw,
                                         cwd_from=cwd_from)
        return self.add_os_window(startup_session)

    def new_os_window(self, *args):
        self._new_os_window(args)

    def new_os_window_with_cwd(self, *args):
        w = self.active_window
        cwd_from = w.child.pid if w is not None else None
        self._new_os_window(args, cwd_from)

    def add_child(self, window):
        self.child_monitor.add_child(window.id, window.child.pid,
                                     window.child.child_fd, window.screen)
        self.window_id_map[window.id] = window

    def _handle_remote_command(self, cmd, window=None):
        response = None
        if self.opts.allow_remote_control or getattr(
                window, 'allow_remote_control', False):
            try:
                response = handle_cmd(self, window, cmd)
            except Exception as err:
                import traceback
                response = {'ok': False, 'error': str(err)}
                if not getattr(err, 'hide_traceback', False):
                    response['tb'] = traceback.format_exc()
        else:
            response = {
                'ok': False,
                'error': 'Remote control is disabled. '
                         'Add allow_remote_control yes to your kitty.conf',
            }
        return response

    def peer_message_received(self, msg):
        msg = msg.decode('utf-8')
        cmd_prefix = '\x1bP@kitty-cmd'
        if msg.startswith(cmd_prefix):
            cmd = msg[len(cmd_prefix):-2]
            response = self._handle_remote_command(cmd)
            if response is not None:
                response = (cmd_prefix + json.dumps(response) +
                            '\x1b\\').encode('utf-8')
            return response
        else:
            msg = json.loads(msg)
            if isinstance(msg, dict) and msg.get('cmd') == 'new_instance':
                startup_id = msg.get('startup_id')
                args, rest = parse_args(msg['args'][1:])
                args.args = rest
                opts = create_opts(args)
                if not os.path.isabs(args.directory):
                    args.directory = os.path.join(msg['cwd'], args.directory)
                session = create_session(opts, args, respect_cwd=True)
                self.add_os_window(session,
                                   wclass=args.cls,
                                   wname=args.name,
                                   opts_for_size=opts,
                                   startup_id=startup_id)
            else:
                log_error('Unknown message received from peer, ignoring')

    def handle_remote_cmd(self, cmd, window=None):
        response = self._handle_remote_command(cmd, window)
        if response is not None:
            if window is not None:
                window.send_cmd_response(response)

    def on_child_death(self, window_id):
        window = self.window_id_map.pop(window_id, None)
        if window is None:
            return
        if window.action_on_close:
            try:
                window.action_on_close(window)
            except Exception:
                import traceback
                traceback.print_exc()
        os_window_id = window.os_window_id
        window.destroy()
        tm = self.os_window_map.get(os_window_id)
        if tm is None:
            return
        for tab in tm:
            if window in tab:
                break
        else:
            return
        tab.remove_window(window)
        if len(tab) == 0:
            tm.remove(tab)
            tab.destroy()
            if len(tm) == 0:
                if not self.shutting_down:
                    mark_os_window_for_close(os_window_id)
                    glfw_post_empty_event()

    def close_window(self, window=None):
        if window is None:
            window = self.active_window
            if window is None:
                return
        self.child_monitor.mark_for_close(window.id)

    def close_tab(self, tab=None):
        if tab is None:
            tab = self.active_tab
            if tab is None:
                return
        for window in tab:
            self.close_window(window)

    def toggle_fullscreen(self):
        toggle_fullscreen()

    def start(self):
        if not getattr(self, 'io_thread_started', False):
            self.child_monitor.start()
            self.io_thread_started = True

    def activate_tab_at(self, os_window_id, x):
        tm = self.os_window_map.get(os_window_id)
        if tm is not None:
            tm.activate_tab_at(x)

    def on_window_resize(self, os_window_id, w, h, dpi_changed):
        if dpi_changed:
            self.on_dpi_change(os_window_id)
        else:
            tm = self.os_window_map.get(os_window_id)
            if tm is not None:
                tm.resize()

    def increase_font_size(self):  # legacy
        self.set_font_size(
            min(self.opts.font_size * 5, self.current_font_size + 2.0))

    def decrease_font_size(self):  # legacy
        self.set_font_size(self.current_font_size - self.opts.font_size_delta)

    def restore_font_size(self):  # legacy
        self.set_font_size(self.opts.font_size)

    def set_font_size(self, new_size):  # legacy
        self.change_font_size(True, None, new_size)

    def change_font_size(self, all_windows, increment_operation, amt):
        def calc_new_size(old_size):
            new_size = old_size
            if amt == 0:
                new_size = self.opts.font_size
            else:
                if increment_operation:
                    new_size += (1 if increment_operation == '+' else -1) * amt
                else:
                    new_size = amt
                new_size = max(MINIMUM_FONT_SIZE,
                               min(new_size, self.opts.font_size * 5))
            return new_size

        if all_windows:
            current_global_size = global_font_size()
            new_size = calc_new_size(current_global_size)
            if new_size != current_global_size:
                global_font_size(new_size)
            os_windows = tuple(self.os_window_map.keys())
        else:
            os_windows = []
            w = self.active_window
            if w is not None:
                os_windows.append(w.os_window_id)
        if os_windows:
            final_windows = {}
            for wid in os_windows:
                current_size = os_window_font_size(wid)
                if current_size:
                    new_size = calc_new_size(current_size)
                    if new_size != current_size:
                        final_windows[wid] = new_size
            if final_windows:
                self._change_font_size(final_windows)

    def _change_font_size(self, sz_map):
        for os_window_id, sz in sz_map.items():
            tm = self.os_window_map.get(os_window_id)
            if tm is not None:
                os_window_font_size(os_window_id, sz)
                tm.resize()

    def on_dpi_change(self, os_window_id):
        tm = self.os_window_map.get(os_window_id)
        if tm is not None:
            sz = os_window_font_size(os_window_id)
            if sz:
                os_window_font_size(os_window_id, sz, True)
                tm.resize()

    def _set_os_window_background_opacity(self, os_window_id, opacity):
        change_background_opacity(os_window_id, max(0.1, min(opacity, 1.0)))

    def set_background_opacity(self, opacity):
        window = self.active_window
        if window is None or not opacity:
            return
        if not self.opts.dynamic_background_opacity:
            return self.show_error(
                _('Cannot change background opacity'),
                _('You must set the dynamic_background_opacity option in kitty.conf to be able to change background opacity'
                  ))
        os_window_id = window.os_window_id
        if opacity[0] in '+-':
            # relative change: add the signed increment to the current opacity
            old_opacity = background_opacity_of(os_window_id)
            if old_opacity is None:
                return
            opacity = old_opacity + float(opacity)
        elif opacity == 'default':
            opacity = self.opts.background_opacity
        else:
            opacity = float(opacity)
        self._set_os_window_background_opacity(os_window_id, opacity)

    @property
    def active_tab_manager(self):
        os_window_id = current_os_window()
        return self.os_window_map.get(os_window_id)

    @property
    def active_tab(self):
        tm = self.active_tab_manager
        if tm is not None:
            return tm.active_tab

    @property
    def active_window(self):
        t = self.active_tab
        if t is not None:
            return t.active_window

    def dispatch_special_key(self, key, scancode, action, mods):
        # Handles shortcuts, return True if the key was consumed
        key_action = get_shortcut(self.opts.keymap, mods, key, scancode)
        if key_action is None:
            sequences = get_shortcut(self.opts.sequence_map, mods, key,
                                     scancode)
            if sequences:
                self.pending_sequences = sequences
                set_in_sequence_mode(True)
                return True
        else:
            self.current_key_press_info = key, scancode, action, mods
            return self.dispatch_action(key_action)

    def process_sequence(self, key, scancode, action, mods):
        if not self.pending_sequences:
            set_in_sequence_mode(False)
            return

        remaining = {}
        matched_action = None
        for seq, key_action in self.pending_sequences.items():
            if shortcut_matches(seq[0], mods, key, scancode):
                seq = seq[1:]
                if seq:
                    remaining[seq] = key_action
                else:
                    matched_action = key_action

        if remaining:
            self.pending_sequences = remaining
        else:
            self.pending_sequences = None
            set_in_sequence_mode(False)
            if matched_action is not None:
                self.dispatch_action(matched_action)

    def start_resizing_window(self):
        w = self.active_window
        if w is None:
            return
        overlay_window = self._run_kitten(
            'resize_window',
            args=[
                '--horizontal-increment={}'.format(
                    self.opts.window_resize_step_cells),
                '--vertical-increment={}'.format(
                    self.opts.window_resize_step_lines)
            ])
        if overlay_window is not None:
            overlay_window.allow_remote_control = True

    def resize_layout_window(self,
                             window,
                             increment,
                             is_horizontal,
                             reset=False):
        tab = window.tabref()
        if tab is None or not increment:
            return False
        if reset:
            return tab.reset_window_sizes()
        return tab.resize_window_by(window.id, increment, is_horizontal)

    def default_bg_changed_for(self, window_id):
        w = self.window_id_map.get(window_id)
        if w is not None:
            tm = self.os_window_map.get(w.os_window_id)
            if tm is not None:
                t = tm.tab_for_id(w.tab_id)
                if t is not None:
                    t.relayout_borders()

    def dispatch_action(self, key_action):
        if key_action is not None:
            f = getattr(self, key_action.func, None)
            if f is not None:
                passthrough = f(*key_action.args)
                if passthrough is not True:
                    return True
        tab = self.active_tab
        if tab is None:
            return False
        window = self.active_window
        if window is None:
            return False
        if key_action is not None:
            f = getattr(tab, key_action.func,
                        getattr(window, key_action.func, None))
            if f is not None:
                passthrough = f(*key_action.args)
                if passthrough is not True:
                    return True
        return False

    def combine(self, *actions):
        for key_action in actions:
            self.dispatch_action(key_action)

    def on_focus(self, os_window_id, focused):
        tm = self.os_window_map.get(os_window_id)
        if tm is not None:
            w = tm.active_window
            if w is not None:
                w.focus_changed(focused)
            tm.mark_tab_bar_dirty()

    def update_tab_bar_data(self, os_window_id):
        tm = self.os_window_map.get(os_window_id)
        if tm is not None:
            tm.update_tab_bar_data()

    def on_drop(self, os_window_id, paths):
        tm = self.os_window_map.get(os_window_id)
        if tm is not None:
            w = tm.active_window
            if w is not None:
                w.paste('\n'.join(paths))

    def on_os_window_closed(self, os_window_id, viewport_width,
                            viewport_height):
        self.cached_values['window-size'] = viewport_width, viewport_height
        tm = self.os_window_map.pop(os_window_id, None)
        if tm is not None:
            tm.destroy()
        for window_id in tuple(
                w.id for w in self.window_id_map.values()
                if getattr(w, 'os_window_id', None) == os_window_id):
            self.window_id_map.pop(window_id, None)

    def display_scrollback(self, window, data):
        tab = self.active_tab
        if tab is not None and window.overlay_for is None:
            tab.new_special_window(
                SpecialWindow(self.opts.scrollback_pager,
                              data,
                              _('History'),
                              overlay_for=window.id))

    def edit_config_file(self, *a):
        confpath = prepare_config_file_for_editing()
        # On macOS vim fails to handle SIGWINCH if it occurs early, so add a
        # small delay.
        cmd = [
            'kitty', '+runpy',
            'import os, sys, time; time.sleep(0.05); os.execvp(sys.argv[1], sys.argv[1:])'
        ] + editor + [confpath]
        self.new_os_window(*cmd)

    def get_output(self, source_window, num_lines=1):
        output = ''
        s = source_window.screen
        if num_lines is None:
            num_lines = s.lines
        for i in range(min(num_lines, s.lines)):
            output += str(s.linebuf.line(i))
        return output

    def _run_kitten(self, kitten, args=(), input_data=None):
        w = self.active_window
        tab = self.active_tab
        if w is not None and tab is not None and w.overlay_for is None:
            orig_args, args = list(args), list(args)
            from kittens.runner import create_kitten_handler
            end_kitten = create_kitten_handler(kitten, orig_args)
            args[0:0] = [config_dir, kitten]
            if input_data is None:
                type_of_input = end_kitten.type_of_input
                if type_of_input in ('text', 'history', 'ansi', 'ansi-history',
                                     'screen', 'screen-history', 'screen-ansi',
                                     'screen-ansi-history'):
                    data = w.as_text(
                        as_ansi='ansi' in type_of_input,
                        add_history='history' in type_of_input,
                        add_wrap_markers='screen' in type_of_input,
                    ).encode('utf-8')
                elif type_of_input is None:
                    data = None
                else:
                    raise ValueError(
                        'Unknown type_of_input: {}'.format(type_of_input))
            else:
                data = input_data
            if isinstance(data, str):
                data = data.encode('utf-8')
            copts = {
                k: self.opts[k]
                for k in ('select_by_word_characters', 'open_url_with')
            }
            overlay_window = tab.new_special_window(
                SpecialWindow([
                    'kitty', '+runpy',
                    'from kittens.runner import main; main()'
                ] + args,
                              stdin=data,
                              env={
                                  'KITTY_COMMON_OPTS': json.dumps(copts),
                                  'PYTHONWARNINGS': 'ignore',
                                  'OVERLAID_WINDOW_LINES': str(w.screen.lines),
                                  'OVERLAID_WINDOW_COLS':
                                  str(w.screen.columns),
                              },
                              overlay_for=w.id))
            overlay_window.action_on_close = partial(self.on_kitten_finish,
                                                     w.id, end_kitten)
            return overlay_window

    def kitten(self, kitten, *args):
        import shlex
        cmdline = args[0] if args else ''
        args = shlex.split(cmdline) if cmdline else []
        self._run_kitten(kitten, args)

    def on_kitten_finish(self, target_window_id, end_kitten, source_window):
        output = self.get_output(source_window, num_lines=None)
        from kittens.runner import deserialize
        data = deserialize(output)
        if data is not None:
            end_kitten(data, target_window_id, self)

    def input_unicode_character(self):
        self._run_kitten('unicode_input')

    def set_tab_title(self):
        tab = self.active_tab
        if tab:
            args = [
                '--name=tab-title', '--message',
                _('Enter the new title for this tab below.'),
                'do_set_tab_title',
                str(tab.id)
            ]
            self._run_kitten('ask', args)

    def show_error(self, title, msg):
        self._run_kitten('show_error', ['--title', title], input_data=msg)

    def do_set_tab_title(self, title, tab_id):
        tm = self.active_tab_manager
        if tm is not None and title:
            tab_id = int(tab_id)
            for tab in tm.tabs:
                if tab.id == tab_id:
                    tab.set_title(title)
                    break

    def kitty_shell(self, window_type):
        cmd = ['kitty', '@']
        if window_type == 'tab':
            window = self._new_tab(cmd).active_window
        elif window_type == 'os_window':
            os_window_id = self._new_os_window(cmd)
            window = self.os_window_map[os_window_id].active_window
        elif window_type == 'overlay':
            w = self.active_window
            tab = self.active_tab
            if w is not None and tab is not None and w.overlay_for is None:
                window = tab.new_special_window(
                    SpecialWindow(cmd, overlay_for=w.id))
            else:
                window = None
        else:
            window = self._new_window(cmd)
        if window is not None:
            window.allow_remote_control = True

    def switch_focus_to(self, window_idx):
        tab = self.active_tab
        if tab is not None:
            tab.set_active_window_idx(window_idx)

    def open_url(self, url, program=None, cwd=None):
        if url:
            if isinstance(program, str):
                program = to_cmdline(program)
            open_url(url, program or self.opts.open_url_with, cwd=cwd)

    def open_url_lines(self, lines, program=None):
        self.open_url(''.join(lines), program)

    def destroy(self):
        self.shutting_down = True
        self.child_monitor.shutdown_monitor()
        del self.child_monitor
        for tm in self.os_window_map.values():
            tm.destroy()
        self.os_window_map = {}
        destroy_global_data()

    def paste_to_active_window(self, text):
        if text:
            w = self.active_window
            if w is not None:
                w.paste(text)

    def paste_from_clipboard(self):
        text = get_clipboard_string()
        self.paste_to_active_window(text)

    def paste_from_selection(self):
        text = (get_primary_selection() if supports_primary_selection
                else get_clipboard_string())
        self.paste_to_active_window(text)

    def set_primary_selection(self):
        w = self.active_window
        if w is not None and not w.destroyed:
            text = w.text_for_selection()
            if text:
                set_primary_selection(text)
                if self.opts.copy_on_select:
                    set_clipboard_string(text)

    def goto_tab(self, tab_num):
        tm = self.active_tab_manager
        if tm is not None:
            tm.goto_tab(tab_num - 1)

    def set_active_tab(self, tab):
        tm = self.active_tab_manager
        if tm is not None:
            tm.set_active_tab(tab)

    def next_tab(self):
        tm = self.active_tab_manager
        if tm is not None:
            tm.next_tab()

    def previous_tab(self):
        tm = self.active_tab_manager
        if tm is not None:
            tm.next_tab(-1)

    def args_to_special_window(self, args, cwd_from=None):
        args = list(args)
        stdin = None
        w = self.active_window

        def data_for_at(arg):
            if arg == '@selection':
                return w.text_for_selection()
            if arg == '@ansi':
                return w.as_text(as_ansi=True, add_history=True)
            if arg == '@text':
                return w.as_text(add_history=True)
            if arg == '@screen':
                return w.as_text()
            if arg == '@ansi_screen':
                return w.as_text(as_ansi=True)

        if args[0].startswith('@'):
            stdin = data_for_at(args[0]) or None
            if stdin is not None:
                stdin = stdin.encode('utf-8')
            del args[0]

        cmd = []
        for arg in args:
            if arg == '@selection':
                arg = data_for_at(arg)
                if not arg:
                    continue
            cmd.append(arg)
        return SpecialWindow(cmd, stdin, cwd_from=cwd_from)

    def _new_tab(self, args, cwd_from=None):
        special_window = None
        if args:
            if isinstance(args, SpecialWindowInstance):
                special_window = args
            else:
                special_window = self.args_to_special_window(args,
                                                             cwd_from=cwd_from)
        tm = self.active_tab_manager
        if tm is not None:
            return tm.new_tab(special_window=special_window, cwd_from=cwd_from)

    def new_tab(self, *args):
        self._new_tab(args)

    def new_tab_with_cwd(self, *args):
        w = self.active_window
        cwd_from = w.child.pid if w is not None else None
        self._new_tab(args, cwd_from=cwd_from)

    def _new_window(self, args, cwd_from=None):
        tab = self.active_tab
        if tab is not None:
            if args:
                return tab.new_special_window(
                    self.args_to_special_window(args, cwd_from=cwd_from))
            else:
                return tab.new_window(cwd_from=cwd_from)

    def new_window(self, *args):
        self._new_window(args)

    def new_window_with_cwd(self, *args):
        w = self.active_window
        if w is None:
            return self.new_window(*args)
        cwd_from = w.child.pid if w is not None else None
        self._new_window(args, cwd_from=cwd_from)

    def move_tab_forward(self):
        tm = self.active_tab_manager
        if tm is not None:
            tm.move_tab(1)

    def move_tab_backward(self):
        tm = self.active_tab_manager
        if tm is not None:
            tm.move_tab(-1)

    def patch_colors(self, spec, configured=False):
        if configured:
            for k, v in spec.items():
                if hasattr(self.opts, k):
                    setattr(self.opts, k, color_from_int(v))
        for tm in self.all_tab_managers:
            tm.tab_bar.patch_colors(spec)
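
`Boss.window_id_map` above follows the same pattern: tabs own the windows, and the id map is only a lookup index, so a window dropped elsewhere neither leaks nor leaves a dangling entry. A reduced sketch of that idea (`ToyWindow` and `ToyBoss` are made-up stand-ins, not kitty classes):

# Reduced illustration of the window_id_map pattern above; ToyWindow and
# ToyBoss are illustrative stand-ins only.
import gc
from weakref import WeakValueDictionary


class ToyWindow:
    def __init__(self, wid):
        self.id = wid


class ToyBoss:
    def __init__(self):
        self.window_id_map = WeakValueDictionary()
        self.tabs = [[]]              # tabs hold the strong references

    def add_child(self, window):
        self.tabs[0].append(window)
        self.window_id_map[window.id] = window


boss = ToyBoss()
boss.add_child(ToyWindow(1))
boss.add_child(ToyWindow(2))
print(sorted(boss.window_id_map))     # [1, 2]
boss.tabs[0].pop()                    # the tab drops window 2
gc.collect()
print(sorted(boss.window_id_map))     # [1] -- no manual cleanup required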
Example n. 43
0
    def pop(self, key, *args):
        if isinstance(key, bytes):
            key = key.decode("utf-8")
        return WeakValueDictionary.pop(self, key, *args)
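
This stray `pop` override appears to come from a `WeakValueDictionary` subclass that normalizes `bytes` keys to `str` before delegating; the enclosing class is not shown, so the name below is invented. A minimal self-contained version of that idea:

# Hypothetical reconstruction of the context for the pop() override above:
# a WeakValueDictionary whose keys are always str, but which also accepts
# bytes keys on lookup. The class name is made up.
from weakref import WeakValueDictionary


class StrKeyedWeakValueDictionary(WeakValueDictionary):

    def pop(self, key, *args):
        if isinstance(key, bytes):
            key = key.decode("utf-8")
        return WeakValueDictionary.pop(self, key, *args)


class Obj:
    pass


o = Obj()
d = StrKeyedWeakValueDictionary()
d["spam"] = o
print(d.pop(b"spam") is o)   # True: the bytes key is decoded before lookup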
class _WeakValueDictionary(object):
    # Maps from OID -> Persistent object, but
    # only weakly references the Persistent object. This is similar
    # to ``weakref.WeakValueDictionary``, but is customized depending on the
    # platform. On PyPy, all objects can cheaply use a WeakRef, so that's
    # what we actually use. On CPython, though, ``PersistentPy`` cannot be weakly
    # referenced, so we rely on the fact that the ``id()`` of an object is its
    # memory location, and we use ``ctypes`` to cast that integer back to
    # the object.
    #
    # To remove stale addresses, we rely on the ``ffi.gc()`` object with the exact
    # same lifetime as the ``PersistentPy`` object. It calls us, we get the ``id``
    # back out of the CData, and clean up.
    if PYPY: # pragma: no cover
        def __init__(self):
            self._data = WeakValueDictionary()

        def _from_addr(self, addr):
            return addr

        def _save_addr(self, oid, obj):
            return obj

        cleanup_hook = None
    else:
        def __init__(self):
            # careful not to require ctypes at import time; most likely the
            # C implementation is in use.
            import ctypes

            self._data = {}
            self._addr_to_oid = {}
            self._cast = ctypes.cast
            self._py_object = ctypes.py_object

        def _save_addr(self, oid, obj):
            i = id(obj)
            self._addr_to_oid[i] = oid
            return i

        def _from_addr(self, addr):
            return self._cast(addr, self._py_object).value

        def cleanup_hook(self, cdata):
            # This is called during GC, possibly at interpreter shutdown
            # when the __dict__ of this object may have already been cleared.
            try:
                addr_to_oid = self._addr_to_oid
            except AttributeError:
                return
            oid = addr_to_oid.pop(cdata.pobj_id, None)
            self._data.pop(oid, None)

    def __contains__(self, oid):
        return oid in self._data

    def __len__(self):
        return len(self._data)

    def __setitem__(self, key, value):
        addr = self._save_addr(key, value)
        self._data[key] = addr

    def pop(self, oid):
        return self._from_addr(self._data.pop(oid))

    def items(self):
        from_addr = self._from_addr
        for oid, addr in self._data.items():
            yield oid, from_addr(addr)

    def get(self, oid, default=None):
        addr = self._data.get(oid, self)
        if addr is self:
            return default
        return self._from_addr(addr)

    def __getitem__(self, oid):
        addr = self._data[oid]
        return self._from_addr(addr)
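
The CPython branch above hinges on two facts: `id()` returns the object's address, and `ctypes` can cast that integer back to a `py_object`, which is all `_from_addr` does. A tiny round-trip demonstration (CPython only, and only safe while a strong reference keeps the object alive, which is the invariant `cleanup_hook` exists to protect):

# CPython-only illustration of the id()/ctypes round trip used above.
# Casting a stale address would crash or corrupt memory, which is why the
# real code pairs this with a cleanup hook tied to the object's lifetime.
import ctypes

obj = {"answer": 42}
addr = id(obj)                                     # the object's address
same = ctypes.cast(addr, ctypes.py_object).value   # cast it back to the object
print(same is obj)                                 # True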
Example n. 45
0
class EntityCache(object):
    """
    Cache for entities.

    Supports add and remove operations as well as lookup by ID and
    by slug.
    """
    def __init__(self, entities=None, allow_none_id=True):
        """
        :param bool allow_none_id: Flag specifying if calling :meth:`add`
            with an entity that does not have an ID is allowed.
        """
        # Flag indicating if None IDs are allowed in this cache.
        self.__allow_none_id = allow_none_id
        # List of cached entities. This is the only place we are holding a
        # real reference to the entity.
        if entities is None:
            entities = []
        self.__entities = entities
        # Dictionary mapping entity IDs to entities for fast lookup by ID.
        self.__id_map = WeakValueDictionary()
        # Dictionary mapping entity slugs to entities for fast lookup by slug.
        self.__slug_map = {}

    def get_by_id(self, entity_id):
        """
        Performs a lookup of an entity by its ID.

        :param int entity_id: entity ID.
        :return: entity found or ``None``.
        """
        return self.__id_map.get(entity_id)

    def has_id(self, entity_id):
        """
        Checks if this entity cache holds an entity with the given ID.

        :return: Boolean result of the check.
        """
        return entity_id in self.__id_map

    def get_by_slug(self, entity_slug):
        """
        Performs a lookup of an entity by its slug.

        :param str entity_slug: entity slug.
        :return: entity found or ``None``.
        """
        return self.__slug_map.get(entity_slug)

    def has_slug(self, entity_slug):
        return entity_slug in self.__slug_map

    def add(self, entity):
        """
        Adds the given entity to this cache.

        :param entity: Entity to add.
        :type entity: Object implementing :class:`everest.interfaces.IEntity`.
        :raises ValueError: If the ID of the entity to add is ``None``
          (unless the `allow_none_id` constructor argument was set).
        """
        do_append = self.__check_new(entity)
        if do_append:
            self.__entities.append(entity)

    def remove(self, entity):
        """
        Removes the given entity from this cache.

        :param entity: Entity to remove.
        :type entity: Object implementing :class:`everest.interfaces.IEntity`.
        :raises KeyError: If the given entity is not in this cache.
        :raises ValueError: If the ID of the given entity is `None`.
        """
        self.__id_map.pop(entity.id, None)
        self.__slug_map.pop(entity.slug, None)
        self.__entities.remove(entity)

    def update(self, source_data, target_entity):
        """
        Updates the state of the target entity with the given source data.

        :param target_entity: Entity to update.
        :type target_entity: Object implementing
          :class:`everest.interfaces.IEntity`.
        """
        EntityState.set_state_data(target_entity, source_data)

    def get_all(self):
        """
        Returns the list of all entities in this cache in the order they
        were added.
        """
        return self.__entities

    def retrieve(self,
                 filter_expression=None,
                 order_expression=None,
                 slice_key=None):
        """
        Retrieve entities from this cache, possibly after filtering, ordering
        and slicing.
        """
        ents = iter(self.__entities)
        if filter_expression is not None:
            ents = filter_expression(ents)
        if order_expression is not None:
            # Ordering always involves a copy and conversion to a list, so
            # we have to wrap in an iterator.
            ents = iter(order_expression(ents))
        if slice_key is not None:
            ents = islice(ents, slice_key.start, slice_key.stop)
        return ents

    def rebuild(self, entities):
        """
        Rebuilds the ID and slug maps of this cache.

        This can be necessary when entities obtain their IDs only after
        they have been flushed to the backend.
        """
        for ent in entities:
            self.__check_new(ent)

    def __contains__(self, entity):
        if entity.id is not None:
            is_contained = entity.id in self.__id_map
        else:
            is_contained = entity in self.__entities
        return is_contained

    def __check_new(self, entity):
        # For certain use cases (e.g., staging), the entity being added is
        # not required to have an ID yet.
        do_append = True
        if entity.id is not None:
            if entity.id in self.__id_map:
                if self.__id_map[entity.id] is not entity:
                    raise ValueError('Duplicate entity ID "%s". %s' %
                                     (entity.id, entity))
                else:
                    do_append = False
            else:
                self.__id_map[entity.id] = entity
        elif not self.__allow_none_id:
            raise ValueError('Entity ID must not be None.')
        # The slug can be a lazy attribute that depends on the value of other
        # (possibly not yet initialized) attributes, which is why we cannot
        # always assume it is available at this point.
        if do_append and hasattr(entity, 'slug') and entity.slug is not None:
            ents = self.__slug_map.get(entity.slug)
            if ents is not None:
                ents.append(entity)
            else:
                self.__slug_map[entity.slug] = WeakList([entity])
        return do_append
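
The essential property of this cache is that `__entities` holds the only strong references while the ID map is weak, so dropping an entity from the list also drops it from the lookup. A reduced sketch of that interplay (`TinyCache` and `Item` are illustrative stand-ins, not everest classes):

# Reduced illustration of the pattern above; TinyCache and Item are made up.
import gc
from weakref import WeakValueDictionary


class Item:
    def __init__(self, item_id):
        self.id = item_id


class TinyCache:
    def __init__(self):
        self._entities = []                  # the only strong references
        self._id_map = WeakValueDictionary()

    def add(self, item):
        self._entities.append(item)
        self._id_map[item.id] = item

    def get_by_id(self, item_id):
        return self._id_map.get(item_id)


cache = TinyCache()
cache.add(Item(1))
print(cache.get_by_id(1) is not None)        # True
cache._entities.clear()                      # drop the strong reference
gc.collect()
print(cache.get_by_id(1))                    # None -- the weak id map self-cleaned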
Example n. 46
0
class Agent(AgentT, Service):
    """Agent.

    This is the type of object returned by the ``@app.agent`` decorator.
    """

    # supervisor is None until the agent is started so we cast to simplify.
    supervisor: SupervisorStrategyT = cast(SupervisorStrategyT, None)

    # channel is loaded lazily on .channel property access
    # to make sure configuration is not accessed when agent created
    # at module-scope.
    _channel: Optional[ChannelT] = None
    _channel_arg: Optional[Union[str, ChannelT]]
    _channel_kwargs: Dict[str, Any]
    _channel_iterator: Optional[AsyncIterator] = None
    _sinks: List[SinkT]

    _actors: MutableSet[ActorRefT]
    _actor_by_partition: MutableMapping[TP, ActorRefT]

    #: This mutable set is used by the first agent we start,
    #: so that we can update its active_partitions later
    #: (in on_partitions_assigned, when we know what partitions we get).
    _pending_active_partitions: Optional[Set[TP]] = None

    _first_assignment_done: bool = False

    def __init__(
        self,
        fun: AgentFun,
        *,
        app: AppT,
        name: str = None,
        channel: Union[str, ChannelT] = None,
        concurrency: int = 1,
        sink: Iterable[SinkT] = None,
        on_error: AgentErrorHandler = None,
        supervisor_strategy: Type[SupervisorStrategyT] = None,
        help: str = None,
        schema: SchemaT = None,
        key_type: ModelArg = None,
        value_type: ModelArg = None,
        isolated_partitions: bool = False,
        use_reply_headers: bool = None,
        **kwargs: Any,
    ) -> None:
        self.app = app
        self.fun: AgentFun = fun
        self.name = name or canonshortname(self.fun)
        # key-type/value_type arguments only apply when a channel
        # is not set
        if schema is not None:
            assert channel is None or isinstance(channel, str)
        if key_type is not None:
            assert channel is None or isinstance(channel, str)
        self._key_type = key_type
        if value_type is not None:
            assert channel is None or isinstance(channel, str)
        self._schema = schema
        self._value_type = value_type
        self._channel_arg = channel
        self._channel_kwargs = kwargs
        self.concurrency = concurrency or 1
        self.isolated_partitions = isolated_partitions
        self.help = help or ""
        self._sinks = list(sink) if sink is not None else []
        self._on_error: Optional[AgentErrorHandler] = on_error
        self.supervisor_strategy = supervisor_strategy
        self._actors = WeakSet()
        self._actor_by_partition = WeakValueDictionary()
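        # Note: _actors is a WeakSet and _actor_by_partition a
        # WeakValueDictionary, so neither index keeps an actor object
        # alive on its own.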
        if self.isolated_partitions and self.concurrency > 1:
            raise ImproperlyConfigured(
                "Agent concurrency must be 1 when using isolated partitions")
        self.use_reply_headers = use_reply_headers
        Service.__init__(self)

    def on_init_dependencies(self) -> Iterable[ServiceT]:
        """Return list of services dependencies required to start agent."""
        # Agent service is now a child of app.
        self.beacon.reattach(self.app.agents.beacon)
        return []

    def actor_tracebacks(self) -> List[str]:
        return [actor.traceback() for actor in self._actors]

    async def _start_one(
        self,
        *,
        index: Optional[int] = None,
        active_partitions: Optional[Set[TP]] = None,
        stream: StreamT = None,
        channel: ChannelT = None,
    ) -> ActorT:
        # an index of None means there's only one instance,
        # and `index is None` is used as a test by functions that
        # disallow concurrency.
        index = index if self.concurrency > 1 else None
        return await self._start_task(
            index=index,
            active_partitions=active_partitions,
            stream=stream,
            channel=channel,
            beacon=self.beacon,
        )

    async def _start_one_supervised(
        self,
        index: Optional[int] = None,
        active_partitions: Optional[Set[TP]] = None,
        stream: StreamT = None,
    ) -> ActorT:
        aref = await self._start_one(
            index=index,
            active_partitions=active_partitions,
            stream=stream,
        )
        self.supervisor.add(aref)
        await aref.maybe_start()
        return aref

    async def _start_for_partitions(self,
                                    active_partitions: Set[TP]) -> ActorT:
        assert active_partitions
        self.log.info("Starting actor for partitions %s", active_partitions)
        return await self._start_one_supervised(None, active_partitions)

    async def on_start(self) -> None:
        """Call when an agent starts."""
        self.supervisor = self._new_supervisor()
        await self._on_start_supervisor()

    def _new_supervisor(self) -> SupervisorStrategyT:
        return self._get_supervisor_strategy()(
            max_restarts=100.0,
            over=1.0,
            replacement=self._replace_actor,
            loop=self.loop,
            beacon=self.beacon,
        )

    async def _replace_actor(self, service: ServiceT, index: int) -> ServiceT:
        aref = cast(ActorRefT, service)
        return await self._start_one(
            index=index,
            active_partitions=aref.active_partitions,
            stream=aref.stream,
            channel=cast(ChannelT, aref.stream.channel),
        )

    def _get_supervisor_strategy(self) -> Type[SupervisorStrategyT]:
        SupervisorStrategy = self.supervisor_strategy
        if SupervisorStrategy is None:
            return cast(Type[SupervisorStrategyT],
                        self.app.conf.agent_supervisor)
        else:
            return SupervisorStrategy

    async def _on_start_supervisor(self) -> None:
        active_partitions = self._get_active_partitions()
        channel: ChannelT = cast(ChannelT, None)
        for i in range(self.concurrency):
            res = await self._start_one(
                index=i,
                active_partitions=active_partitions,
                channel=channel,
            )
            if channel is None:
                # First concurrency actor creates channel,
                # then we reuse it for --concurrency=n.
                # This way they share the same queue.
                channel = res.stream.channel
            self.supervisor.add(res)
        await self.supervisor.start()

    def _get_active_partitions(self) -> Optional[Set[TP]]:
        active_partitions: Optional[Set[TP]] = None
        if self.isolated_partitions:
            # when we start our first agent, we create the set of
            # partitions early, and save it in ._pending_active_partitions.
            # That way we can update the set once partitions are assigned,
            # and the actor we started may be assigned one of the partitions.
            active_partitions = self._pending_active_partitions = set()
        return active_partitions

    async def on_stop(self) -> None:
        """Call when an agent stops."""
        # Agents iterate over infinite streams, so we cannot wait for them
        # to stop.
        # Instead we cancel them, and this forces the stream to ack the
        # last message processed (but not the message causing the error
        # to be raised).
        await self._stop_supervisor()
        with suppress(asyncio.CancelledError):
            await asyncio.gather(*[
                aref.actor_task for aref in self._actors
                if aref.actor_task is not None
            ])
        self._actors.clear()

    async def _stop_supervisor(self) -> None:
        if self.supervisor:
            await self.supervisor.stop()
            self.supervisor = cast(SupervisorStrategyT, None)

    def cancel(self) -> None:
        """Cancel agent and its actor instances running in this process."""
        for aref in self._actors:
            aref.cancel()

    async def on_partitions_revoked(self, revoked: Set[TP]) -> None:
        """Call when partitions are revoked."""
        T = traced_from_parent_span()
        if self.isolated_partitions:
            # isolated: start/stop actors for each partition
            await T(self.on_isolated_partitions_revoked)(revoked)
        else:
            await T(self.on_shared_partitions_revoked)(revoked)

    async def on_partitions_assigned(self, assigned: Set[TP]) -> None:
        """Call when partitions are assigned."""
        T = traced_from_parent_span()
        if self.isolated_partitions:
            await T(self.on_isolated_partitions_assigned)(assigned)
        else:
            await T(self.on_shared_partitions_assigned)(assigned)

    async def on_isolated_partitions_revoked(self, revoked: Set[TP]) -> None:
        """Call when isolated partitions are revoked."""
        self.log.dev("Partitions revoked")
        T = traced_from_parent_span()
        for tp in revoked:
            aref: Optional[ActorRefT] = self._actor_by_partition.pop(tp, None)
            if aref is not None:
                await T(aref.on_isolated_partition_revoked)(tp)

    async def on_isolated_partitions_assigned(self, assigned: Set[TP]) -> None:
        """Call when isolated partitions are assigned."""
        T = traced_from_parent_span()
        for tp in sorted(assigned):
            await T(self._assign_isolated_partition)(tp)

    async def _assign_isolated_partition(self, tp: TP) -> None:
        T = traced_from_parent_span()
        if not self._first_assignment_done and not self._actor_by_partition:
            self._first_assignment_done = True
            # if this is the first time we are assigned
            # we need to reassign the agent we started at boot to
            # one of the partitions.
            T(self._on_first_isolated_partition_assigned)(tp)
        await T(self._maybe_start_isolated)(tp)

    def _on_first_isolated_partition_assigned(self, tp: TP) -> None:
        assert self._actors
        assert len(self._actors) == 1
        self._actor_by_partition[tp] = next(iter(self._actors))
        if self._pending_active_partitions is not None:
            assert not self._pending_active_partitions
            self._pending_active_partitions.add(tp)

    async def _maybe_start_isolated(self, tp: TP) -> None:
        try:
            aref = self._actor_by_partition[tp]
        except KeyError:
            aref = await self._start_isolated(tp)
            self._actor_by_partition[tp] = aref
        await aref.on_isolated_partition_assigned(tp)

    async def _start_isolated(self, tp: TP) -> ActorT:
        return await self._start_for_partitions({tp})

    async def on_shared_partitions_revoked(self, revoked: Set[TP]) -> None:
        """Call when non-isolated partitions are revoked."""
        ...

    async def on_shared_partitions_assigned(self, assigned: Set[TP]) -> None:
        """Call when non-isolated partitions are assigned."""
        ...

    def info(self) -> Mapping:
        """Return agent attributes as a dictionary."""
        return {
            "app": self.app,
            "fun": self.fun,
            "name": self.name,
            "channel": self.channel,
            "concurrency": self.concurrency,
            "help": self.help,
            "sink": self._sinks,
            "on_error": self._on_error,
            "supervisor_strategy": self.supervisor_strategy,
            "isolated_partitions": self.isolated_partitions,
        }

    def clone(self, *, cls: Type[AgentT] = None, **kwargs: Any) -> AgentT:
        """Create clone of this agent object.

        Keyword arguments can be passed to override any argument
        supported by :class:`Agent.__init__ <Agent>`.
        """
        return (cls or type(self))(**{**self.info(), **kwargs})

    def test_context(
        self,
        channel: ChannelT = None,
        supervisor_strategy: SupervisorStrategyT = None,
        on_error: AgentErrorHandler = None,
        **kwargs: Any,
    ) -> AgentTestWrapperT:  # pragma: no cover
        """Create new unit-testing wrapper for this agent."""
        # flow control into channel queues is disabled at startup,
        # so we need to resume it here.
        self.app.flow_control.resume()

        async def on_agent_error(agent: AgentT, exc: BaseException) -> None:
            if on_error is not None:
                await on_error(agent, exc)
            await cast(AgentTestWrapper, agent).crash_test_agent(exc)

        return cast(
            AgentTestWrapperT,
            self.clone(
                cls=AgentTestWrapper,
                channel=channel if channel is not None else self.app.channel(),
                supervisor_strategy=supervisor_strategy or CrashingSupervisor,
                original_channel=self.channel,
                on_error=on_agent_error,
                **kwargs,
            ),
        )

    def _prepare_channel(
        self,
        channel: Union[str, ChannelT] = None,
        internal: bool = True,
        schema: SchemaT = None,
        key_type: ModelArg = None,
        value_type: ModelArg = None,
        **kwargs: Any,
    ) -> ChannelT:
        app = self.app
        has_prefix = False
        if channel is None:
            channel = f"{app.conf.id}-{self.name}"
            has_prefix = True
        if isinstance(channel, ChannelT):
            return channel
        elif isinstance(channel, str):
            return app.topic(
                channel,
                internal=internal,
                schema=schema,
                key_type=key_type,
                value_type=value_type,
                has_prefix=has_prefix,
                **kwargs,
            )
        raise TypeError(
            f"Channel must be channel, topic, or str; not {type(channel)}")

    def __call__(
        self,
        *,
        index: int = None,
        active_partitions: Set[TP] = None,
        stream: StreamT = None,
        channel: ChannelT = None,
    ) -> ActorRefT:
        """Create new actor instance for this agent."""
        # The agent function can be reused by other agents/tasks.
        # For example:
        #
        #   @app.agent(logs_topic, through='other-topic')
        #   async def filter_log_errors(stream):
        #       async for event in stream:
        #           if event.severity == 'error':
        #               yield event
        #
        #   @app.agent(logs_topic)
        #   async def alert_on_log_error(stream):
        #       async for event in filter_log_errors(stream):
        #           alert(f'Error occurred: {event!r}')
        #
        # Calling `res = filter_log_errors(it)` will leave you with
        # an AsyncIterable that you can reuse (but only if the agent
        # function is an `async def` function that yields).
        return self.actor_from_stream(stream,
                                      index=index,
                                      active_partitions=active_partitions,
                                      channel=channel)

    def actor_from_stream(
        self,
        stream: Optional[StreamT],
        *,
        index: int = None,
        active_partitions: Set[TP] = None,
        channel: ChannelT = None,
    ) -> ActorRefT:
        """Create new actor from stream."""
        we_created_stream = False
        actual_stream: StreamT
        if stream is None:
            actual_stream = self.stream(
                channel=channel,
                concurrency_index=index,
                active_partitions=active_partitions,
            )
            we_created_stream = True
        else:
            # reusing actor stream after agent restart
            assert stream.concurrency_index == index
            assert stream.active_partitions == active_partitions
            actual_stream = stream

        res = self.fun(actual_stream)
        if isinstance(res, AsyncIterable):
            if we_created_stream:
                actual_stream.add_processor(self._maybe_unwrap_reply_request)
            return cast(
                ActorRefT,
                AsyncIterableActor(
                    self,
                    actual_stream,
                    res,
                    index=actual_stream.concurrency_index,
                    active_partitions=actual_stream.active_partitions,
                    loop=self.loop,
                    beacon=self.beacon,
                ),
            )
        else:
            return cast(
                ActorRefT,
                AwaitableActor(
                    self,
                    actual_stream,
                    res,
                    index=actual_stream.concurrency_index,
                    active_partitions=actual_stream.active_partitions,
                    loop=self.loop,
                    beacon=self.beacon,
                ),
            )

    def add_sink(self, sink: SinkT) -> None:
        """Add new sink to further handle results from this agent."""
        if sink not in self._sinks:
            self._sinks.append(sink)

    def stream(self,
               channel: ChannelT = None,
               active_partitions: Set[TP] = None,
               **kwargs: Any) -> StreamT:
        """Create underlying stream used by this agent."""
        if channel is None:
            channel = cast(TopicT, self.channel_iterator).clone(
                is_iterator=False,
                active_partitions=active_partitions,
            )
        if active_partitions is not None:
            assert channel.active_partitions == active_partitions
        s = self.app.stream(
            channel,
            loop=self.loop,
            active_partitions=active_partitions,
            prefix=self.name,
            beacon=self.beacon,
            **kwargs,
        )
        return s

    def _maybe_unwrap_reply_request(self, value: V) -> Any:
        if isinstance(value, ReqRepRequest):
            return value.value
        return value

    async def _start_task(
        self,
        *,
        index: Optional[int],
        active_partitions: Optional[Set[TP]] = None,
        stream: StreamT = None,
        channel: ChannelT = None,
        beacon: NodeT = None,
    ) -> ActorRefT:
        # If the agent is an async function we simply start it,
        # if it returns an AsyncIterable/AsyncGenerator we start a task
        # that will consume it.
        actor = self(
            index=index,
            active_partitions=active_partitions,
            stream=stream,
            channel=channel,
        )
        return await self._prepare_actor(
            actor, beacon if beacon is not None else self.beacon)

    async def _prepare_actor(self, aref: ActorRefT,
                             beacon: NodeT) -> ActorRefT:
        coro: Any
        if isinstance(aref, Awaitable):
            # agent does not yield
            coro = aref
            if self._sinks:
                raise ImproperlyConfigured("Agent must yield to use sinks")
        else:
            # agent yields and is an AsyncIterator so we have to consume it.
            coro = self._slurp(aref, aiter(aref))
        task = asyncio.Task(self._execute_actor(coro, aref), loop=self.loop)
        task._beacon = beacon  # type: ignore
        aref.actor_task = task
        self._actors.add(aref)
        return aref

    async def _execute_actor(self, coro: Awaitable, aref: ActorRefT) -> None:
        # This executes the agent task itself, and does exception handling.
        _current_agent.set(self)
        try:
            await coro
        except asyncio.CancelledError:
            if self.should_stop:
                raise
        except Exception as exc:
            if self._on_error is not None:
                await self._on_error(self, exc)

            # Mark ActorRef as dead, so that supervisor thread
            # can start a new one.
            await aref.crash(exc)
            self.supervisor.wakeup()

    async def _slurp(self, res: ActorRefT, it: AsyncIterator) -> None:
        # this is used when the agent returns an AsyncIterator,
        # and simply consumes that async iterator.
        stream: Optional[StreamT] = None
        async for value in it:
            self.log.debug("%r yielded: %r", self.fun, value)
            if stream is None:
                stream = res.stream.get_active_stream()
            event = stream.current_event
            if event is not None:
                headers = event.headers
                reply_to: Optional[str] = None
                correlation_id: Optional[str] = None
                if isinstance(event.value, ReqRepRequest):
                    req: ReqRepRequest = event.value
                    reply_to = req.reply_to
                    correlation_id = req.correlation_id
                elif headers:
                    reply_to_bytes = headers.get("Faust-Ag-ReplyTo")
                    if reply_to_bytes:
                        reply_to = want_str(reply_to_bytes)
                        correlation_id_bytes = headers.get(
                            "Faust-Ag-CorrelationId")
                        if correlation_id_bytes:
                            correlation_id = want_str(correlation_id_bytes)
                if reply_to is not None:
                    await self._reply(event.key, value, reply_to,
                                      cast(str, correlation_id))
            await self._delegate_to_sinks(value)

    async def _delegate_to_sinks(self, value: Any) -> None:
        for sink in self._sinks:
            if isinstance(sink, AgentT):
                await sink.send(value=value)
            elif isinstance(sink, ChannelT):
                await cast(TopicT, sink).send(value=value)
            else:
                await maybe_async(cast(Callable, sink)(value))

    async def _reply(self, key: Any, value: Any, reply_to: str,
                     correlation_id: str) -> None:
        assert reply_to
        response = self._response_class(value)(
            key=key,
            value=value,
            correlation_id=correlation_id,
        )
        await self.app.send(
            reply_to,
            key=None,
            value=response,
        )

    def _response_class(self, value: Any) -> Type[ReqRepResponse]:
        if isinstance(value, ModelT):
            return ModelReqRepResponse
        return ReqRepResponse

    async def cast(
        self,
        value: V = None,
        *,
        key: K = None,
        partition: int = None,
        timestamp: float = None,
        headers: HeadersArg = None,
    ) -> None:
        """RPC operation: like :meth:`ask` but do not expect reply.

        Cast here is like "casting a spell", and will not expect
        a reply back from the agent.
        """
        await self.send(
            key=key,
            value=value,
            partition=partition,
            timestamp=timestamp,
            headers=headers,
        )

    async def ask(
        self,
        value: V = None,
        *,
        key: K = None,
        partition: int = None,
        timestamp: float = None,
        headers: HeadersArg = None,
        reply_to: ReplyToArg = None,
        correlation_id: str = None,
    ) -> Any:
        """RPC operation: ask agent for result of processing value.

        This version will wait until the result is available
        and return the processed value.
        """
        p = await self.ask_nowait(
            value,
            key=key,
            partition=partition,
            timestamp=timestamp,
            headers=headers,
            reply_to=reply_to or self.app.conf.reply_to,
            correlation_id=correlation_id,
            force=True,  # Send immediately, since we are waiting for result.
        )
        app = cast(_App, self.app)
        await app._reply_consumer.add(p.correlation_id, p)
        await app.maybe_start_client()
        return await p

    async def ask_nowait(
        self,
        value: V = None,
        *,
        key: K = None,
        partition: int = None,
        timestamp: float = None,
        headers: HeadersArg = None,
        reply_to: ReplyToArg = None,
        correlation_id: str = None,
        force: bool = False,
    ) -> ReplyPromise:
        """RPC operation: ask agent for result of processing value.

        This version does not wait for the result to arrive,
        but instead returns a promise of future evaluation.
        """
        if reply_to is None:
            raise TypeError("Missing reply_to argument")
        reply_to = self._get_strtopic(reply_to)
        correlation_id = correlation_id or str(uuid4())
        value, headers = self._create_req(key, value, reply_to, correlation_id,
                                          headers)
        await self.channel.send(
            key=key,
            value=value,
            partition=partition,
            timestamp=timestamp,
            headers=headers,
            force=force,
        )
        return ReplyPromise(reply_to, correlation_id)

    def _create_req(
        self,
        key: K = None,
        value: V = None,
        reply_to: ReplyToArg = None,
        correlation_id: str = None,
        headers: HeadersArg = None,
    ) -> Tuple[V, Optional[HeadersArg]]:
        if reply_to is None:
            raise TypeError("Missing reply_to argument")
        topic_name = self._get_strtopic(reply_to)
        correlation_id = correlation_id or str(uuid4())
        open_headers = prepare_headers(headers or {})
        if self.use_reply_headers:
            merge_headers(
                open_headers,
                {
                    "Faust-Ag-ReplyTo": want_bytes(topic_name),
                    "Faust-Ag-CorrelationId": want_bytes(correlation_id),
                },
            )
            return value, open_headers
        else:
            # wrap value in envelope
            req = self._request_class(value)(
                value=value,
                reply_to=topic_name,
                correlation_id=correlation_id,
            )
            return req, open_headers

    def _request_class(self, value: V) -> Type[ReqRepRequest]:
        if isinstance(value, ModelT):
            return ModelReqRepRequest
        return ReqRepRequest

    async def send(
        self,
        *,
        key: K = None,
        value: V = None,
        partition: int = None,
        timestamp: float = None,
        headers: HeadersArg = None,
        key_serializer: CodecArg = None,
        value_serializer: CodecArg = None,
        callback: MessageSentCallback = None,
        reply_to: ReplyToArg = None,
        correlation_id: str = None,
        force: bool = False,
    ) -> Awaitable[RecordMetadata]:
        """Send message to topic used by agent."""
        if reply_to:
            value, headers = self._create_req(key, value, reply_to,
                                              correlation_id, headers)
        return await self.channel.send(
            key=key,
            value=value,
            partition=partition,
            timestamp=timestamp,
            headers=headers,
            key_serializer=key_serializer,
            value_serializer=value_serializer,
            force=force,
        )

    def _get_strtopic(self, topic: Union[str, ChannelT, TopicT,
                                         AgentT]) -> str:
        if isinstance(topic, AgentT):
            return self._get_strtopic(topic.channel)
        if isinstance(topic, TopicT):
            return topic.get_topic_name()
        if isinstance(topic, ChannelT):
            raise ValueError("Channels are unnamed topics")
        return topic

    async def map(
        self,
        values: Union[AsyncIterable, Iterable],
        key: K = None,
        reply_to: ReplyToArg = None,
    ) -> AsyncIterator:  # pragma: no cover
        """RPC map operation on a list of values.

        A map operation iterates over results as they arrive.
        See :meth:`join` and :meth:`kvjoin` if you want them in order.
        """
        # Map takes only values, but can provide one key that is used for all.
        async for value in self.kvmap(((key, v) async for v in aiter(values)),
                                      reply_to):
            yield value

    async def kvmap(
        self,
        items: Union[AsyncIterable[Tuple[K, V]], Iterable[Tuple[K, V]]],
        reply_to: ReplyToArg = None,
    ) -> AsyncIterator[str]:  # pragma: no cover
        """RPC map operation on a list of ``(key, value)`` pairs.

        A map operation iterates over results as they arrive.
        See :meth:`join` and :meth:`kvjoin` if you want them in order.
        """
        # kvmap takes (key, value) pairs.
        reply_to = self._get_strtopic(reply_to or self.app.conf.reply_to)

        # BarrierState is the promise that keeps track of pending results.
        # It contains a list of individual ReplyPromises.
        barrier = BarrierState(reply_to)

        async for _ in self._barrier_send(barrier, items, reply_to):
            # Now that we've sent a message, try to see if we have any
            # replies.
            try:
                _, val = barrier.get_nowait()
            except asyncio.QueueEmpty:
                pass
            else:
                yield val
        # All the messages have been sent so finalize the barrier.
        barrier.finalize()

        # Then iterate over the results in the group.
        async for _, value in barrier.iterate():
            yield value

    async def join(
        self,
        values: Union[AsyncIterable[V], Iterable[V]],
        key: K = None,
        reply_to: ReplyToArg = None,
    ) -> List[Any]:  # pragma: no cover
        """RPC map operation on a list of values.

        A join returns the results in order, and only returns once
        all values have been processed.
        """
        return await self.kvjoin(
            ((key, value) async for value in aiter(values)),
            reply_to=reply_to,
        )

    async def kvjoin(
        self,
        items: Union[AsyncIterable[Tuple[K, V]], Iterable[Tuple[K, V]]],
        reply_to: ReplyToArg = None,
    ) -> List[Any]:  # pragma: no cover
        """RPC map operation on list of ``(key, value)`` pairs.

        A join returns the results in order, and only returns once
        all values have been processed.
        """
        reply_to = self._get_strtopic(reply_to or self.app.conf.reply_to)
        barrier = BarrierState(reply_to)

        # Map correlation_id -> index
        posindex: MutableMapping[str, int] = {
            cid: i
            async for i, cid in aenumerate(
                self._barrier_send(barrier, items, reply_to))
        }

        # All the messages have been sent so finalize the barrier.
        barrier.finalize()

        # wait until all replies received
        await barrier
        # then construct a list in the correct order.
        values: List = [None] * barrier.total
        async for correlation_id, value in barrier.iterate():
            values[posindex[correlation_id]] = value
        return values

    async def _barrier_send(
        self,
        barrier: BarrierState,
        items: Union[AsyncIterable[Tuple[K, V]], Iterable[Tuple[K, V]]],
        reply_to: ReplyToArg,
    ) -> AsyncIterator[str]:  # pragma: no cover
        # map: send many tasks to agents
        # while trying to pop incoming results off.
        key: K
        value: V
        async for key, value in aiter(items):  # type: ignore
            correlation_id = str(uuid4())
            p = await self.ask_nowait(key=key,
                                      value=value,
                                      reply_to=reply_to,
                                      correlation_id=correlation_id)
            # add reply promise to the barrier
            barrier.add(p)

            # the ReplyConsumer will call the barrier whenever a new
            # result comes in.
            app = cast(_App, self.app)
            await app.maybe_start_client()
            await app._reply_consumer.add(p.correlation_id, barrier)

            yield correlation_id

    def _repr_info(self) -> str:
        return shorten_fqdn(self.name)

    def get_topic_names(self) -> Iterable[str]:
        """Return list of topic names this agent subscribes to."""
        channel = self.channel
        if isinstance(channel, TopicT):
            return channel.topics
        return []

    @property
    def channel(self) -> ChannelT:
        """Return channel used by agent."""
        if self._channel is None:
            self._channel = self._prepare_channel(
                self._channel_arg,
                schema=self._schema,
                key_type=self._key_type,
                value_type=self._value_type,
                **self._channel_kwargs,
            )
        return self._channel

    @channel.setter
    def channel(self, channel: ChannelT) -> None:
        self._channel = channel

    @property
    def channel_iterator(self) -> AsyncIterator:
        """Return channel agent iterates over."""
        # The channel is "memoized" here, so subsequent access to
        # instance.channel_iterator will return the same value.
        if self._channel_iterator is None:
            # we do not use aiter(channel) here, because
            # that will also add it to the topic conductor too early.
            self._channel_iterator = self.channel.clone(is_iterator=False)
        return self._channel_iterator

    @channel_iterator.setter
    def channel_iterator(self, it: AsyncIterator) -> None:
        self._channel_iterator = it

    @property
    def label(self) -> str:
        """Return human-readable description of agent."""
        return self._agent_label()

    def _agent_label(self, name_suffix: str = "") -> str:
        s = f"{type(self).__name__}{name_suffix}: "
        s += f"{shorten_fqdn(qualname(self.fun))}"
        return s

    @property
    def shortlabel(self) -> str:
        """Return short description of agent."""
        return self._agent_label()
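
# --- Illustrative sketch (not part of the Agent code above) ---
# The ask()/ask_nowait() flow above boils down to: tag the outgoing message
# with a correlation id and a reply-to address, register a promise under that
# id, and resolve the promise when a reply carrying the same id arrives.
# The stdlib-only asyncio sketch below mirrors that idea with plain futures;
# ReplyBroker, send_request and deliver_reply are invented names, not Faust
# APIs.

import asyncio
import uuid


class ReplyBroker:
    """Maps correlation ids to futures awaiting a reply."""

    def __init__(self):
        self._pending = {}

    def send_request(self, value):
        correlation_id = str(uuid.uuid4())
        future = asyncio.get_running_loop().create_future()
        self._pending[correlation_id] = future
        # A real implementation would now produce `value` to the agent's
        # topic together with the correlation id and reply-to address.
        return correlation_id, future

    def deliver_reply(self, correlation_id, result):
        future = self._pending.pop(correlation_id, None)
        if future is not None and not future.done():
            future.set_result(result)


async def _reply_broker_demo():
    broker = ReplyBroker()
    cid, promise = broker.send_request({"n": 2})
    # Pretend the agent processed the request and replied a moment later.
    asyncio.get_running_loop().call_later(
        0.01, broker.deliver_reply, cid, {"n": 2, "squared": 4})
    print(await promise)


if __name__ == "__main__":
    asyncio.run(_reply_broker_demo())
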
Esempio n. 47
class TaskManager(object):
    """
    Provides a set of tools to maintain a list of asyncio Tasks that are to be
    executed during the lifetime of an arbitrary object, usually getting killed with it.
    """
    def __init__(self):
        self._pending_tasks = WeakValueDictionary()
        self._task_lock = RLock()
        self._shutdown = False
        self._counter = 0
        self._logger = logging.getLogger(self.__class__.__name__)

        self._checker = self.register_task('_check_tasks',
                                           self._check_tasks,
                                           interval=MAX_TASK_AGE,
                                           delay=MAX_TASK_AGE * 1.5)

    def _check_tasks(self):
        now = time.time()
        for name, task in self._pending_tasks.items():
            if not task.interval and now - task.start_time > MAX_TASK_AGE:
                self._logger.warning(
                    'Non-interval task "%s" has been running for %.2f seconds!', name,
                    now - task.start_time)

    def replace_task(self, name, *args, **kwargs):
        """
        Replace named task with the new one, cancelling the old one in the process.
        """
        new_task = Future()

        def cancel_cb(_):
            try:
                new_task.set_result(self.register_task(name, *args, **kwargs))
            except Exception as e:
                new_task.set_exception(e)

        old_task = self.cancel_pending_task(name)
        old_task.add_done_callback(cancel_cb)
        return new_task

    def register_task(self,
                      name,
                      task,
                      *args,
                      delay=None,
                      interval=None,
                      ignore=()):
        """
        Register a Task/(coroutine)function so it can be canceled at shutdown time or by name.
        """
        if not isinstance(
                task,
                Task) and not iscoroutinefunction(task) and not callable(task):
            raise ValueError(
                'Register_task takes a Task or a (coroutine)function as a parameter'
            )
        if (interval or delay) and isinstance(task, Task):
            raise ValueError('Cannot run Task at an interval or with a delay')
        if not isinstance(ignore, tuple) or not all(
            (issubclass(e, Exception) for e in ignore)):
            raise ValueError('Ignore should be a tuple of Exceptions or None')

        with self._task_lock:
            if self._shutdown:
                self._logger.warning("Not adding task %s due to shutdown!",
                                     str(task))
                if isinstance(task, (Task, Future)):
                    if not task.done():
                        task.cancel()
                return task

            if self.is_pending_task_active(name):
                raise RuntimeError("Task already exists: '%s'" % name)

            if iscoroutinefunction(task) or callable(task):
                task = task if iscoroutinefunction(task) else coroutine(task)
                if interval:
                    # The default delay for looping calls is the same as the interval
                    delay = interval if delay is None else delay
                    task = ensure_future(
                        interval_runner(delay, interval, task, *args))
                elif delay:
                    task = ensure_future(delay_runner(delay, task, *args))
                else:
                    task = ensure_future(task(*args))
            # Since weak references to list/tuple are not allowed, we're not storing start_time/interval
            # in _pending_tasks. Instead we add them as attributes to the task.
            task.start_time = time.time()
            task.interval = interval

            assert isinstance(task, Task)

            def done_cb(future):
                self._pending_tasks.pop(name, None)
                try:
                    future.result()
                except CancelledError:
                    pass
                except ignore as e:
                    self._logger.error('Task resulted in error: %s', e)

            self._pending_tasks[name] = task
            task.add_done_callback(done_cb)
            return task

    def register_anonymous_task(self, basename, task, *args, **kwargs):
        """
        Wrapper for register_task to derive a unique name from the basename.
        """
        self._counter += 1
        return self.register_task(basename + ' ' + str(self._counter), task,
                                  *args, **kwargs)

    def cancel_pending_task(self, name):
        """
        Cancels the named task
        """
        with self._task_lock:
            task = self._pending_tasks.get(name, None)
            if not task:
                return succeed(None)

            if not task.done():
                task.cancel()
                self._pending_tasks.pop(name, None)
            return task

    def cancel_all_pending_tasks(self):
        """
        Cancels all the registered tasks.
        This usually should be called when stopping or destroying the object so no tasks are left floating around.
        """
        with self._task_lock:
            assert all([
                isinstance(t, (Task, Future))
                for t in self._pending_tasks.values()
            ]), self._pending_tasks
            return [
                self.cancel_pending_task(name)
                for name in list(self._pending_tasks.keys())
            ]

    def is_pending_task_active(self, name):
        """
        Return a boolean determining if a task is active.
        """
        with self._task_lock:
            task = self._pending_tasks.get(name, None)
            return not task.done() if task else False

    def get_tasks(self):
        """
        Returns a list of all registered tasks, excluding tasks that are created by the TaskManager itself.
        """
        with self._task_lock:
            return [
                t for t in self._pending_tasks.values() if t != self._checker
            ]

    async def wait_for_tasks(self):
        """
        Waits until all registered tasks are done.
        """
        with self._task_lock:
            tasks = self.get_tasks()
            if tasks:
                await gather(*tasks, return_exceptions=True)

    async def shutdown_task_manager(self):
        """
        Clear the task manager, cancel all pending tasks and disallow new tasks being added.
        """
        with self._task_lock:
            self._shutdown = True
            tasks = self.cancel_all_pending_tasks()
            if tasks:
                with suppress(CancelledError):
                    await gather(*tasks)
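
# --- Illustrative sketch (not part of TaskManager above) ---
# The essence of the example above: keep named asyncio tasks in a registry,
# drop each entry when its task finishes, and cancel whatever is left at
# shutdown.  A stdlib-only sketch; NamedTasks is an invented name and this
# skips the interval/delay and locking machinery of the real class.

import asyncio


class NamedTasks:
    def __init__(self):
        self._tasks = {}

    def register(self, name, coro):
        if name in self._tasks and not self._tasks[name].done():
            raise RuntimeError("Task already exists: %r" % name)
        task = asyncio.ensure_future(coro)
        # Pop the entry as soon as the task is done, like done_cb above.
        task.add_done_callback(lambda _t: self._tasks.pop(name, None))
        self._tasks[name] = task
        return task

    def cancel(self, name):
        task = self._tasks.pop(name, None)
        if task is not None and not task.done():
            task.cancel()
        return task

    async def shutdown(self):
        pending = [self.cancel(name) for name in list(self._tasks)]
        await asyncio.gather(*(t for t in pending if t is not None),
                             return_exceptions=True)


async def _named_tasks_demo():
    manager = NamedTasks()
    manager.register("ticker", asyncio.sleep(10))
    await asyncio.sleep(0.01)
    await manager.shutdown()
    print("all named tasks cancelled")


if __name__ == "__main__":
    asyncio.run(_named_tasks_demo())
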
Esempio n. 48
class RedisStore():
    def __init__(self, db_host, db_port, db_num, db_pw):
        self.pool = ConnectionPool(max_connections=2, db=db_num, host=db_host, port=db_port, password=db_pw,
                                   decode_responses=True)
        self.redis = StrictRedis(connection_pool=self.pool)
        self.redis.ping()
        self._object_map = WeakValueDictionary()

    def create_object(self, dbo_class, dbo_dict, update_timestamp=True):
        dbo_class = get_dbo_class(getattr(dbo_class, 'dbo_key_type', dbo_class))
        if not dbo_class:
            return
        try:
            dbo_id = dbo_dict['dbo_id']
        except KeyError:
            dbo_id, dbo_dict = dbo_dict, {}
        if dbo_id is None or dbo_id == '':
            warn("create_object called with empty dbo_id")
            return
        dbo_id = str(dbo_id).lower()
        if self.object_exists(dbo_class.dbo_key_type, dbo_id):
            raise ObjectExistsError(dbo_id)
        dbo = dbo_class()
        dbo.dbo_id = dbo_id
        dbo.hydrate(dbo_dict)
        dbo.db_created()
        if dbo.dbo_set_key:
            self.redis.sadd(dbo.dbo_set_key, dbo.dbo_id)
        self.save_object(dbo, update_timestamp)
        return dbo

    def save_object(self, dbo, update_timestamp=False, autosave=False):
        if update_timestamp:
            dbo.dbo_ts = int(time.time())
        if dbo.dbo_indexes:
            self._update_indexes(dbo)
        self._clear_old_refs(dbo)
        save_root, new_refs = dbo.to_db_value()
        self.redis.set(dbo.dbo_key, json_encode(save_root))
        if new_refs:
            self._set_new_refs(dbo, new_refs)
        debug("db object {} {}saved", dbo.dbo_key, "auto" if autosave else "")
        self._object_map[dbo.dbo_key] = dbo
        return dbo

    def save_raw(self, key, raw):
        self.redis.set(key, json_encode(raw))

    def load_raw(self, key, default=None):
        json = self.redis.get(key)
        if json:
            return json_decode(json)
        return default

    def load_cached(self, dbo_key):
        return self._object_map.get(dbo_key)

    def load_object(self, dbo_key, key_type=None, silent=False):
        if key_type:
            try:
                key_type = key_type.dbo_key_type
            except AttributeError:
                pass
            try:
                dbo_key, dbo_id = ':'.join([key_type, dbo_key]), dbo_key
            except TypeError:
                if not silent:
                    exception("Invalid dbo_key passed to load_object", stack_info=True)
                return
        else:
            key_type, _, dbo_id = dbo_key.partition(':')
        cached_dbo = self._object_map.get(dbo_key)
        if cached_dbo:
            return cached_dbo
        json_str = self.redis.get(dbo_key)
        if not json_str:
            if not silent:
                warn("Failed to find {} in database", dbo_key)
            return
        return self.load_from_json(json_str, key_type, dbo_id)

    def load_from_json(self, json_str, key_type, dbo_id):
        dbo_dict = json_decode(json_str)
        dbo = get_mixed_type(key_type, dbo_dict.get('mixins'))()
        dbo.dbo_id = dbo_id
        self._object_map[dbo.dbo_key] = dbo
        dbo.hydrate(dbo_dict)
        return dbo

    def object_exists(self, obj_type, obj_id):
        return self.redis.exists('{}:{}'.format(obj_type, obj_id))

    def load_object_set(self, dbo_class, set_key=None):
        key_type = dbo_class.dbo_key_type
        if not set_key:
            set_key = dbo_class.dbo_set_key
        results = set()
        keys = deque()
        pipeline = self.redis.pipeline()
        for key in self.fetch_set_keys(set_key):
            dbo_key = ':'.join([key_type, key])
            try:
                results.add(self._object_map[dbo_key])
            except KeyError:
                keys.append(key)
                pipeline.get(dbo_key)
        for dbo_id, json_str in zip(keys, pipeline.execute()):
            if json_str:
                obj = self.load_from_json(json_str, key_type, dbo_id)
                if obj:
                    results.add(obj)
                continue
            warn("Removing missing object from set {}", set_key)
            self.delete_set_key(set_key, dbo_id)
        return results

    def delete_object_set(self, dbo_class, set_key=None):
        if not set_key:
            set_key = dbo_class.dbo_set_key
        for dbo in self.load_object_set(dbo_class, set_key):
            self.delete_object(dbo)
        self.delete_key(set_key)

    def update_object(self, dbo, dbo_dict):
        dbo.hydrate(dbo_dict)
        return self.save_object(dbo, True)

    def delete_object(self, dbo):
        key = dbo.dbo_key
        dbo.db_deleted()
        self.delete_key(key)
        self._clear_old_refs(dbo)
        if dbo.dbo_set_key:
            self.redis.srem(dbo.dbo_set_key, dbo.dbo_id)
        for children_type in dbo.dbo_children_types:
            self.delete_object_set(get_dbo_class(children_type),
                                   "{}_{}s:{}".format(dbo.dbo_key_type, children_type, dbo.dbo_id))
        for ix_name in dbo.dbo_indexes:
            ix_value = getattr(dbo, ix_name, None)
            if ix_value is not None and ix_value != '':
                self.delete_index('ix:{}:{}'.format(dbo.dbo_key_type, ix_name), ix_value)
        debug("object deleted: {}", key)
        self.evict_object(dbo)

    def reload_object(self, dbo_key):
        dbo = self._object_map.get(dbo_key)
        if dbo:
            json_str = self.redis.get(dbo_key)
            if not json_str:
                warn("Failed to find {} in database for reload", dbo_key)
                return None
            return self.update_object(dbo, json_decode(json_str))
        return self.load_object(dbo_key)

    def evict_object(self, dbo):
        self._object_map.pop(dbo.dbo_key, None)

    def fetch_set_keys(self, set_key):
        return self.redis.smembers(set_key)

    def add_set_key(self, set_key, *values):
        self.redis.sadd(set_key, *values)

    def delete_set_key(self, set_key, value):
        self.redis.srem(set_key, value)

    def set_key_exists(self, set_key, value):
        return self.redis.sismember(set_key, value)

    def db_counter(self, counter_id, inc=1):
        return self.redis.incr("counter:{}".format(counter_id), inc)

    def delete_key(self, key):
        self.redis.delete(key)

    def set_index(self, index_name, key, value):
        return self.redis.hset(index_name, key, value)

    def get_index(self, index_name, key):
        return self.redis.hget(index_name, key)

    def get_full_index(self, index_name):
        return self.redis.hgetall(index_name)

    def delete_index(self, index_name, key):
        return self.redis.hdel(index_name, key)

    def get_all_hash(self, index_name):
        return {key: json_decode(value) for key, value in self.redis.hgetall(index_name).items()}

    def set_db_hash(self, hash_id, hash_key, value):
        return self.redis.hset(hash_id, hash_key, json_encode(value))

    def get_db_hash(self, hash_id, hash_key):
        return json_decode(self.redis.hget(hash_id, hash_key))

    def remove_db_hash(self, hash_id, hash_key):
        self.redis.hdel(hash_id, hash_key)

    def get_all_db_hash(self, hash_id):
        return [json_decode(value) for value in self.redis.hgetall(hash_id).values()]

    def get_db_list(self, list_id, start=0, end=-1):
        return [json_decode(value) for value in self.redis.lrange(list_id, start, end)]

    def add_db_list(self, list_id, value):
        self.redis.lpush(list_id, json_encode(value))

    def trim_db_list(self, list_id, start, end):
        return self.redis.ltrim(list_id, start, end)

    def _update_indexes(self, dbo):
        try:
            old_dbo = json_decode(self.redis.get(dbo.dbo_key))
        except TypeError:
            old_dbo = None

        for ix_name in dbo.dbo_indexes:
            new_val = getattr(dbo, ix_name, None)
            old_val = old_dbo.get(ix_name) if old_dbo else None
            if old_val == new_val:
                continue
            ix_key = 'ix:{}:{}'.format(dbo.dbo_key_type, ix_name)
            if old_val is not None:
                self.delete_index(ix_key, old_val)
            if new_val is not None and new_val != '':
                if self.get_index(ix_key, new_val):
                    raise NonUniqueError(ix_key, new_val)
                self.set_index(ix_key, new_val, dbo.dbo_id)

    def _clear_old_refs(self, dbo):
        dbo_key = dbo.dbo_key
        ref_key = '{}:refs'.format(dbo_key)
        for ref_id in self.fetch_set_keys(ref_key):
            holder_key = '{}:holders'.format(ref_id)
            self.delete_set_key(holder_key, dbo_key)
        self.delete_key(ref_key)

    def _set_new_refs(self, dbo, new_refs):
        dbo_key = dbo.dbo_key
        self.add_set_key("{}:refs".format(dbo_key), *new_refs)
        for ref_id in new_refs:
            holder_key = '{}:holders'.format(ref_id)
            self.add_set_key(holder_key, dbo_key)
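
# --- Illustrative sketch (not part of RedisStore above) ---
# RedisStore's _object_map is a WeakValueDictionary used as an identity cache:
# while other code still holds a loaded object, load_object() returns the same
# instance; once every strong reference is gone, the entry silently disappears
# and the next load goes back to Redis.  A stdlib-only demo of that behaviour
# (no Redis involved; Record and the key string are invented for illustration):

import gc
from weakref import WeakValueDictionary


class Record:
    def __init__(self, key):
        self.key = key


cache = WeakValueDictionary()

obj = Record("area:start")
cache[obj.key] = obj
assert cache.get("area:start") is obj      # cache hit while still referenced

del obj
gc.collect()                               # drop the only strong reference
assert cache.get("area:start") is None     # entry evicted automatically
print("weak identity cache behaves as expected")
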
Esempio n. 49
class EntityCache(object):
    """
    Cache for entities.
    
    Supports add and remove operations as well as lookup by ID and 
    by slug.
    """
    def __init__(self, allow_none_id=False):
        """
        :param bool allow_none_id: Flag specifying if calling :meth:`add`
            with an entity that does not have an ID is allowed.
        """
        # Flag specifying whether entities without an ID may be added.
        self.__allow_none_id = allow_none_id
        # List of cached entities. This is the only place we are holding a
        # real reference to the entity.
        self.__entities = []
        # Dictionary mapping entity IDs to entities for fast lookup by ID.
        self.__id_map = WeakValueDictionary()
        # Dictionary mapping entity slugs to entities for fast lookup by slug.
        self.__slug_map = WeakValueDictionary()

    def get_by_id(self, entity_id):
        """
        Performs a lookup of an entity by its ID.
        
        :param int entity_id: entity ID.
        :return: entity found or ``None``.
        """
        return self.__id_map.get(entity_id)

    def has_id(self, entity_id):
        """
        Checks if this entity cache holds an entity with the given ID.
        
        :return: Boolean result of the check.
        """
        return entity_id in self.__id_map

    def get_by_slug(self, entity_slug):
        """
        Performs a lookup of an entity by its slug.
        
        :param str entity_slug: entity slug.
        :return: entity found or ``None``.
        """
        return self.__slug_map.get(entity_slug)

    def has_slug(self, entity_slug):
        return entity_slug in self.__slug_map

    def add(self, entity):
        """
        Adds the given entity to this cache.
        
        :param entity: Entity to add.
        :type entity: Object implementing :class:`everest.interfaces.IEntity`.
        :raises ValueError: If the ID of the entity to add is ``None``.
        """
        # For certain use cases (e.g., staging), we do not want the entity to
        # be added to have an ID yet.
        if entity.id is not None:
            if entity.id in self.__id_map:
                raise ValueError('Duplicate entity ID "%s".' % entity.id)
            self.__id_map[entity.id] = entity
        elif not self.__allow_none_id:
            raise ValueError('Entity ID must not be None.')
        # The slug can be a lazy attribute depending on the
        # value of other (possibly not yet initialized) attributes which is
        # why we can not always assume it is available at this point.
        if entity.slug is not None:
            if entity.slug in self.__slug_map:
                raise ValueError('Duplicate entity slug "%s".' % entity.slug)
            self.__slug_map[entity.slug] = entity
        self.__entities.append(entity)

    def remove(self, entity):
        """
        Removes the given entity from this cache.
        
        :param entity: Entity to remove.
        :type entity: Object implementing :class:`everest.interfaces.IEntity`.
        :raises KeyError: If the given entity is not in this cache.
        :raises ValueError: If the ID of the given entity is `None`.
        """
        if entity.id is None:
            raise ValueError('Entity ID must not be None.')
        del self.__id_map[entity.id]
        # We may not have the slug in the slug map because it might not have
        # been available by the time the entity was added.
        self.__slug_map.pop(entity.slug, None)
        self.__entities.remove(entity)

    def replace(self, entity):
        """
        Replaces the current entity that has the same ID as the given new
        entity with the latter.
        
        :param entity: Entity to replace.
        :type entity: Object implementing :class:`everest.interfaces.IEntity`.
        :raises KeyError: If the given entity is not in this cache.
        :raises ValueError: If the ID of the given entity is `None`.
        """
        if entity.id is None:
            raise ValueError('Entity ID must not be None.')
        old_entity = self.__id_map[entity.id]
        self.remove(old_entity)
        self.add(entity)

    def iterator(self):
        """
        Returns an iterator over all entities in this cache in the order they
        were added.
        """
        return iter(self.__entities)
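
# --- Illustrative usage sketch for EntityCache above ---
# Assumes the EntityCache class above is defined in the same module and that
# `from weakref import WeakValueDictionary` is imported at the top of it.
# DummyEntity is an invented stand-in: any object exposing `id` and `slug`
# attributes will do.


class DummyEntity:
    def __init__(self, entity_id, slug):
        self.id = entity_id
        self.slug = slug


def _entity_cache_demo():
    cache = EntityCache()
    foo = DummyEntity(1, 'foo')
    cache.add(foo)
    assert cache.get_by_id(1) is foo
    assert cache.get_by_slug('foo') is foo
    # The weak maps stay populated because __entities holds a strong reference.
    cache.remove(foo)
    assert cache.get_by_id(1) is None


_entity_cache_demo()
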
Esempio n. 50
class TrollReactor(CBDictInterface):
    """Base class for Omegle API.
    """
    def __init__(self, transmog=Transmogrifier(), listen=Viewport(), n=2, refresh=1.5):
        # Independent setup
        super(TrollReactor, self).__init__()
        self.listeners = WeakValueDictionary()
        # Argument assignment
        self.eventQueue = deque()
        self.connectTransmogrifier(transmog)
        self.addListeners(listen)
        self._n = n
        self.refresh = refresh

        self.initializeStrangers()  # Now we wait to receive idSet events

    def connectTransmogrifier(self, transmog):
        self.transmogrifier = transmog
        self.transmogrifier.connect(self.eventQueue)

    def initializeStrangers(self):
        self._volatile = {Stranger(reactor, self, HTTP): None for _ in xrange(self._n)}
        self._waiting = len(self._volatile.keys())
        self.strangers = {}

    def multicastDisconnect(self, ids):
        """Announce disconnect for a group of strangers.

        ids : iterable
            id strings of strangers from whom to politely disconnect.
        """
        for i in ids:
            self.strangers[i].announceDisconnect()

    def pumpEvents(self):
        for id_ in self.strangers:
            self.strangers[id_].getEventsPage()

        reactor.callLater(self.refresh, self.pumpEvents)

    def on_idSet(self, ev):
        for s in self._volatile:
            if s.id == ev.id:  # we have the stranger that notified
                self.strangers[s.id] = s  # move to {id: stranger} dict
                self._waiting -= 1
        if self._waiting == 0:
            self.pumpEvents()
        elif self._waiting < 0:
            print_stack()
            stderr.write("ERROR:  too many stranger IDs.")
            reactor.stop()

    def addListeners(self, listeners):
        """Add a listener or group of listeners to the reactor.

        listeners : CBDictInterface instance or iterable
        """
        if not hasattr(listeners, '__iter__'):
            listeners = (listeners,)

        for listen in listeners:
            self.listeners[listen] = listen  # weak-value dict

    def removeListener(self, listener):
        self.listeners.pop(listener)

    def _processEventQueue(self):
        while len(self.eventQueue):
            ev = self.eventQueue.popleft()
            for listener in self.listeners:
                listener.notify(ev)

            self.notify(ev)

    def feed(self, events):
        """Notify the TrollReactor of an event.
        """
        if hasattr(events, '_fields'):
            events = (events,)  # convert to tuple

        self.transmogrifier(events)
        self._processEventQueue()
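
# --- Illustrative sketch (not part of TrollReactor above) ---
# One caveat worth noting: addListeners() stores each listener as both key and
# value of a WeakValueDictionary, and keys are held strongly, so an entry will
# not expire on its own; only removeListener() frees it.  A registry that
# really drops dead listeners automatically can key them by id(), as in this
# stdlib-only sketch (ListenerRegistry and Listener are invented names):

import gc
from weakref import WeakValueDictionary


class Listener:
    def notify(self, event):
        print("got event:", event)


class ListenerRegistry:
    def __init__(self):
        self._listeners = WeakValueDictionary()

    def add(self, listener):
        self._listeners[id(listener)] = listener

    def dispatch(self, event):
        for listener in list(self._listeners.values()):
            listener.notify(event)


registry = ListenerRegistry()
keep = Listener()
temp = Listener()
registry.add(keep)
registry.add(temp)

del temp
gc.collect()                 # the unreferenced listener drops out by itself
registry.dispatch("hello")   # only `keep` is notified
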
Esempio n. 51
class TaskQueue(AbstractTaskQueue):
    """Simple in-memory task queue implementation"""

    @classmethod
    def factory(cls, url, name=const.DEFAULT, *args, **kw):
        obj = _REFS.get((url, name))
        if obj is None:
            obj = _REFS[(url, name)] = cls(url, name, *args, **kw)
        return obj

    def __init__(self, *args, **kw):
        super(TaskQueue, self).__init__(*args, **kw)
        self.queue = Queue()
        self.results = WeakValueDictionary()
        self.results_lock = Lock()

    def _init_result(self, result, status, message):
        with self.results_lock:
            if result.id in self.results:
                return False
            self.results[result.id] = result
        result.__status = status
        result.__value = Queue()
        result.__task = message
        result.__args = {}
        result.__lock = Lock()
        result.__for = None
        return True

    def enqueue_task(self, result, message):
        if self._init_result(result, const.ENQUEUED, message):
            self.queue.put(result)
            return True
        return False

    def defer_task(self, result, message, args):
        if self._init_result(result, const.PENDING, message):
            results = self.results
            # keep references to results to prevent GC
            result.__refs = [results.get(arg) for arg in args]
            return True
        return False

    def undefer_task(self, task_id):
        result = self.results[task_id]
        self.queue.put(result)

    def get(self, timeout=None):
        try:
            result = self.queue.get(timeout=timeout)
        except Empty:
            return None
        result.__status = const.PROCESSING
        return result.id, result.__task

    def size(self):
        return len(self.results)

    def discard_pending(self):
        with self.results_lock:
            while True:
                try:
                    self.queue.get_nowait()
                except Empty:
                    break
            self.results.clear()

    def reserve_argument(self, argument_id, deferred_id):
        result = self.results.get(argument_id)
        if result is None:
            return (False, None)
        with result.__lock:
            if result.__for is not None:
                return (False, None)
            result.__for = deferred_id
            try:
                message = result.__value.get_nowait()
            except Empty:
                message = None
            if message is not None:
                with self.results_lock:
                    self.results.pop(argument_id, None)
            return (True, message)

    def set_argument(self, task_id, argument_id, message):
        result = self.results[task_id]
        with self.results_lock:
            self.results.pop(argument_id, None)
        with result.__lock:
            result.__args[argument_id] = message
            return len(result.__args) == len(result.__refs)

    def get_arguments(self, task_id):
        try:
            return self.results[task_id].__args
        except KeyError:
            return {}

    def set_task_timeout(self, task_id, timeout):
        pass

    def get_status(self, task_id):
        result = self.results.get(task_id)
        return None if result is None else result.__status

    def set_result(self, task_id, message, timeout):
        result = self.results.get(task_id)
        if result is not None:
            with result.__lock:
                result.__value.put(message)
                return result.__for

    def pop_result(self, task_id, timeout):
        result = self.results.get(task_id)
        if result is None:
            return const.TASK_EXPIRED
#        with result.__lock:
#            if result.__for is not None:
#                raise NotImplementedError
#                #return const.RESERVED
#            result.__for = task_id
        try:
            if timeout == 0:
                value = result.__value.get_nowait()
            else:
                value = result.__value.get(timeout=timeout)
        except Empty:
            value = None
        else:
            self.results.pop(task_id, None)
        return value

    def discard_result(self, task_id, task_expired_token):
        result = self.results.pop(task_id, None)
        if result is not None:
            result.__value.put(task_expired_token)
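
# --- Illustrative sketch (not part of TaskQueue above) ---
# The in-memory TaskQueue above keeps task results in a WeakValueDictionary:
# an enqueued result stays alive because the Queue holds a strong reference to
# it, while results nobody references anymore simply vanish from `results`.
# A stdlib-only demo of that lifetime rule (names invented for illustration):

import gc
from queue import Queue
from weakref import WeakValueDictionary


class Result:
    def __init__(self, result_id):
        self.id = result_id


results = WeakValueDictionary()
queue = Queue()

enqueued = Result("task-1")
results[enqueued.id] = enqueued
queue.put(enqueued)              # the queue keeps this result alive

forgotten = Result("task-2")
results[forgotten.id] = forgotten

del enqueued, forgotten
gc.collect()

print(sorted(results.keys()))    # only 'task-1' survives, via the queue
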
Esempio n. 52
class Boss:
    def __init__(self, os_window_id, opts, args, cached_values,
                 new_os_window_trigger):
        set_layout_options(opts)
        self.clipboard_buffers = {}
        self.update_check_process = None
        self.window_id_map = WeakValueDictionary()
        self.startup_colors = {
            k: opts[k]
            for k in opts if isinstance(opts[k], Color)
        }
        self.startup_cursor_text_color = opts.cursor_text_color
        self.pending_sequences = None
        self.cached_values = cached_values
        self.os_window_map = {}
        self.os_window_death_actions = {}
        self.cursor_blinking = True
        self.shutting_down = False
        talk_fd = getattr(single_instance, 'socket', None)
        talk_fd = -1 if talk_fd is None else talk_fd.fileno()
        listen_fd = -1
        if opts.allow_remote_control and args.listen_on:
            listen_fd = listen_on(args.listen_on)
        self.child_monitor = ChildMonitor(
            self.on_child_death,
            DumpCommands(args) if args.dump_commands or args.dump_bytes else
            None, talk_fd, listen_fd)
        set_boss(self)
        self.opts, self.args = opts, args
        startup_sessions = create_sessions(
            opts, args, default_session=opts.startup_session)
        self.keymap = self.opts.keymap.copy()
        if new_os_window_trigger is not None:
            self.keymap.pop(new_os_window_trigger, None)
        for startup_session in startup_sessions:
            self.add_os_window(startup_session, os_window_id=os_window_id)
            os_window_id = None
            if args.start_as != 'normal':
                if args.start_as == 'fullscreen':
                    self.toggle_fullscreen()
                else:
                    change_os_window_state(args.start_as)
        if is_macos:
            from .fast_data_types import cocoa_set_notification_activated_callback
            cocoa_set_notification_activated_callback(
                self.notification_activated)

    def add_os_window(self,
                      startup_session,
                      os_window_id=None,
                      wclass=None,
                      wname=None,
                      opts_for_size=None,
                      startup_id=None):
        if os_window_id is None:
            opts_for_size = opts_for_size or startup_session.os_window_size or self.opts
            cls = wclass or self.args.cls or appname
            with startup_notification_handler(
                    do_notify=startup_id is not None,
                    startup_id=startup_id) as pre_show_callback:
                os_window_id = create_os_window(
                    initial_window_size_func(opts_for_size,
                                             self.cached_values),
                    pre_show_callback, appname, wname or self.args.name or cls,
                    cls)
        tm = TabManager(os_window_id, self.opts, self.args, startup_session)
        self.os_window_map[os_window_id] = tm
        return os_window_id

    def list_os_windows(self):
        with cached_process_data():
            active_tab, active_window = self.active_tab, self.active_window
            active_tab_manager = self.active_tab_manager
            for os_window_id, tm in self.os_window_map.items():
                yield {
                    'id': os_window_id,
                    'is_focused': tm is active_tab_manager,
                    'tabs': list(tm.list_tabs(active_tab, active_window)),
                }

    @property
    def all_tab_managers(self):
        yield from self.os_window_map.values()

    @property
    def all_tabs(self):
        for tm in self.all_tab_managers:
            yield from tm

    @property
    def all_windows(self):
        for tab in self.all_tabs:
            yield from tab

    def match_windows(self, match):
        try:
            field, exp = match.split(':', 1)
        except ValueError:
            return
        if field == 'num':
            tab = self.active_tab
            if tab is not None:
                try:
                    w = tab.get_nth_window(int(exp))
                except Exception:
                    return
                if w is not None:
                    yield w
            return
        if field == 'env':
            kp, vp = exp.partition('=')[::2]
            if vp:
                pat = tuple(map(re.compile, (kp, vp)))
            else:
                pat = re.compile(kp), None
        else:
            pat = re.compile(exp)
        for window in self.all_windows:
            if window.matches(field, pat):
                yield window

    def tab_for_window(self, window):
        for tab in self.all_tabs:
            for w in tab:
                if w.id == window.id:
                    return tab

    def match_tabs(self, match):
        try:
            field, exp = match.split(':', 1)
        except ValueError:
            return
        pat = re.compile(exp)
        found = False
        if field in ('title', 'id'):
            for tab in self.all_tabs:
                if tab.matches(field, pat):
                    yield tab
                    found = True
        if not found:
            tabs = {self.tab_for_window(w) for w in self.match_windows(match)}
            for tab in tabs:
                if tab:
                    yield tab

    def set_active_window(self, window):
        for os_window_id, tm in self.os_window_map.items():
            for tab in tm:
                for w in tab:
                    if w.id == window.id:
                        if tab is not self.active_tab:
                            tm.set_active_tab(tab)
                        tab.set_active_window(w)
                        return os_window_id

    def _new_os_window(self, args, cwd_from=None):
        if isinstance(args, SpecialWindowInstance):
            sw = args
        else:
            sw = self.args_to_special_window(args, cwd_from) if args else None
        startup_session = next(
            create_sessions(self.opts, special_window=sw, cwd_from=cwd_from))
        return self.add_os_window(startup_session)

    def new_os_window(self, *args):
        self._new_os_window(args)

    @property
    def active_window_for_cwd(self):
        w = self.active_window
        if w is not None and w.overlay_for is not None and w.overlay_for in self.window_id_map:
            w = self.window_id_map[w.overlay_for]
        return w

    def new_os_window_with_cwd(self, *args):
        w = self.active_window_for_cwd
        cwd_from = w.child.pid_for_cwd if w is not None else None
        self._new_os_window(args, cwd_from)

    def new_os_window_with_wd(self, wd):
        special_window = SpecialWindow(None, cwd=wd)
        self._new_os_window(special_window)

    def add_child(self, window):
        self.child_monitor.add_child(window.id, window.child.pid,
                                     window.child.child_fd, window.screen)
        self.window_id_map[window.id] = window

    def _handle_remote_command(self, cmd, window=None):
        response = None
        if self.opts.allow_remote_control or getattr(
                window, 'allow_remote_control', False):
            try:
                response = handle_cmd(self, window, cmd)
            except Exception as err:
                import traceback
                response = {'ok': False, 'error': str(err)}
                if not getattr(err, 'hide_traceback', False):
                    response['tb'] = traceback.format_exc()
        else:
            response = {
                'ok':
                False,
                'error':
                'Remote control is disabled. Add allow_remote_control yes to your kitty.conf'
            }
        return response

    def peer_message_received(self, msg):
        msg = msg.decode('utf-8')
        cmd_prefix = '\x1bP@kitty-cmd'
        if msg.startswith(cmd_prefix):
            cmd = msg[len(cmd_prefix):-2]
            response = self._handle_remote_command(cmd)
            if response is not None:
                response = (cmd_prefix + json.dumps(response) +
                            '\x1b\\').encode('utf-8')
            return response
        else:
            msg = json.loads(msg)
            if isinstance(msg, dict) and msg.get('cmd') == 'new_instance':
                startup_id = msg.get('startup_id')
                args, rest = parse_args(msg['args'][1:])
                args.args = rest
                opts = create_opts(args)
                if not os.path.isabs(args.directory):
                    args.directory = os.path.join(msg['cwd'], args.directory)
                for session in create_sessions(opts, args, respect_cwd=True):
                    os_window_id = self.add_os_window(session,
                                                      wclass=args.cls,
                                                      wname=args.name,
                                                      opts_for_size=opts,
                                                      startup_id=startup_id)
                    if msg.get('notify_on_os_window_death'):
                        self.os_window_death_actions[os_window_id] = partial(
                            self.notify_on_os_window_death,
                            msg['notify_on_os_window_death'])
            else:
                log_error('Unknown message received from peer, ignoring')

    def handle_remote_cmd(self, cmd, window=None):
        response = self._handle_remote_command(cmd, window)
        if response is not None:
            if window is not None:
                window.send_cmd_response(response)

    def on_child_death(self, window_id):
        window = self.window_id_map.pop(window_id, None)
        if window is None:
            return
        if window.action_on_close:
            try:
                window.action_on_close(window)
            except Exception:
                import traceback
                traceback.print_exc()
        os_window_id = window.os_window_id
        window.destroy()
        tm = self.os_window_map.get(os_window_id)
        if tm is None:
            return
        for tab in tm:
            if window in tab:
                break
        else:
            return
        tab.remove_window(window)
        if len(tab) == 0:
            tm.remove(tab)
            tab.destroy()
            if len(tm) == 0:
                if not self.shutting_down:
                    mark_os_window_for_close(os_window_id)

    def close_window(self, window=None):
        if window is None:
            window = self.active_window
        self.child_monitor.mark_for_close(window.id)

    def close_tab(self, tab=None):
        if tab is None:
            tab = self.active_tab
        for window in tab:
            self.close_window(window)

    def toggle_fullscreen(self):
        toggle_fullscreen()

    def toggle_maximized(self):
        toggle_maximized()

    def start(self):
        if not getattr(self, 'io_thread_started', False):
            self.child_monitor.start()
            self.io_thread_started = True
        if self.opts.update_check_interval > 0 and not hasattr(
                self, 'update_check_started'):
            from .update_check import run_update_check
            run_update_check(self.opts.update_check_interval * 60 * 60)
            self.update_check_started = True

    def activate_tab_at(self, os_window_id, x):
        tm = self.os_window_map.get(os_window_id)
        if tm is not None:
            tm.activate_tab_at(x)

    def on_window_resize(self, os_window_id, w, h, dpi_changed):
        if dpi_changed:
            self.on_dpi_change(os_window_id)
        else:
            tm = self.os_window_map.get(os_window_id)
            if tm is not None:
                tm.resize()

    def clear_terminal(self, action, only_active):
        if only_active:
            windows = []
            w = self.active_window
            if w is not None:
                windows.append(w)
        else:
            windows = self.all_windows
        reset = action == 'reset'
        how = 3 if action == 'scrollback' else 2
        for w in windows:
            if action == 'scroll':
                w.screen.scroll_until_cursor()
                continue
            w.screen.cursor.x = w.screen.cursor.y = 0
            if reset:
                w.screen.reset()
            else:
                w.screen.erase_in_display(how, False)

    def increase_font_size(self):  # legacy
        cfs = global_font_size()
        self.set_font_size(min(self.opts.font_size * 5, cfs + 2.0))

    def decrease_font_size(self):  # legacy
        cfs = global_font_size()
        self.set_font_size(max(MINIMUM_FONT_SIZE, cfs - 2.0))

    def restore_font_size(self):  # legacy
        self.set_font_size(self.opts.font_size)

    def set_font_size(self, new_size):  # legacy
        self.change_font_size(True, None, new_size)

    def change_font_size(self, all_windows, increment_operation, amt):
        def calc_new_size(old_size):
            new_size = old_size
            if amt == 0:
                new_size = self.opts.font_size
            else:
                if increment_operation:
                    new_size += (1 if increment_operation == '+' else -1) * amt
                else:
                    new_size = amt
                new_size = max(MINIMUM_FONT_SIZE,
                               min(new_size, self.opts.font_size * 5))
            return new_size

        if all_windows:
            current_global_size = global_font_size()
            new_size = calc_new_size(current_global_size)
            if new_size != current_global_size:
                global_font_size(new_size)
            os_windows = tuple(self.os_window_map.keys())
        else:
            os_windows = []
            w = self.active_window
            if w is not None:
                os_windows.append(w.os_window_id)
        if os_windows:
            final_windows = {}
            for wid in os_windows:
                current_size = os_window_font_size(wid)
                if current_size:
                    new_size = calc_new_size(current_size)
                    if new_size != current_size:
                        final_windows[wid] = new_size
            if final_windows:
                self._change_font_size(final_windows)

    def _change_font_size(self, sz_map):
        for os_window_id, sz in sz_map.items():
            tm = self.os_window_map.get(os_window_id)
            if tm is not None:
                os_window_font_size(os_window_id, sz)
                tm.resize()

    def on_dpi_change(self, os_window_id):
        tm = self.os_window_map.get(os_window_id)
        if tm is not None:
            sz = os_window_font_size(os_window_id)
            if sz:
                os_window_font_size(os_window_id, sz, True)
                tm.resize()

    def _set_os_window_background_opacity(self, os_window_id, opacity):
        change_background_opacity(os_window_id, max(0.1, min(opacity, 1.0)))

    def set_background_opacity(self, opacity):
        window = self.active_window
        if window is None or not opacity:
            return
        if not self.opts.dynamic_background_opacity:
            return self.show_error(
                _('Cannot change background opacity'),
                _('You must set the dynamic_background_opacity option in kitty.conf to be able to change background opacity'
                  ))
        os_window_id = window.os_window_id
        if opacity[0] in '+-':
            old_opacity = background_opacity_of(os_window_id)
            if old_opacity is None:
                return
            opacity = old_opacity + float(opacity)
        elif opacity == 'default':
            opacity = self.opts.background_opacity
        else:
            opacity = float(opacity)
        self._set_os_window_background_opacity(os_window_id, opacity)

    @property
    def active_tab_manager(self):
        os_window_id = current_os_window()
        return self.os_window_map.get(os_window_id)

    @property
    def active_tab(self):
        tm = self.active_tab_manager
        if tm is not None:
            return tm.active_tab

    @property
    def active_window(self):
        t = self.active_tab
        if t is not None:
            return t.active_window

    def dispatch_special_key(self, key, scancode, action, mods):
        # Handles shortcuts; returns True if the key was consumed
        key_action = get_shortcut(self.keymap, mods, key, scancode)
        if key_action is None:
            sequences = get_shortcut(self.opts.sequence_map, mods, key,
                                     scancode)
            if sequences:
                self.pending_sequences = sequences
                set_in_sequence_mode(True)
                return True
        else:
            self.current_key_press_info = key, scancode, action, mods
            return self.dispatch_action(key_action)

    def process_sequence(self, key, scancode, action, mods):
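        # Called for key presses while a multi-key shortcut sequence is pending.
        # Keeps only the pending sequences whose next key matches; once no
        # sequences remain, sequence mode is left and the fully matched action,
        # if any, is dispatched.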
        if not self.pending_sequences:
            set_in_sequence_mode(False)

        remaining = {}
        matched_action = None
        for seq, key_action in self.pending_sequences.items():
            if shortcut_matches(seq[0], mods, key, scancode):
                seq = seq[1:]
                if seq:
                    remaining[seq] = key_action
                else:
                    matched_action = key_action

        if remaining:
            self.pending_sequences = remaining
        else:
            self.pending_sequences = None
            set_in_sequence_mode(False)
            if matched_action is not None:
                self.dispatch_action(matched_action)

    def start_resizing_window(self):
        w = self.active_window
        if w is None:
            return
        overlay_window = self._run_kitten(
            'resize_window',
            args=[
                '--horizontal-increment={}'.format(
                    self.opts.window_resize_step_cells),
                '--vertical-increment={}'.format(
                    self.opts.window_resize_step_lines)
            ])
        if overlay_window is not None:
            overlay_window.allow_remote_control = True

    def resize_layout_window(self,
                             window,
                             increment,
                             is_horizontal,
                             reset=False):
        tab = window.tabref()
        if tab is None or not increment:
            return False
        if reset:
            return tab.reset_window_sizes()
        return tab.resize_window_by(window.id, increment, is_horizontal)

    def default_bg_changed_for(self, window_id):
        w = self.window_id_map.get(window_id)
        if w is not None:
            tm = self.os_window_map.get(w.os_window_id)
            if tm is not None:
                tm.update_tab_bar_data()
                tm.mark_tab_bar_dirty()
                t = tm.tab_for_id(w.tab_id)
                if t is not None:
                    t.relayout_borders()

    def dispatch_action(self, key_action):
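        # Try the action as a method on the Boss first, then on the active tab,
        # falling back to the active window. A handler consumes the key unless
        # it explicitly returns True to request passthrough.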
        if key_action is not None:
            f = getattr(self, key_action.func, None)
            if f is not None:
                if self.args.debug_keyboard:
                    print('Keypress matched action:', func_name(f))
                passthrough = f(*key_action.args)
                if passthrough is not True:
                    return True
        tab = self.active_tab
        if tab is None:
            return False
        window = self.active_window
        if window is None:
            return False
        if key_action is not None:
            f = getattr(tab, key_action.func,
                        getattr(window, key_action.func, None))
            if f is not None:
                passthrough = f(*key_action.args)
                if self.args.debug_keyboard:
                    print('Keypress matched action:', func_name(f))
                if passthrough is not True:
                    return True
        return False

    def combine(self, *actions):
        for key_action in actions:
            self.dispatch_action(key_action)

    def on_focus(self, os_window_id, focused):
        tm = self.os_window_map.get(os_window_id)
        if tm is not None:
            w = tm.active_window
            if w is not None:
                w.focus_changed(focused)
            tm.mark_tab_bar_dirty()

    def update_tab_bar_data(self, os_window_id):
        tm = self.os_window_map.get(os_window_id)
        if tm is not None:
            tm.update_tab_bar_data()

    def on_drop(self, os_window_id, paths):
        tm = self.os_window_map.get(os_window_id)
        if tm is not None:
            w = tm.active_window
            if w is not None:
                w.paste('\n'.join(paths))

    def on_os_window_closed(self, os_window_id, viewport_width,
                            viewport_height):
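        # Remembers the last viewport size, destroys the tab manager of the
        # closed OS window, drops all of its windows from window_id_map and runs
        # any death action registered for that OS window.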
        self.cached_values['window-size'] = viewport_width, viewport_height
        tm = self.os_window_map.pop(os_window_id, None)
        if tm is not None:
            tm.destroy()
        for window_id in tuple(
                w.id for w in self.window_id_map.values()
                if getattr(w, 'os_window_id', None) == os_window_id):
            self.window_id_map.pop(window_id, None)
        action = self.os_window_death_actions.pop(os_window_id, None)
        if action is not None:
            action()

    def notify_on_os_window_death(self, address):
        import socket
        s = socket.socket(family=socket.AF_UNIX)
        with suppress(Exception):
            s.connect(address)
            s.sendall(b'c')
            with suppress(EnvironmentError):
                s.shutdown(socket.SHUT_RDWR)
            s.close()

    def display_scrollback(self, window, data, cmd):
        tab = self.active_tab
        if tab is not None and window.overlay_for is None:
            tab.new_special_window(
                SpecialWindow(cmd, data, _('History'), overlay_for=window.id))

    def edit_config_file(self, *a):
        confpath = prepare_config_file_for_editing()
        # On macOS vim fails to handle SIGWINCH if it occurs early, so add a
        # small delay.
        cmd = [
            kitty_exe(), '+runpy',
            'import os, sys, time; time.sleep(0.05); os.execvp(sys.argv[1], sys.argv[1:])'
        ] + get_editor() + [confpath]
        self.new_os_window(*cmd)

    def get_output(self, source_window, num_lines=1):
        output = ''
        s = source_window.screen
        if num_lines is None:
            num_lines = s.lines
        for i in range(min(num_lines, s.lines)):
            output += str(s.linebuf.line(i))
        return output

    def _run_kitten(self, kitten, args=(), input_data=None, window=None):
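        # Runs a kitten either without a UI (end_kitten.no_ui) or as an overlay
        # window on top of the active window. Depending on the kitten's declared
        # type_of_input, the window's text/ANSI/scrollback content is serialized
        # and fed to it on stdin, and a few common options are exported through
        # the KITTY_COMMON_OPTS environment variable.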
        orig_args, args = list(args), list(args)
        from kittens.runner import create_kitten_handler
        end_kitten = create_kitten_handler(kitten, orig_args)
        if window is None:
            w = self.active_window
            tab = self.active_tab
        else:
            w = window
            tab = w.tabref()
        if end_kitten.no_ui:
            end_kitten(None, getattr(w, 'id', None), self)
            return

        if w is not None and tab is not None and w.overlay_for is None:
            args[0:0] = [config_dir, kitten]
            if input_data is None:
                type_of_input = end_kitten.type_of_input
                if type_of_input in ('text', 'history', 'ansi', 'ansi-history',
                                     'screen', 'screen-history', 'screen-ansi',
                                     'screen-ansi-history'):
                    data = w.as_text(as_ansi='ansi' in type_of_input,
                                     add_history='history' in type_of_input,
                                     add_wrap_markers='screen'
                                     in type_of_input).encode('utf-8')
                elif type_of_input is None:
                    data = None
                else:
                    raise ValueError(
                        'Unknown type_of_input: {}'.format(type_of_input))
            else:
                data = input_data
            if isinstance(data, str):
                data = data.encode('utf-8')
            copts = {
                k: self.opts[k]
                for k in ('select_by_word_characters', 'open_url_with')
            }
            overlay_window = tab.new_special_window(
                SpecialWindow([
                    kitty_exe(), '+runpy',
                    'from kittens.runner import main; main()'
                ] + args,
                              stdin=data,
                              env={
                                  'KITTY_COMMON_OPTS': json.dumps(copts),
                                  'KITTY_CHILD_PID': str(w.child.pid),
                                  'PYTHONWARNINGS': 'ignore',
                                  'OVERLAID_WINDOW_LINES': str(w.screen.lines),
                                  'OVERLAID_WINDOW_COLS':
                                  str(w.screen.columns),
                              },
                              cwd=w.cwd_of_child,
                              overlay_for=w.id))
            overlay_window.action_on_close = partial(self.on_kitten_finish,
                                                     w.id, end_kitten)
            return overlay_window

    def kitten(self, kitten, *args):
        import shlex
        cmdline = args[0] if args else ''
        args = shlex.split(cmdline) if cmdline else []
        self._run_kitten(kitten, args)

    def on_kitten_finish(self, target_window_id, end_kitten, source_window):
        output = self.get_output(source_window, num_lines=None)
        from kittens.runner import deserialize
        data = deserialize(output)
        if data is not None:
            end_kitten(data, target_window_id, self)

    def input_unicode_character(self):
        self._run_kitten('unicode_input')

    def set_tab_title(self):
        tab = self.active_tab
        if tab:
            args = [
                '--name=tab-title', '--message',
                _('Enter the new title for this tab below.'),
                'do_set_tab_title',
                str(tab.id)
            ]
            self._run_kitten('ask', args)

    def show_error(self, title, msg):
        self._run_kitten('show_error', args=['--title', title], input_data=msg)

    def do_set_tab_title(self, title, tab_id):
        tm = self.active_tab_manager
        if tm is not None and title:
            tab_id = int(tab_id)
            for tab in tm.tabs:
                if tab.id == tab_id:
                    tab.set_title(title)
                    break

    def kitty_shell(self, window_type):
        cmd = ['@', kitty_exe(), '@']
        if window_type == 'tab':
            self._new_tab(cmd)
        elif window_type == 'os_window':
            self._new_os_window(cmd)
        elif window_type == 'overlay':
            w = self.active_window
            tab = self.active_tab
            if w is not None and tab is not None and w.overlay_for is None:
                tab.new_special_window(SpecialWindow(cmd, overlay_for=w.id))
        else:
            self._new_window(cmd)

    def switch_focus_to(self, window_idx):
        tab = self.active_tab
        if tab is not None:
            tab.set_active_window_idx(window_idx)

    def open_url(self, url, program=None, cwd=None):
        if url:
            if isinstance(program, str):
                program = to_cmdline(program)
            open_url(url, program or self.opts.open_url_with, cwd=cwd)

    def open_url_lines(self, lines, program=None):
        self.open_url(''.join(lines), program)

    def destroy(self):
        self.shutting_down = True
        self.child_monitor.shutdown_monitor()
        self.set_update_check_process()
        self.update_check_process = None
        del self.child_monitor
        for tm in self.os_window_map.values():
            tm.destroy()
        self.os_window_map = {}
        destroy_global_data()

    def paste_to_active_window(self, text):
        if text:
            w = self.active_window
            if w is not None:
                w.paste(text)

    def paste_from_clipboard(self):
        text = get_clipboard_string()
        self.paste_to_active_window(text)

    def paste_from_selection(self):
        if supports_primary_selection:
            text = get_primary_selection()
        else:
            text = get_clipboard_string()
        self.paste_to_active_window(text)

    def set_primary_selection(self):
        w = self.active_window
        if w is not None and not w.destroyed:
            text = w.text_for_selection()
            if text:
                set_primary_selection(text)
                if self.opts.copy_on_select:
                    self.copy_to_buffer(self.opts.copy_on_select)

    def copy_to_buffer(self, buffer_name):
        w = self.active_window
        if w is not None and not w.destroyed:
            text = w.text_for_selection()
            if text:
                if buffer_name == 'clipboard':
                    set_clipboard_string(text)
                elif buffer_name == 'primary':
                    set_primary_selection(text)
                else:
                    self.clipboard_buffers[buffer_name] = text

    def paste_from_buffer(self, buffer_name):
        if buffer_name == 'clipboard':
            text = get_clipboard_string()
        elif buffer_name == 'primary':
            text = get_primary_selection()
        else:
            text = self.clipboard_buffers.get(buffer_name)
        if text:
            self.paste_to_active_window(text)

    def goto_tab(self, tab_num):
        tm = self.active_tab_manager
        if tm is not None:
            tm.goto_tab(tab_num - 1)

    def set_active_tab(self, tab):
        tm = self.active_tab_manager
        if tm is not None:
            return tm.set_active_tab(tab)
        return False

    def next_tab(self):
        tm = self.active_tab_manager
        if tm is not None:
            tm.next_tab()

    def previous_tab(self):
        tm = self.active_tab_manager
        if tm is not None:
            tm.next_tab(-1)

    prev_tab = previous_tab

    def process_stdin_source(self, window=None, stdin=None):
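        # Resolves a symbolic stdin source into bytes for the child's stdin via
        # data_for_at(). A trailing '_wrap' in the source name requests wrap
        # markers, and the window geometry/cursor information is exported to the
        # child through the KITTY_PIPE_DATA environment variable when available.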
        w = window or self.active_window
        env = None
        if stdin:
            add_wrap_markers = stdin.endswith('_wrap')
            if add_wrap_markers:
                stdin = stdin[:-len('_wrap')]
            stdin = data_for_at(w, stdin, add_wrap_markers=add_wrap_markers)
            if stdin is not None:
                pipe_data = w.pipe_data(
                    stdin, has_wrap_markers=add_wrap_markers) if w else {}
                if pipe_data:
                    env = {
                        'KITTY_PIPE_DATA':
                        '{scrolled_by}:{cursor_x},{cursor_y}:{lines},{columns}'
                        .format(**pipe_data)
                    }
                stdin = stdin.encode('utf-8')
        return env, stdin

    def special_window_for_cmd(self,
                               cmd,
                               window=None,
                               stdin=None,
                               cwd_from=None,
                               as_overlay=False):
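        # Builds a SpecialWindow description for cmd, expanding any '@selection'
        # argument to the current selection text and optionally overlaying the
        # new window on top of the source window.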
        w = window or self.active_window
        env, stdin = self.process_stdin_source(w, stdin)
        cmdline = []
        for arg in cmd:
            if arg == '@selection':
                arg = data_for_at(w, arg)
                if not arg:
                    continue
            cmdline.append(arg)
        overlay_for = w.id if as_overlay and w.overlay_for is None else None
        return SpecialWindow(cmdline,
                             stdin,
                             cwd_from=cwd_from,
                             overlay_for=overlay_for,
                             env=env)

    def pipe(self, source, dest, exe, *args):
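        # Pipes window content (source) into exe. dest selects the destination:
        # an overlay or new window, a new tab or OS window, or a detached
        # background process; for 'clipboard'/'primary' the resolved source text
        # is simply copied there without running exe.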
        cmd = [exe] + list(args)
        window = self.active_window
        cwd_from = window.child.pid_for_cwd if window else None

        def create_window():
            return self.special_window_for_cmd(cmd,
                                               stdin=source,
                                               as_overlay=dest == 'overlay',
                                               cwd_from=cwd_from)

        if dest == 'overlay' or dest == 'window':
            tab = self.active_tab
            if tab is not None:
                return tab.new_special_window(create_window())
        elif dest == 'tab':
            tm = self.active_tab_manager
            if tm is not None:
                tm.new_tab(special_window=create_window(), cwd_from=cwd_from)
        elif dest == 'os_window':
            self._new_os_window(create_window(), cwd_from=cwd_from)
        elif dest in ('clipboard', 'primary'):
            env, stdin = self.process_stdin_source(stdin=source, window=window)
            if stdin:
                func = set_clipboard_string if dest == 'clipboard' else set_primary_selection
                func(stdin)
        else:
            import subprocess
            env, stdin = self.process_stdin_source(stdin=source, window=window)
            cwd = None
            if cwd_from:
                with suppress(Exception):
                    cwd = cwd_of_process(cwd_from)
            if stdin:
                r, w = safe_pipe(False)
                try:
                    subprocess.Popen(cmd, env=env, stdin=r, cwd=cwd)
                except Exception:
                    os.close(w)
                else:
                    thread_write(w, stdin)
                finally:
                    os.close(r)
            else:
                subprocess.Popen(cmd, env=env, cwd=cwd)

    def args_to_special_window(self, args, cwd_from=None):
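        # If the first argument starts with '@' it is treated as a data source
        # for the new window's stdin (resolved via data_for_at); any remaining
        # '@selection' arguments are replaced with the current selection text.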
        args = list(args)
        stdin = None
        w = self.active_window

        if args[0].startswith('@') and args[0] != '@':
            stdin = data_for_at(w, args[0]) or None
            if stdin is not None:
                stdin = stdin.encode('utf-8')
            del args[0]

        cmd = []
        for arg in args:
            if arg == '@selection':
                arg = data_for_at(w, arg)
                if not arg:
                    continue
            cmd.append(arg)
        return SpecialWindow(cmd, stdin, cwd_from=cwd_from)

    def _new_tab(self, args, cwd_from=None, as_neighbor=False):
        special_window = None
        if args:
            if isinstance(args, SpecialWindowInstance):
                special_window = args
            else:
                special_window = self.args_to_special_window(args,
                                                             cwd_from=cwd_from)
        tm = self.active_tab_manager
        if tm is not None:
            return tm.new_tab(special_window=special_window,
                              cwd_from=cwd_from,
                              as_neighbor=as_neighbor)

    def _create_tab(self, args, cwd_from=None):
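        # A leading '!' argument carries options for tab creation; only
        # 'neighbor' is recognized here, opening the new tab next to the
        # active one.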
        as_neighbor = False
        if args and args[0].startswith('!'):
            as_neighbor = 'neighbor' in args[0][1:].split(',')
            args = args[1:]
        self._new_tab(args, as_neighbor=as_neighbor, cwd_from=cwd_from)

    def new_tab(self, *args):
        self._create_tab(args)

    def new_tab_with_cwd(self, *args):
        w = self.active_window_for_cwd
        cwd_from = w.child.pid_for_cwd if w is not None else None
        self._create_tab(args, cwd_from=cwd_from)

    def new_tab_with_wd(self, wd):
        special_window = SpecialWindow(None, cwd=wd)
        self._new_tab(special_window)

    def _new_window(self, args, cwd_from=None):
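        # A leading '!' argument names the placement of the new window in the
        # layout; the remaining arguments, if any, are the command line to run.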
        tab = self.active_tab
        if tab is not None:
            location = None
            if args and args[0].startswith('!'):
                location = args[0][1:].lower()
                args = args[1:]
            if args:
                return tab.new_special_window(self.args_to_special_window(
                    args, cwd_from=cwd_from),
                                              location=location)
            else:
                return tab.new_window(cwd_from=cwd_from, location=location)

    def new_window(self, *args):
        self._new_window(args)

    def new_window_with_cwd(self, *args):
        w = self.active_window_for_cwd
        if w is None:
            return self.new_window(*args)
        cwd_from = w.child.pid_for_cwd if w is not None else None
        self._new_window(args, cwd_from=cwd_from)

    def move_tab_forward(self):
        tm = self.active_tab_manager
        if tm is not None:
            tm.move_tab(1)

    def move_tab_backward(self):
        tm = self.active_tab_manager
        if tm is not None:
            tm.move_tab(-1)

    def disable_ligatures_in(self, where, strategy):
        if isinstance(where, str):
            windows = ()
            if where == 'active':
                if self.active_window is not None:
                    windows = (self.active_window, )
            elif where == 'all':
                windows = self.all_windows
            elif where == 'tab':
                if self.active_tab is not None:
                    windows = tuple(self.active_tab)
        else:
            windows = where
        for window in windows:
            window.screen.disable_ligatures = strategy
            window.refresh()

    def patch_colors(self, spec, cursor_text_color, configured=False):
        if configured:
            for k, v in spec.items():
                if hasattr(self.opts, k):
                    setattr(self.opts, k, color_from_int(v))
            if cursor_text_color is not False:
                if isinstance(cursor_text_color, int):
                    cursor_text_color = color_from_int(cursor_text_color)
                self.opts.cursor_text_color = cursor_text_color
        for tm in self.all_tab_managers:
            tm.tab_bar.patch_colors(spec)
        patch_global_colors(spec, configured)

    def safe_delete_temp_file(self, path):
        if is_path_in_temp_dir(path):
            with suppress(FileNotFoundError):
                os.remove(path)

    def set_update_check_process(self, process=None):
        if self.update_check_process is not None:
            with suppress(Exception):
                if self.update_check_process.poll() is None:
                    self.update_check_process.kill()
        self.update_check_process = process

    def on_monitored_pid_death(self, pid, exit_status):
        update_check_process = getattr(self, 'update_check_process', None)
        if update_check_process is not None and pid == update_check_process.pid:
            self.update_check_process = None
            from .update_check import process_current_release
            try:
                raw = update_check_process.stdout.read().decode('utf-8')
            except Exception as e:
                log_error(
                    'Failed to read data from update check process, with error: {}'
                    .format(e))
            else:
                try:
                    process_current_release(raw)
                except Exception as e:
                    log_error(
                        'Failed to process update check data {!r}, with error: {}'
                        .format(raw, e))

    def notification_activated(self, identifier):
        if identifier == 'new-version':
            from .update_check import notification_activated
            notification_activated()

    def dbus_notification_callback(self, activated, *args):
        from .notify import dbus_notification_created, dbus_notification_activated
        if activated:
            dbus_notification_activated(*args)
        else:
            dbus_notification_created(*args)

    def show_bad_config_lines(self, bad_lines):
        def format_bad_line(bad_line):
            return '{}:{} in line: {}\n'.format(bad_line.number,
                                                bad_line.exception,
                                                bad_line.line)

        msg = '\n'.join(map(format_bad_line, bad_lines)).rstrip()
        self.show_error(_('Errors in kitty.conf'), msg)
Example n. 53
0
class PropAnimation(QtCore.QPropertyAnimation):
    
    base_time = 350
    disable_animation = True


    def __init__(self, widget, name, time_factor=1.0, force=False):
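        # The animated widget is stored in a WeakValueDictionary so that a
        # pending animation never keeps the widget alive on its own; the widget
        # can be garbage collected independently of the animation object.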

        super(PropAnimation, self).__init__(widget, name)
        self.stateChanged.connect(self._onStateChanged)
        self._force = force

        self.dic = WeakValueDictionary()
        self.dic['widget'] = widget

        self.__new = None

        self.time = time_factor * self.base_time
        self.setEasingCurve(QtCore.QEasingCurve.InOutQuad)

    @property
    def widget(self):
        return self.dic['widget']

    def _onStateChanged(self, state):
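        # Tracks the animation in running_animations while it runs. If a
        # reparent target was set up, the widget is temporarily reparented for
        # the duration of the animation; on stop the final value is applied via
        # the widget's set<Property>() method and the original parent restored.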

        if state == self.Running:
            running_animations.append(self)

            if self.dic.get('__reparent'):
                self.dic['__old_parent'] = self.widget.parentItem()
                self.widget.setParentItem(self.dic['__reparent'])

        elif state == self.Stopped:

            try:
                attr = getattr(self.dic['widget'], 'set' + str(self.propertyName()).capitalize())
            except RuntimeError:
                logger.ddebug('runtime error for {}'.format(self))
                # the underlying Qt object has already been deleted; nothing to update
                return

            attr(self.__new)
            self.__new = None
            if self.dic.get('__reparent'):
                self.widget.setParentItem(self.dic['__old_parent'])
                self.dic.pop('__reparent')
                self.dic.pop('__old_parent')
            running_animations.remove(self)


    def setup(self, new, old=None, reparent=None, path_function=None):
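        # Prepares the animation: new is the target value, old defaults to the
        # current value of the animated property. With reparent, both endpoints
        # are mapped into the new parent's coordinates; with path_function, the
        # intermediate key values are generated from the (old, new) pair.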
        
        if self.state() == self.Running and not self._force:
            return False

        self.__new = new

        if old is None:
            old = getattr(self.dic['widget'], str(self.propertyName()))
            if hasattr(old, '__call__'):
                old = old()

        if reparent:
            self.dic['__reparent'] = reparent
            old = self.widget.mapToItem(reparent, old)
            new = self.widget.mapToItem(reparent, new)

        if path_function:
            points = path_function(old, new)
            self.setKeyValues([])
            for i, p in enumerate(points):
                self.setKeyValueAt(float(i) / len(points), p)

        self.setDuration(self.time)
        self.setStartValue(old)
        self.setEndValue(new)
        return True

    def start(self):

        if self.state() == self.Running and not self._force:
            return False

        super(PropAnimation, self).start()
        return True