Example #1
class Showcase(SessionSingleton):
    '''
    Enables other classes to translate previously registered names into the
    actual objects they refer to.
    '''

    def __init__(self):
        self._objects = WeakValueDictionary()
        self._cases = WeakValueDictionary()

    def get(self, oid):
        '''
        :param str oid: the oid registered in the NameAuthority
        '''
        return self._objects.get(oid)

    def put(self, instance):
        '''
        :param INamed instance: the exposed object.
        '''
        self._objects[instance.oid] = instance

    def get_case(self, tag, default=None):
        '''
        :param str tag: The tag that the returned case should have
        '''
        return self._cases.get(tag, default)
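
All of the examples below rely on the same contract, so it is worth seeing once in isolation: a WeakValueDictionary entry disappears as soon as the last strong reference to its value is gone. A minimal sketch of the behavior Showcase.put()/get() inherit from that contract (the Exposed class is illustrative, not part of the Showcase API):

from weakref import WeakValueDictionary

class Exposed:
    """Stand-in for any object registered under an oid."""
    def __init__(self, oid):
        self.oid = oid

registry = WeakValueDictionary()
obj = Exposed('form-1')
registry[obj.oid] = obj                  # register, as Showcase.put() does
assert registry.get('form-1') is obj     # resolvable while a reference exists
del obj                                  # drop the last strong reference
assert registry.get('form-1') is None    # entry evicted (immediately on CPython)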
Example #2
class Monitor(QObject):
	"""File monitor

	This monitor can be used to track single files
	"""

	def __init__(self, **kwargs):
		super(Monitor, self).__init__(**kwargs)

		self.watched = WeakValueDictionary()
		self.delMapper = QSignalMapper(self)
		self.delMapper.mapped[str].connect(self.unmonitorFile)

		self.watcher = MonitorWithRename(parent=self)
		self.watcher.fileChanged.connect(self._onFileChanged)

	def monitorFile(self, path):
		"""Monitor a file and return an object that tracks only `path`

		:rtype: SingleFileWatcher
		:return: an object tracking `path`; the same object is returned if the method
		         is called again with the same path.
		"""
		path = os.path.abspath(path)

		self.watcher.addPath(path)

		proxy = self.watched.get(path)
		if not proxy:
			proxy = SingleFileWatcher(path)
			proxy.destroyed.connect(self.delMapper.map)
			self.delMapper.setMapping(proxy, path)
			self.watched[path] = proxy

		return proxy

	@Slot(str)
	def unmonitorFile(self, path):
		"""Stop monitoring a file

		Since there is only one :any:`SingleFileWatcher` object per path, all objects monitoring
		`path` will not receive notifications anymore.
		To let only one object stop monitoring the file, simply disconnect its `modified` signal.
		When the :any:`SingleFileWatcher` object returned by method :any:`monitorFile` is
		destroyed, the file is automatically un-monitored.
		"""
		path = os.path.abspath(path)

		self.watcher.removePath(path)
		self.watched.pop(path, None)

	@Slot(str)
	def _onFileChanged(self, path):
		proxy = self.watched.get(path)
		if proxy:
			proxy.modified.emit()
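
A hedged usage sketch for the monitor above; SingleFileWatcher and its `modified` signal are assumed from the surrounding project rather than defined here:

monitor = Monitor()
watcher = monitor.monitorFile('/tmp/example.txt')
watcher.modified.connect(lambda: print('file changed'))
# Hold on to `watcher`: Monitor.watched is a WeakValueDictionary, so once the
# last reference to the proxy is dropped it is destroyed, and its destroyed
# signal (routed through delMapper) unmonitors the path automatically.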
Example #3
class Monitor(QObject):
    """File monitor

	This monitor can be used to track single files
	"""
    def __init__(self, **kwargs):
        super(Monitor, self).__init__(**kwargs)

        self.watched = WeakValueDictionary()
        self.delMapper = QSignalMapper(self)
        self.delMapper.mapped[str].connect(self.unmonitorFile)

        self.watcher = MonitorWithRename(parent=self)
        self.watcher.fileChanged.connect(self._onFileChanged)

    def monitorFile(self, path):
        """Monitor a file and return an object that tracks only `path`

		:rtype: SingleFileWatcher
		:return: an object tracking `path`, the same object is returned if the method is called
		         with the same path.
		"""
        path = os.path.abspath(path)

        self.watcher.addPath(path)

        proxy = self.watched.get(path)
        if not proxy:
            proxy = SingleFileWatcher(path)
            proxy.destroyed.connect(self.delMapper.map)
            self.delMapper.setMapping(proxy, path)
            self.watched[path] = proxy

        return proxy

    @Slot(str)
    def unmonitorFile(self, path):
        """Stop monitoring a file

		Since there is only one :any:`SingleFileWatcher` object per path, all objects monitoring
		`path` will not receive notifications anymore.
		To let only one object stop monitoring the file, simply disconnect its `modified` signal.
		When the :any:`SingleFileWatcher` object returned by method :any:`monitorFile` is
		destroyed, the file is automatically un-monitored.
		"""
        path = os.path.abspath(path)

        self.watcher.removePath(path)
        self.watched.pop(path, None)

    @Slot(str)
    def _onFileChanged(self, path):
        proxy = self.watched.get(path)
        if proxy:
            proxy.modified.emit()
Example #4
class Reader:
    def __init__(self, document: Document):
        self.standalone_instances = WeakValueDictionary()
        self.tables = {}
        self.registry = TypeRegistry.instance
        self.document = document

    @property
    def file(self):
        return self.document.file

    def get_type_by_id(self, type_id):
        return self.registry.get_by_id(type_id)

    def find_instances(self, cls):
        return self.document.find_instances(cls, context=self)

    def add_instance(self, instance):
        if instance.__vo_id__ is not None:
            self.standalone_instances[instance.__vo_id__] = instance

    def get_instance_by_id(self, instance_id):
        return self.standalone_instances.get(instance_id, None)

    def add_table(self, table_id, table):
        self.tables[table_id] = table

    def get_table_by_id(self, table_id):
        return self.tables.get(table_id, None)
Example #5
class _TransformExecutorServices(object):
  """Schedules and completes TransformExecutors.

  Controls the concurrency as appropriate for the applied transform the executor
  exists for.
  """

  def __init__(self, executor_service):
    self._executor_service = executor_service
    self._scheduled = set()
    self._parallel = _ParallelEvaluationState(
        self._executor_service, self._scheduled)
    self._serial_cache = WeakValueDictionary()

  def parallel(self):
    return self._parallel

  def serial(self, step):
    cached = self._serial_cache.get(step)
    if not cached:
      cached = _SerialEvaluationState(self._executor_service, self._scheduled)
      self._serial_cache[step] = cached
    return cached

  @property
  def executors(self):
    return frozenset(self._scheduled)
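
serial() is an instance of the build-on-miss pattern that recurs throughout these examples. In generic form (names illustrative):

def memo(cache, key, build):
    """Return cache[key], building and inserting the value on a miss."""
    value = cache.get(key)
    if value is None:
        value = cache[key] = build()
    return value

Because the cache holds its values weakly, the memoized state lives exactly as long as some caller still references it.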
Example #6
class Root(Directory):
    cls_dir_class = Directory
    def __init__(self, dbname=':memory:', dirname='', threaded=True):
        super(Root,self).__init__(None,dirname)
        if threaded:
            self.con = hscommon.sqlite.ThreadedConn(dbname, False)
        else:
            self.con = sqlite.connect(dbname)
        self.id = 0
        self._attrs_to_read = None
        self._id_cache = WeakValueDictionary()
        self._id_cache[0] = self
        try:
            cur = self.con.execute("select * from nodes where 1=2")
        except sqlite.OperationalError:
            self.__create_tables()
        self._load_from_db()
    
    def __create_tables(self):
        sqls = [
            'create table nodes(parent INT,type INT,name TEXT);',
            'create table attrs(parent INT,name TEXT,type INT,value TEXT);',
            'create index idx_node_parent on nodes(parent);',
            'create index idx_node_name on nodes(name);',
            'create index idx_attr_parent on attrs(parent);',
            'create index idx_attr_name on attrs(name);',
            'create unique index idx_node_unique on nodes(parent,name);',
            'create unique index idx_attr_unique on attrs(parent,name);'
        ]
        for sql in sqls:
            self.con.execute(sql)
    
    def find_node_of_id(self,id):
        return self._id_cache.get(id)
Example #7
class TransformExecutorServices(object):
  """Schedules and completes TransformExecutors.

  Controls the concurrency as appropriate for the applied transform the executor
  exists for.
  """

  def __init__(self, executor_service):
    self._executor_service = executor_service
    self._scheduled = set()
    self._parallel = ParallelEvaluationState(
        self._executor_service, self._scheduled)
    self._serial_cache = WeakValueDictionary()

  def parallel(self):
    return self._parallel

  def serial(self, step):
    cached = self._serial_cache.get(step)
    if not cached:
      cached = SerialEvaluationState(self._executor_service, self._scheduled)
      self._serial_cache[step] = cached
    return cached

  @property
  def executors(self):
    return frozenset(self._scheduled)
Example #8
class HasWeaks(metaclass=HasWeaksMeta):

    """A mix-in to allow objects to store weak references to other objects.

    Each individual class or instance of a class that subclasses this will
    have a separate weak reference dict, there is no inheritance.

    """

    def __init__(self):
        super().__init__()
        self._get_weak_wr = WeakMethod(self._inst_get_weak)
        self._get_weak = lambda k: self._get_weak_wr()(k)
        self._set_weak_wr = WeakMethod(self._inst_set_weak)
        self._set_weak = lambda k, o: self._set_weak_wr()(k, o)
        self._del_weak_wr = WeakMethod(self._inst_del_weak)
        self._del_weak = lambda k: self._del_weak_wr()(k)
        self._weak_refs = WeakValueDictionary()

    def _inst_get_weak(self, name):
        return self._weak_refs.get(name)

    def _inst_set_weak(self, name, obj):
        if obj is None:
            self._del_weak(name)
        else:
            self._weak_refs[name] = obj

    def _inst_del_weak(self, name):
        if name in self._weak_refs:
            del self._weak_refs[name]
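
The WeakMethod indirection above keeps the stored accessors from pinning `self` alive. A minimal demonstration of WeakMethod's behavior (the Greeter class is illustrative):

from weakref import WeakMethod

class Greeter:
    def hello(self):
        return 'hi'

g = Greeter()
wm = WeakMethod(g.hello)
assert wm()() == 'hi'   # dereference the weak method, then call it
del g                   # the bound method dies with its instance...
assert wm() is None     # ...so the weak method now dereferences to None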
Example #9
class XKeyListener(Thread):
	def __init__(self, display, root):
		super().__init__()
		self.daemon = True
		self.display = display
		self.root = root
		self.listenFor = WeakValueDictionary()
		self.start()
	#enddef

	def run(self):
		self.root.xinput_select_events([(xinput.AllDevices, xinput.KeyPressMask)])
		while True:
			event = self.display.next_event()
			if event.type == X.KeyPress:
				cb = self.listenFor.get((event.detail, event.state), None)
				if cb: cb.trigger()
			#endif
		#endwhile
	#enddef

	def register(self, key, mods, sk):
		self.listenFor[(key, mods)] = sk
	#enddef

	def unregister(self, key, mods):
		del self.listenFor[(key, mods)]
Example #10
class LRUCache:
	def __init__(self, max_size):
		self.LRU = [Node(time(), "none%s"%i) for i in range(max_size)]
		self.search = WeakValueDictionary()
		for i in self.LRU:
			self.search[i.name] = i
		
	def __setitem__(self, name, value):
		q = self.search.get(name, None)
		if q:
			q.data = value
			q.time = time()
		else:
			lru = self.LRU[0]
			self.search.pop(lru.name)
			lru.data = value
			lru.time = time()
			lru.name = name
			self.search[lru.name] = lru
		self.LRU.sort()
		
	def get(self, name, default=None):
		pos = None
		try:
			pos = self.search.__getitem__(name)
			pos.time = time()
			return pos.data
		except KeyError:
			if default is not None:
				return default
			else:
				raise
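
The Node class used above is not shown. A hypothetical reconstruction that satisfies everything the code relies on (time/name/data attributes, ordering for LRU.sort(), and weak-referenceability despite __slots__):

from functools import total_ordering

@total_ordering
class Node:
    # '__weakref__' must be listed explicitly: a class with __slots__ is not
    # weak-referenceable otherwise, and LRUCache.search requires it to be.
    __slots__ = ('time', 'name', 'data', '__weakref__')

    def __init__(self, time, name):
        self.time = time
        self.name = name
        self.data = None

    def __eq__(self, other):
        return self.time == other.time

    def __lt__(self, other):
        return self.time < other.time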
Example #11
class Root(Directory):
    cls_dir_class = Directory

    def __init__(self, dbname=':memory:', dirname='', threaded=True):
        super(Root, self).__init__(None, dirname)
        if threaded:
            self.con = hscommon.sqlite.ThreadedConn(dbname, False)
        else:
            self.con = sqlite.connect(dbname)
        self.id = 0
        self._attrs_to_read = None
        self._id_cache = WeakValueDictionary()
        self._id_cache[0] = self
        try:
            cur = self.con.execute("select * from nodes where 1=2")
        except sqlite.OperationalError:
            self.__create_tables()
        self._load_from_db()

    def __create_tables(self):
        sqls = [
            'create table nodes(parent INT,type INT,name TEXT);',
            'create table attrs(parent INT,name TEXT,type INT,value TEXT);',
            'create index idx_node_parent on nodes(parent);',
            'create index idx_node_name on nodes(name);',
            'create index idx_attr_parent on attrs(parent);',
            'create index idx_attr_name on attrs(name);',
            'create unique index idx_node_unique on nodes(parent,name);',
            'create unique index idx_attr_unique on attrs(parent,name);'
        ]
        for sql in sqls:
            self.con.execute(sql)

    def find_node_of_id(self, id):
        return self._id_cache.get(id)
Example #12
class TimerWnd(object):
    def __init__(self):
        win = ui.CreateFrame()
        win.CreateWindow(None, "", 0, (0, 0, 0, 0))
        win.AttachObject(self)
        self._win = win
        self.tasks = WeakValueDictionary()

    def schedule(self, task):
        self.cancel(task)
        event_id = id(task)
        timer_id = self._win.SetTimer(event_id, task._milliseconds)
        if not timer_id:
            raise ValueError("Out of timers")
        task._win_timer_id = timer_id
        self.tasks[event_id] = task

    def cancel(self, task):
        timer_id = task._win_timer_id
        if timer_id:
            self._win.KillTimer(timer_id)
            task._win_timer_id = None

    def OnTimer(self, event_id):
        #print "TimerWnd.OnTimer:", event_id
        task = self.tasks.get(event_id)
        if task:
            if not task._repeat:
                self.cancel(task)
            task._proc()
            #  We do this so that the application can't get starved of idle time
            #  by a repeatedly-firing Task:
            application()._win_idle()
Example #13
class _TransformExecutorServices(object):
    """Schedules and completes TransformExecutors.

  Controls the concurrency as appropriate for the applied transform the executor
  exists for.
  """
    def __init__(self, executor_service):
        # type: (_ExecutorService) -> None
        self._executor_service = executor_service
        self._scheduled = set()  # type: Set[TransformExecutor]
        self._parallel = _ParallelEvaluationState(self._executor_service,
                                                  self._scheduled)
        self._serial_cache = WeakValueDictionary(
        )  # type: WeakValueDictionary[Any, _SerialEvaluationState]

    def parallel(self):
        # type: () -> _ParallelEvaluationState
        return self._parallel

    def serial(self, step):
        # type: (Any) -> _SerialEvaluationState
        cached = self._serial_cache.get(step)
        if not cached:
            cached = _SerialEvaluationState(self._executor_service,
                                            self._scheduled)
            self._serial_cache[step] = cached
        return cached

    @property
    def executors(self):
        # type: () -> FrozenSet[TransformExecutor]
        return frozenset(self._scheduled)
Example #14
class Cache:
	__slots__ = ['delay', 'rate', '_page', '_commission']

	def __init__(self, delay=0.25, rate=sqrt(2)):
		self.delay = delay
		self.rate = rate
		self._page = WeakValueDictionary()
		self._commission = WeakValueDictionary()

	def _backoff(self):
		return accumulate(chain([self.delay], repeat(self.rate)), mul)

	async def _download(self, session, url):
		for delay in self._backoff():
			try:
				res = await session.get(url, timeout=60, connection_timeout=15)
			except AsksException:
				pass
			except BrokenResourceError:
				pass
			except ConnectionError:
				pass
			except GAIError:
				pass
			else:
				# The server raises 404(!) on parameter error
				if res.status_code // 100 == 2: break
			await sleep(delay)
		encoding = (res.encoding.lower()
		            if 'charset' in res.headers.get('content-type')
		            else None) # FIXME unreliable (change in asks?)
		return res.content, encoding

	async def page(self, session, url):
		page = self._page.get(url)
		if page is None:
			content, encoding = await self._download(session, url)
			page = self._page[url] = BeautifulSoup(
				content, _PARSER, from_encoding=encoding)
		return page

	def commission(self, parent, url):
		comm = self._commission.get(url)
		if comm is None:
			comm = self._commission[url] = Commission(parent, url)
		assert comm.parent == parent
		return comm
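
_backoff produces an unbounded geometric series of delays. With the defaults (delay=0.25, rate=√2), the first terms look like this:

from itertools import accumulate, chain, islice, repeat
from operator import mul

delays = accumulate(chain([0.25], repeat(2 ** 0.5)), mul)
print(list(islice(delays, 4)))  # [0.25, 0.353..., 0.499..., 0.707...]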
Example #15
class RecentFiles(object):
    numberOfFiles = 8
    separator = u";"

    def __init__(self, fileManager):
        self.fileManager = fileManager
        fileManager.onRead.connect(self.onReadOrWrite)
        fileManager.onWrite.connect(self.onReadOrWrite)

        self.files = self.retrieveList()

        self.menu = JMenu("Open Recent Program")
        self.menuActions = WeakValueDictionary()
        self.disabledItem = JMenuItem("(No recent programs)")
        self.disabledItem.enabled = False

        self.fillMenu()

    ### Maintaining the menu

    def onReadOrWrite(self, fileManager, filename, **_):
        try:
            self.files.remove(filename)
        except ValueError:
            if len(self.files) >= self.numberOfFiles:
                # Remove enough files that there's a spare slot.
                self.files = self.files[:self.numberOfFiles - 1]

        self.files.insert(0, filename)
        self.storeList(self.files)
        self.fillMenu()

    def fillMenu(self):
        self.menu.removeAll()
        if self.files:
            for filename in self.files:
                action = self.menuActions.get(filename)
                if action is None:
                    action = PythonAction(self.fileManager.readAction, filename,
                                          name=filename)
                    self.menuActions[filename] = action
                self.menu.add(action)
        else:
            self.menu.add(self.disabledItem)

    ### Writing filenames

    def retrieveList(self):
        joined = JESConfig.getInstance().getStringProperty(JESConfig.CONFIG_RECENT_FILES)
        if not joined:
            return []
        else:
            return [fn for fn in joined.split(self.separator) if os.path.isfile(fn)]

    def storeList(self, files):
        joined = self.separator.join(files)
        JESConfig.getInstance().setStringProperty(JESConfig.CONFIG_RECENT_FILES, joined)
Example #16
class ObjectPool(object):
    """
        This class allows to fetch mvc model objects using their UUID.
        This requires to model to have a property called "uuid". All
        class inheriting from the base 'Model' class will have this.
        If implementing a custom model, the UUID property is responsible
        for the removal and addition to the pool when it changes values.
        Also see the UUIDProperty descriptor for an example implementation.
        We can use this to store complex relations between objects where 
        references to each other can be replaced with the UUID.
        For a multi-threaded version see ThreadedObjectPool. 
    """
    def __init__(self, *args, **kwargs):
        object.__init__(self)
        self._objects = WeakValueDictionary()

    def add_or_get_object(self, obj):
        try:
            # fail_on_duplicate=True makes a duplicate raise KeyError below
            self.add_object(obj, force=False, fail_on_duplicate=True)
            return obj
        except KeyError:
            return self.get_object(obj.uuid)

    def add_object(self, obj, force=False, fail_on_duplicate=False):
        if not obj.uuid in self._objects or force:
            self._objects[obj.uuid] = obj
        elif fail_on_duplicate:
            raise KeyError(
                "UUID %s is already taken by another object %s, cannot add object %s"
                % (obj.uuid, self._objects[obj.uuid], obj))
        else:
            # Just change the objects uuid, will break refs, but
            # it prevents issues with inherited properties etc.
            logger.warning(
                "A duplicate UUID was passed to an ObjectPool for a %s object."
                % obj)
            obj.uuid = get_new_uuid()

    def change_all_uuids(self):
        # first get a copy of all uuids & objects:
        items = list(self._objects.items())
        for uuid, obj in items:  # @UnusedVariable
            obj.uuid = get_new_uuid()

    def remove_object(self, obj):
        if obj.uuid in self._objects and self._objects[obj.uuid] == obj:
            del self._objects[obj.uuid]

    def get_object(self, uuid):
        obj = self._objects.get(uuid, None)
        return obj

    def clear(self):
        self._objects.clear()
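
A usage sketch under the pool's stated contract; the Model class here is illustrative, standing in for any weak-referenceable object with a uuid attribute:

class Model:
    def __init__(self, uuid):
        self.uuid = uuid

pool = ObjectPool()
m = Model('e3b0c442')
pool.add_object(m)
assert pool.get_object('e3b0c442') is m
del m                                       # the pool holds only a weak reference
assert pool.get_object('e3b0c442') is None  # entry collected (CPython refcounting)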
Example #17
class ObjectRedis(object):
    """
    Extended Redis client, which additionally provides object-oriented
    operations and object caching.

    Objects are represented as hashes in the Redis database. The translation
    from a hash to an object is carried out by a given `decode` function.

    When `caching` is enabled, objects loaded from the Redis database are cached
    and subsequently retrieved from the cache. An object stays in the cache as
    long as there is a reference to it and it is automatically removed when the
    Python interpreter destroys it. Thus, it is guaranteed that getting the same
    key multiple times will yield the identical object.

    Attributes:

     * `r`: Underlying Redis client. Read-Only.
     * `decode`: function, which decodes an object from a Redis hash. It is
       called with the hash (a `dict`) as single argument. Read-Only.
     * `caching`: switch to enable / disable object caching.
    """

    # TODO: add oset and omset

    def __init__(self, r, decode, caching=True):
        self.r = r
        self.decode = decode
        self.caching = caching
        self._cache = WeakValueDictionary()

    def oget(self, key):
        """
        Get the object for `key`.
        """
        object = self._cache.get(key) if self.caching else None
        if not object:
            hash = self.hgetall(key)
            if hash:
                object = self.decode(hash)
                if self.caching:
                    self._cache[key] = object
        return object

    def omget(self, keys):
        """
        Get the objects for all specified `keys`.
        """
        # TODO: make atomic
        return [self.oget(k) for k in keys]

    def __getattr__(self, name):
        return getattr(self.r, name)
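
A hedged wiring example. redis.StrictRedis is just one client that fits, since ObjectRedis delegates every other command to it through __getattr__; the Record class is a hypothetical decode target:

import redis

class Record:
    """Plain holder for the fields of a decoded Redis hash."""
    def __init__(self, hash):
        self.__dict__.update(hash)

store = ObjectRedis(redis.StrictRedis(decode_responses=True), decode=Record)
# store.oget('user:1') decodes the hash at 'user:1' into a Record and, with
# caching enabled, keeps returning that same Record while it is referenced.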
Example #18
class ObjectPool(object):
    """
        This class allows to fetch mvc model objects using their UUID.
        This requires to model to have a property called "uuid". All
        class inheriting from the base 'Model' class will have this.
        If implementing a custom model, the UUID property is responsible
        for the removal and addition to the pool when it changes values.
        Also see the UUIDPropIntel class for an example implementation.
        We can use this to store complex relations between objects where 
        references to each other can be replaced with the UUID.
        For a multi-threaded version see ThreadedObjectPool. 
    """

    def __init__(self, *args, **kwargs):
        object.__init__(self)
        self._objects = WeakValueDictionary()

    def add_or_get_object(self, obj):
        try:
            # fail_on_duplicate=True makes a duplicate raise KeyError below
            self.add_object(obj, force=False, fail_on_duplicate=True)
            return obj
        except KeyError:
            return self.get_object(obj.uuid)

    def add_object(self, obj, force=False, fail_on_duplicate=False):
        if not obj.uuid in self._objects or force:
            self._objects[obj.uuid] = obj
        elif fail_on_duplicate:
            raise KeyError(
                "UUID %s is already taken by another object %s, cannot add object %s"
                % (obj.uuid, self._objects[obj.uuid], obj))
        else:
            # Just change the objects uuid, will break refs, but
            # it prevents issues with inherited properties etc.
            logger.warning("A duplicate UUID was passed to an ObjectPool for a %s object." % obj)
            obj.uuid = get_new_uuid()

    def change_all_uuids(self):
        # first get a copy of all uuids & objects:
        items = list(self._objects.items())
        for uuid, obj in items: # @UnusedVariable
            obj.uuid = get_new_uuid()

    def remove_object(self, obj):
        if obj.uuid in self._objects and self._objects[obj.uuid] == obj:
            del self._objects[obj.uuid]

    def get_object(self, uuid):
        obj = self._objects.get(uuid, None)
        return obj

    def clear(self):
        self._objects.clear()
Example #19
class factory:
    """package generator

    does weakref caching per repository

    :cvar child_class: callable to generate packages
    """

    child_class = package

    def __init__(self, parent_repo):
        self._parent_repo = parent_repo
        self._cached_instances = WeakValueDictionary()

    def new_package(self, *args):
        """generate a new package instance"""
        inst = self._cached_instances.get(args)
        if inst is None:
            inst = self._cached_instances[args] = self.child_class(self, *args)
        return inst

    def __call__(self, *args, **kwds):
        return self.new_package(*args, **kwds)

    def clear(self):
        """wipe the weakref cache of packages instances"""
        self._cached_instances.clear()

    def _get_metadata(self, *args):
        """Pulls metadata from the repo/cache/wherever.

        Must be overridden in derivatives.
        """
        raise NotImplementedError

    def _update_metadata(self, *args):
        """Updates metadata in the repo/cache/wherever.

        Must be overridden in derivatives.
        """
        raise NotImplementedError

    def __getstate__(self):
        d = self.__dict__.copy()
        del d['_cached_instances']
        return d

    def __setstate__(self, state):
        self.__dict__ = state.copy()
        self.__dict__['_cached_instances'] = WeakValueDictionary()
Example #20
class DialogManager:
    """ Opens and tracks Qt dialogs. """
    def __init__(self, parent: QWidget) -> None:
        self._parent = parent  # All dialogs will be children of this widget.
        self._dialogs = WeakValueDictionary(
        )  # Tracks a single instance of each dialog by its opener.

    def attach(self, dialog: QDialog) -> None:
        """ Attach <dialog> to our parent, preserving its window flags. """
        flags = dialog.windowFlags()
        dialog.setParent(self._parent)
        dialog.setWindowFlags(flags)

    def open_unique(self, opener: DialogOpener) -> None:
        """ If a previous dialog is open, set focus on it, otherwise open a new one using <opener>. """
        key = getattr(opener, '__qualname__', id(opener))
        dialog = self._dialogs.get(key)
        if dialog is not None and not dialog.isHidden():
            dialog.activateWindow()
        else:
            dialog = self._dialogs[key] = opener()
            self.attach(dialog)
            dialog.show()

    def yes_or_no(self, title: str, message: str) -> bool:
        """ Present a modal yes/no dialog and return the user's response as True/False. """
        yes, no = QMessageBox.Yes, QMessageBox.No
        button = QMessageBox.question(self._parent, title, message, yes | no)
        return button == yes

    def open_file(self, title="Open File", file_exts="", start_dir=".") -> str:
        """ Present a modal dialog for the user to select a file to open.
            Return the selected filename, or an empty string on cancel. """
        return QFileDialog.getOpenFileName(self._parent, title, start_dir,
                                           _filter_str(file_exts))[0]

    def open_files(self,
                   title="Open Files",
                   file_exts="",
                   start_dir=".") -> List[str]:
        """ Present a modal dialog for the user to select multiple files to open.
            Return a list of selected filenames, or an empty list on cancel. """
        return QFileDialog.getOpenFileNames(self._parent, title, start_dir,
                                            _filter_str(file_exts))[0]

    def save_file(self, title="Save File", file_exts="", start_dir=".") -> str:
        """ Present a modal dialog for the user to select a file to save.
            Return the selected filename, or an empty string on cancel. """
        return QFileDialog.getSaveFileName(self._parent, title, start_dir,
                                           _filter_str(file_exts))[0]
Example #21
File: windows.py Project: zerkz/LSP
class WindowRegistry(object):
    def __init__(self, configs: GlobalConfigs, documents: Any,
                 session_starter: Callable, sublime: Any,
                 handler_dispatcher: LanguageHandlerListener) -> None:
        self._windows = WeakValueDictionary(
        )  # type: WeakValueDictionary[int, WindowManager]
        self._configs = configs
        self._documents = documents
        self._session_starter = session_starter
        self._sublime = sublime
        self._handler_dispatcher = handler_dispatcher
        self._diagnostics_ui_class = None  # type: Optional[Callable]
        self._server_panel_factory = None  # type: Optional[Callable]
        self._settings = None  # type: Optional[Settings]

    def set_diagnostics_ui(self, ui_class: Any) -> None:
        self._diagnostics_ui_class = ui_class

    def set_server_panel_factory(self, factory: Callable) -> None:
        self._server_panel_factory = factory

    def set_settings_factory(self, settings: Settings) -> None:
        self._settings = settings

    def lookup(self, window: Any) -> WindowManager:
        state = self._windows.get(window.id())
        if state is None:
            if not self._settings:
                raise RuntimeError("no settings")
            workspace = ProjectFolders(window)
            window_configs = self._configs.for_window(window)
            window_documents = self._documents.for_window(
                window, workspace, window_configs)
            diagnostics_ui = self._diagnostics_ui_class(
                window,
                window_documents) if self._diagnostics_ui_class else None
            state = WindowManager(
                window=window,
                workspace=workspace,
                settings=self._settings,
                configs=window_configs,
                documents=window_documents,
                diagnostics=DiagnosticsStorage(diagnostics_ui),
                session_starter=self._session_starter,
                sublime=self._sublime,
                handler_dispatcher=self._handler_dispatcher,
                server_panel_factory=self._server_panel_factory)
            self._windows[window.id()] = state
        return state
Example #22
class NodeManager:
    _all: WeakNodeSet
    _by_id: WeakNodeDictionary
    _all_num: int
    _delegate_num: int

    def __init__(self):
        super().__init__()
        self._all = WeakSet()
        self._by_id = WeakValueDictionary()
        self._all_num = 0
        self._delegate_num = 0

    def register(self, node: BaseNode):
        assert node not in self._all, f'duplicate node {node!r}'
        assert node.id not in self._by_id, f'duplicate node ID {node.id!r}'
        self._all.add(node)
        self._by_id[node.id] = node
        self._all_num += 1
        if node.is_delegate:
            self._delegate_num += 1

    def get_node(self,
                 id: int,
                 default: Optional[BaseNode] = None) -> Optional[BaseNode]:
        return self._by_id.get(id, default=default)

    def get_delegates(self):
        return {node for node in self._all if node.is_delegate}

    def block(self, id: int):
        node = self.get_node(id)
        if node:
            node.is_blacked = True

    def nodes(self) -> Set[BaseNode]:
        return set(self._all)

    @property
    def delegate_num(self) -> int:
        return self._delegate_num

    @property
    def all_num(self) -> int:
        return self._all_num

    def __len__(self) -> int:
        return self._all_num
Example #23
class DialogManager:
    """ Tracks standard dialogs and opens common modal dialogs and returns their results. """

    _DEFAULT_DIALOG_FLAGS = Qt.CustomizeWindowHint | Qt.Dialog | Qt.WindowTitleHint | Qt.WindowCloseButtonHint

    def __init__(self, parent: QWidget) -> None:
        self._parent = parent  # All dialogs will be children of this widget.
        self._dialogs_by_cls = WeakValueDictionary(
        )  # Contains the current instance of each dialog class.

    def load(self,
             dlg_cls: Type[QDialog],
             flags=_DEFAULT_DIALOG_FLAGS) -> Optional[QDialog]:
        """ If a previous <dlg_cls> is open, set focus on it and return None, otherwise return a new one. """
        dialog = self._dialogs_by_cls.get(dlg_cls)
        if dialog is not None and not dialog.isHidden():
            dialog.show()
            dialog.activateWindow()
            return None
        dialog = self._dialogs_by_cls[dlg_cls] = dlg_cls(self._parent, flags)
        return dialog

    def yes_or_no(self, title: str, message: str) -> bool:
        """ Present a yes/no dialog and return the user's response as a bool. """
        yes, no = QMessageBox.Yes, QMessageBox.No
        button = QMessageBox.question(self._parent, title, message, yes | no)
        return button == yes

    def open_file(self, title="Open File", file_exts="", start_dir=".") -> str:
        """ Present a modal dialog for the user to select a file to open.
            Return the selected filename, or an empty string on cancel. """
        return QFileDialog.getOpenFileName(self._parent, title, start_dir,
                                           _filter_str(file_exts))[0]

    def open_files(self,
                   title="Open Files",
                   file_exts="",
                   start_dir=".") -> List[str]:
        """ Present a modal dialog for the user to select multiple files to open.
            Return a list of selected filenames, or an empty list on cancel. """
        return QFileDialog.getOpenFileNames(self._parent, title, start_dir,
                                            _filter_str(file_exts))[0]

    def save_file(self, title="Save File", file_exts="", start_dir=".") -> str:
        """ Present a modal dialog for the user to select a file to save.
            Return the selected filename, or an empty string on cancel. """
        return QFileDialog.getSaveFileName(self._parent, title, start_dir,
                                           _filter_str(file_exts))[0]
Example #24
class BaseContextualizable(object):

    def __init__(self, *args, **kwargs):
        super(BaseContextualizable, self).__init__(*args, **kwargs)

        if not hasattr(self, '_contexts'):
            self._contexts = WeakValueDictionary()

    @property
    def context(self):
        return None

    def contextualize(self, context):
        """
        Return an object with the given context. If the provided ``context`` is
        `None`, then `self` MUST be returned unmodified.

        It is generally not correct to set a field on the object and return the
        same object as this would change the context for other users of the
        object. Also, returning a copy of the object is usually inappropriate
        for mutable objects. Immutable objects may maintain a 'context'
        property and return a copy of themselves with that property set to the
        provided ``context`` argument.
        """
        ctxd = self._contexts.get(context)
        if ctxd is not None:
            return ctxd
        ctxd = self.contextualize_augment(context)
        self._contexts[context] = ctxd
        return ctxd

    def decontextualize(self):
        """
        Return the object with all contexts removed
        """
        return self

    def add_contextualization(self, context, contextualization):
        try:
            self._contexts[context] = contextualization
        except AttributeError:
            self._contexts = WeakValueDictionary()
            self._contexts[context] = contextualization

    def contextualize_augment(self, context):
        return self
Example #25
class ThreadLocalEntityCache(local):
    def __init__(self):
        self.lock = Lock()
        self._dict = WeakValueDictionary()

    def __contains__(self, key):
        return key in self._dict

    def __getitem__(self, key):
        return self._dict[key]

    def get(self, key, default=None):
        return self._dict.get(key, default)

    def clear(self):
        self._dict.clear()

    def keys(self):
        return self._dict.keys()

    def update(self, key, value):
        """ Extract, insert or remove a value for a given key.
        """
        with self.lock:
            if value is None:
                # remove
                try:
                    del self._dict[key]
                except KeyError:
                    pass
                else:
                    return None
            elif callable(value):
                try:
                    # extract
                    return self._dict[key]
                except KeyError:
                    # construct and insert
                    new_value = value()
                    self._dict[key] = new_value
                    return new_value
            else:
                # insert or replace
                self._dict[key] = value
                return value
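
The three modes of update() in one sketch (the Entity class is illustrative):

class Entity:
    pass

cache = ThreadLocalEntityCache()
e = cache.update('node-1', Entity)          # callable: construct and insert
assert cache.update('node-1', Entity) is e  # existing key: extract, no rebuild
cache.update('node-1', None)                # None: remove the entry
assert 'node-1' not in cache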
Example #26
class Cache(object):

    def __init__(self, config):
        self._entries = dict()

        # indexed by (domain, user) tuple
        self._entry_by_domain = WeakValueDictionary()

        self._config = config

    def set(self, service, username, password, timeout=None):
        expire = None
        if timeout:
            expire = int(time.time()) + timeout
            
        entry = CacheEntry(username, password, expire)
        self._entries[service] = entry

        domain = self._config.get_domain_for_service(service) 
        if domain is not None:
            logging.debug("updating password for domain,user = %s,%s", domain, username)
            self._entry_by_domain[(domain, username)] = entry

    def get(self, service):
        entry = self._entries.get(service)
        if entry is None:
            domain   = self._config.get_domain_for_service(service)
            username = self._config.get_user_for_service(service)

            if domain and username:
                logging.debug("searching in domain %s", domain)
                entry = self._entry_by_domain.get((domain, username))
                
        if entry is None:
            return None

        now = int(time.time())
        if entry.expire and entry.expire <= now:
            del self._entries[service]
            # no need to delete from the domain dict, since its values are weak references
            return None
        
        return entry
Example #27
class FileServer(MessageDispatcher):
    """Serve files via an http server."""
    
    FILE_CONTENT = "/file/"
    
    def __init__(self, port=8001):
        """Create a new file server."""
        self.app = Flask(self.__class__.__name__)
        self.files = WeakValueDictionary()
        self.app.route(self.FILE_CONTENT + "<id>")(self.serve_file_content_by_id)
        self.port = port
        
    def serve_file_content_by_id(self, id):
        """Serve a file by an id."""
        file = self.files.get(id, NoFile())
        return file.flask_send_file()
    
    def add_file(self, file):
        """Serve a file until noone needs it."""
        self.files[str(file.get_id())] = file
        file.served_by(self)
    
    def receive_new_image(self, message):
        """React to new images."""
        image = message["image"]
        self.add_file(image)

    def get_file_content_path(self, file):
        """Return the path component for the file."""
        return self.FILE_CONTENT + str(file.get_id())
    
    def get_port(self):
        """Return the port used to serve the files."""
        return self.port
    
    def run(self):
        """Run the server."""
        self.app.run(host="0.0.0.0", port=self.port)
       
    def run_in_parallel(self):
        """Run the server in parellel."""
        thread = threading.Thread(target=self.run, daemon=True)
        thread.start()
Example #28
class SharedSinkProvider(SinkProviderBase):
    def __init__(self, key_selector):
        self._key_selector = key_selector
        self._cache = WeakValueDictionary()
        super(SharedSinkProvider, self).__init__()

    def CreateSink(self, properties):
        key = self._key_selector(properties)
        if key:
            sink = self._cache.get(key)
            if not sink:
                new_sink = self.next_provider.CreateSink(properties)
                sink = RefCountedSink(new_sink)
                self._cache[key] = sink
            return sink
        else:
            return self.next_provider.CreateSink(properties)

    @property
    def sink_class(self):
        return self.next_provider.sink_class
Example #29
class WeakLocks(object):
    """
    A cache of DeferredLocks that gets garbage collected after the lock has
    been utilized
    """

    def __init__(self):
        self._locks = WeakValueDictionary()

    def get_lock(self, key):
        """
        Get lock based on key. If no lock exists, create one.

        :param key: Some arbitrary key
        :return: :class:`DeferredLock`
        """
        lock = self._locks.get(key)
        if not lock:
            lock = DeferredLock()
            self._locks[key] = lock
        return lock
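
The intended lifecycle, sketched. DeferredLock is Twisted's, and the immediate collection noted below holds under CPython's reference counting:

locks = WeakLocks()
lock = locks.get_lock('user:42')
assert locks.get_lock('user:42') is lock  # same key, same lock while referenced
del lock
# With no caller holding the lock, the cache entry is collected, and the
# next get_lock('user:42') builds a fresh DeferredLock.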
Example #30
class SharedSinkProvider(SinkProviderBase):
  def __init__(self, key_selector):
    self._key_selector = key_selector
    self._cache = WeakValueDictionary()
    super(SharedSinkProvider, self).__init__()

  def CreateSink(self, properties):
    key = self._key_selector(properties)
    if key:
      sink = self._cache.get(key)
      if not sink:
        new_sink = self.next_provider.CreateSink(properties)
        sink = RefCountedSink(new_sink)
        self._cache[key] = sink
      return sink
    else:
      return self.next_provider.CreateSink(properties)

  @property
  def sink_class(self):
    return self.next_provider.sink_class
Example #31
class Index2Source(IPackSource):
    @property
    def index(self):
        return self._index

    @property
    def pack(self):
        return self._pack

    def __init__(self, pack, index):
        self._pack = pack
        self._index = index
        self._files = WeakValueDictionary()
        self._file_path_map = {}

    def file_exists(self, path_or_hash):
        if isinstance(path_or_hash, str):
            path_or_hash = _compute_hash(path_or_hash)
        return path_or_hash in self.index.files

    def get_file(self, path_or_hash):
        def from_hash(hash):
            file = self._files.get(hash, None)
            if file is not None:
                return file

            index = self.index.files[hash]
            file = FileFactory.get(self.pack, index)
            self._files[hash] = file
            return file

        if isinstance(path_or_hash, str):
            f = from_hash(_compute_hash(path_or_hash))
            f.path = path_or_hash
            return f
        return from_hash(path_or_hash)

    def __iter__(self):
        for file in self.index.files.values():
            yield self.get_file(file.file_key)
Example #32
class StoreCache(object):
    '''
    Cache of stores previously cached by a `BDS <BundleDependencyStore>`.

    We don't want to keep hold of a store if there's no BDS using it, so we only reference
    the stores weakly.
    '''
    def __init__(self):
        self._cache = WeakValueDictionary()

        self._refcounts = dict()
        '''
        Counts for references to cached BDS stores. Needed so we know when we can do clean-up.
        '''

    def check_out(self, key):
        self._refcounts[key] = self._refcounts.get(key, 0) + 1

    def check_in(self, key):
        rc = self._refcounts[key]
        if rc == 1:
            del self._refcounts[key]
        else:
            self._refcounts[key] = rc - 1
        return rc - 1

    def refcount(self, key):
        return self._refcounts.get(key, 0)

    def get(self, key, default=None):
        return self._cache.get(key, default)

    def __getitem__(self, key):
        return self._cache[key]

    def __setitem__(self, key, val):
        if key in self._cache:
            raise Exception(f"There's already an entry for {key}")
        self._cache[key] = val
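
How the check_out/check_in counters are meant to bracket use of a cached store, sketched with a hypothetical Store object:

class Store:
    pass

store = Store()
cache = StoreCache()
cache['bundle-1'] = store
cache.check_out('bundle-1')               # first user
cache.check_out('bundle-1')               # second user
assert cache.check_in('bundle-1') == 1    # one user remains
if cache.check_in('bundle-1') == 0:
    pass  # last user checked in: safe to tear the store down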
Example #33
class Layout(object):
    def __init__(self):
        self.solver = Solver()
        self.introns = WeakValueDictionary()

    def update(self, element):
        self.solver.autosolve = False
        for constraint in element.constraints[1]:
            self.solver.remove_constraint(constraint)
        for constraint in element.constraints[0]:
            self.solver.add_constraint(constraint)
        element.constraints = [], element.constraints[0]
        self.solver.autosolve = True

    def add_intron(self, source, intron):
        self.introns[source] = intron

    def pop_intron(self, source):
        return self.introns.pop(source)

    def get_intron(self, source, otherwise=None):
        return self.introns.get(source, otherwise)
Example #34
class FileYAMLReader(AbstractYAMLReader):
    """Implementation of the YAML reader."""
    def __init__(self,
                 config_files,
                 filter_parameters=None,
                 filter_filenames=True,
                 **kwargs):
        super(FileYAMLReader, self).__init__(config_files)

        self.file_handlers = {}
        self.filter_filenames = self.info.get('filter_filenames',
                                              filter_filenames)
        self.filter_parameters = filter_parameters or {}
        if kwargs:
            logger.warning(
                "Unrecognized/unused reader keyword argument(s) '{}'".format(
                    kwargs))
        self.coords_cache = WeakValueDictionary()

    @property
    def sensor_names(self):
        if not self.file_handlers:
            return self.info['sensors']

        file_handlers = (handlers[0]
                         for handlers in self.file_handlers.values())
        sensor_names = set()
        for fh in file_handlers:
            try:
                sensor_names.update(fh.sensor_names)
            except NotImplementedError:
                continue
        if not sensor_names:
            return self.info['sensors']
        return sorted(sensor_names)

    @property
    def available_dataset_ids(self):
        for ds_id in self.all_dataset_ids:
            fts = self.ids[ds_id]["file_type"]
            if isinstance(fts, str) and fts in self.file_handlers:
                yield ds_id
            elif any(ft in self.file_handlers for ft in fts):
                yield ds_id

    @property
    def start_time(self):
        if not self.file_handlers:
            raise RuntimeError("Start time unknown until files are selected")
        return min(x[0].start_time for x in self.file_handlers.values())

    @property
    def end_time(self):
        if not self.file_handlers:
            raise RuntimeError("End time unknown until files are selected")
        return max(x[-1].end_time for x in self.file_handlers.values())

    @staticmethod
    def check_file_covers_area(file_handler, check_area):
        """Checks if the file covers the current area.

        If the file doesn't provide any bounding box information or 'area'
        was not provided in `filter_parameters`, the check returns True.
        """
        try:
            gbb = Boundary(*file_handler.get_bounding_box())
        except NotImplementedError as err:
            logger.debug("Bounding box computation not implemented: %s",
                         str(err))
        else:
            abb = AreaDefBoundary(get_area_def(check_area), frequency=1000)

            intersection = gbb.contour_poly.intersection(abb.contour_poly)
            if not intersection:
                return False
        return True

    def find_required_filehandlers(self, requirements, filename_info):
        """Find the necessary fhs for the current filehandler.

        We assume here requirements are available.
        """
        req_fh = []
        if requirements:
            for requirement in requirements:
                for fhd in self.file_handlers[requirement]:
                    # FIXME: Isn't this super wasteful? filename_info.items()
                    # every iteration?
                    if (all(item in filename_info.items()
                            for item in fhd.filename_info.items())):
                        req_fh.append(fhd)
                        break
                else:
                    raise RuntimeError('No matching file in ' + requirement)
                    # break everything and continue to next
                    # filetype!
        return req_fh

    def sorted_filetype_items(self):
        """Sort the instance's filetypes in using order."""
        processed_types = []
        file_type_items = deque(self.config['file_types'].items())
        while len(file_type_items):
            filetype, filetype_info = file_type_items.popleft()

            requirements = filetype_info.get('requires')
            if requirements is not None:
                # requirements have not been processed yet -> wait
                missing = [
                    req for req in requirements if req not in processed_types
                ]
                if missing:
                    file_type_items.append((filetype, filetype_info))
                    continue

            processed_types.append(filetype)
            yield filetype, filetype_info

    @staticmethod
    def filename_items_for_filetype(filenames, filetype_info):
        """Iterator over the filenames matching *filetype_info*."""
        matched_files = []
        for pattern in filetype_info['file_patterns']:
            for filename in match_filenames(filenames, pattern):
                if filename in matched_files:
                    continue
                try:
                    filename_info = parse(pattern,
                                          get_filebase(filename, pattern))
                except ValueError:
                    logger.debug("Can't parse %s with %s.", filename, pattern)
                    continue
                matched_files.append(filename)
                yield filename, filename_info

    def new_filehandler_instances(self, filetype_info, filename_items):
        """Generate new filehandler instances."""
        requirements = filetype_info.get('requires')
        filetype_cls = filetype_info['file_reader']
        for filename, filename_info in filename_items:
            try:
                req_fh = self.find_required_filehandlers(
                    requirements, filename_info)
            except RuntimeError:
                logger.warning("Can't find requirements for %s", filename)
                continue
            except KeyError:
                logger.warning("Missing requirements for %s", filename)
                continue

            yield filetype_cls(filename, filename_info, filetype_info, *req_fh)

    def time_matches(self, fstart, fend):
        start_time = self.filter_parameters.get('start_time')
        end_time = self.filter_parameters.get('end_time')
        fend = fend or fstart
        if start_time and fend and fend < start_time:
            return False
        if end_time and fstart and fstart > end_time:
            return False
        return True

    def metadata_matches(self, sample_dict, file_handler=None):
        # special handling of start/end times
        if not self.time_matches(sample_dict.get('start_time'),
                                 sample_dict.get('end_time')):
            return False
        for key, val in self.filter_parameters.items():
            if key != 'area' and key not in sample_dict:
                continue

            if key in ['start_time', 'end_time']:
                continue
            elif key == 'area' and file_handler:
                if not self.check_file_covers_area(file_handler, val):
                    logger.info('Filtering out %s based on area',
                                file_handler.filename)
                    break
            elif key in sample_dict and val != sample_dict[key]:
                # don't use this file
                break
        else:
            # all the metadata keys are equal
            return True
        return False

    def filter_filenames_by_info(self, filename_items):
        """Filter out file using metadata from the filenames.

        Currently only uses start and end time. If only start time is available
        from the filename, keep all the filename that have a start time before
        the requested end time.
        """
        for filename, filename_info in filename_items:
            fend = filename_info.get('end_time')
            fstart = filename_info.setdefault('start_time', fend)
            if fend and fend < fstart:
                # correct for filenames with 1 date and 2 times
                fend = fend.replace(year=fstart.year,
                                    month=fstart.month,
                                    day=fstart.day)
                filename_info['end_time'] = fend
            if self.metadata_matches(filename_info):
                yield filename, filename_info

    def filter_fh_by_metadata(self, filehandlers):
        """Filter out filehandlers using provide filter parameters."""
        for filehandler in filehandlers:
            filehandler.metadata['start_time'] = filehandler.start_time
            filehandler.metadata['end_time'] = filehandler.end_time
            if self.metadata_matches(filehandler.metadata, filehandler):
                yield filehandler

    def filter_selected_filenames(self, filenames):
        """Filter the provided filenames based on metadata in the filename."""
        for filetype, filetype_info in self.sorted_filetype_items():
            filename_iter = self.filename_items_for_filetype(
                filenames, filetype_info)
            if self.filter_filenames:
                filename_iter = self.filter_filenames_by_info(filename_iter)

            for fn, _ in filename_iter:
                yield fn

    def new_filehandlers_for_filetype(self, filetype_info, filenames):
        """Create filehandlers for a given filetype."""
        filename_iter = self.filename_items_for_filetype(
            filenames, filetype_info)
        if self.filter_filenames:
            # preliminary filter of filenames based on start/end time
            # to reduce the number of files to open
            filename_iter = self.filter_filenames_by_info(filename_iter)
        filehandler_iter = self.new_filehandler_instances(
            filetype_info, filename_iter)
        filtered_iter = self.filter_fh_by_metadata(filehandler_iter)
        return list(filtered_iter)

    def create_filehandlers(self, filenames):
        """Organize the filenames into file types and create file handlers."""
        filenames = list(OrderedDict.fromkeys(filenames))
        logger.debug("Assigning to %s: %s", self.info['name'], filenames)

        self.info.setdefault('filenames', []).extend(filenames)
        filename_set = set(filenames)

        # load files that we know about by creating the file handlers
        for filetype, filetype_info in self.sorted_filetype_items():
            filehandlers = self.new_filehandlers_for_filetype(
                filetype_info, filename_set)

            filename_set -= set([fhd.filename for fhd in filehandlers])
            if filehandlers:
                self.file_handlers[filetype] = sorted(
                    filehandlers,
                    key=lambda fhd: (fhd.start_time, fhd.filename))

        # update existing dataset IDs with information from the file handler
        self.update_ds_ids_from_file_handlers()

        # load any additional dataset IDs determined dynamically from the file
        self.add_ds_ids_from_files()

    def update_ds_ids_from_file_handlers(self):
        """Update DatasetIDs with information from loaded files.

        This is useful, for example, if dataset resolution may change
        depending on what files were loaded.

        """
        for file_handlers in self.file_handlers.values():
            fh = file_handlers[0]
            # update resolution in the dataset IDs for this file's resolution
            res = getattr(fh, 'resolution', None)
            if res is None:
                continue

            for ds_id, ds_info in list(self.ids.items()):
                if fh.filetype_info['file_type'] != ds_info['file_type']:
                    continue
                if ds_id.resolution is not None:
                    continue
                ds_info['resolution'] = res
                new_id = DatasetID.from_dict(ds_info)
                self.ids[new_id] = ds_info
                del self.ids[ds_id]

    def add_ds_ids_from_files(self):
        """Check files for more dynamically discovered datasets."""
        for file_handlers in self.file_handlers.values():
            try:
                fh = file_handlers[0]
                avail_ids = fh.available_datasets()
            except NotImplementedError:
                continue

            # dynamically discover other available datasets
            for ds_id, ds_info in avail_ids:
                # don't overwrite an existing dataset
                # especially from the yaml config
                coordinates = ds_info.get('coordinates')
                if isinstance(coordinates, list):
                    # xarray doesn't like concatenating attributes that are
                    # lists: https://github.com/pydata/xarray/issues/2060
                    ds_info['coordinates'] = tuple(ds_info['coordinates'])
                self.ids.setdefault(ds_id, ds_info)

    @staticmethod
    def _load_dataset(dsid, ds_info, file_handlers, dim='y'):
        """Load only a piece of the dataset."""
        slice_list = []
        failure = True
        for fh in file_handlers:
            try:
                projectable = fh.get_dataset(dsid, ds_info)
                if projectable is not None:
                    slice_list.append(projectable)
                    failure = False
            except KeyError:
                logger.warning("Failed to load {} from {}".format(dsid, fh),
                               exc_info=True)

        if failure:
            raise KeyError(
                "Could not load {} from any provided files".format(dsid))

        if dim not in slice_list[0].dims:
            return slice_list[0]
        res = xr.concat(slice_list, dim=dim)

        combined_info = file_handlers[0].combine_info(
            [p.attrs for p in slice_list])

        res.attrs = combined_info
        return res

    def _load_dataset_data(self,
                           file_handlers,
                           dsid,
                           xslice=slice(None),
                           yslice=slice(None)):
        ds_info = self.ids[dsid]
        proj = self._load_dataset(dsid, ds_info, file_handlers)
        # FIXME: areas could be concatenated here
        # Update the metadata
        proj.attrs['start_time'] = file_handlers[0].start_time
        proj.attrs['end_time'] = file_handlers[-1].end_time

        return proj

    def _preferred_filetype(self, filetypes):
        """Get the preferred filetype out of the *filetypes* list.

        At the moment, it just returns the first filetype that has been loaded.
        """
        if not isinstance(filetypes, list):
            filetypes = [filetypes]

        # look through the file types and use the first one that we have loaded
        for filetype in filetypes:
            if filetype in self.file_handlers:
                return filetype
        return None

    def _load_area_def(self, dsid, file_handlers):
        """Load the area definition of *dsid*."""
        area_defs = [fh.get_area_def(dsid) for fh in file_handlers]
        area_defs = [
            area_def for area_def in area_defs if area_def is not None
        ]

        final_area = StackedAreaDefinition(*area_defs)
        return final_area.squeeze()

    def _get_coordinates_for_dataset_key(self, dsid):
        """Get the coordinate dataset keys for *dsid*."""
        ds_info = self.ids[dsid]
        cids = []

        for cinfo in ds_info.get('coordinates', []):
            if not isinstance(cinfo, dict):
                cinfo = {'name': cinfo}

            cinfo['resolution'] = ds_info['resolution']
            if 'polarization' in ds_info:
                cinfo['polarization'] = ds_info['polarization']
            cid = DatasetID(**cinfo)
            cids.append(self.get_dataset_key(cid))

        return cids

    def _get_coordinates_for_dataset_keys(self, dsids):
        """Get all coordinates."""
        coordinates = {}
        for dsid in dsids:
            cids = self._get_coordinates_for_dataset_key(dsid)
            coordinates.setdefault(dsid, []).extend(cids)
        return coordinates

    def _get_file_handlers(self, dsid):
        """Get the file handler to load this dataset."""
        ds_info = self.ids[dsid]

        filetype = self._preferred_filetype(ds_info['file_type'])
        if filetype is None:
            logger.warning(
                "Required file type '%s' not found or loaded for "
                "'%s'", ds_info['file_type'], dsid.name)
        else:
            return self.file_handlers[filetype]

    def _make_area_from_coords(self, coords):
        """Create an appropriate area with the given *coords*."""
        if len(coords) == 2:
            lon_sn = coords[0].attrs.get('standard_name')
            lat_sn = coords[1].attrs.get('standard_name')
            if lon_sn == 'longitude' and lat_sn == 'latitude':
                key = None
                try:
                    key = (coords[0].data.name, coords[1].data.name)
                    sdef = self.coords_cache.get(key)
                except AttributeError:
                    sdef = None
                if sdef is None:
                    sdef = SwathDefinition(*coords)
                    if key is not None:
                        self.coords_cache[key] = sdef
                sensor_str = '_'.join(self.info['sensors'])
                shape_str = '_'.join(map(str, coords[0].shape))
                sdef.name = "{}_{}_{}_{}".format(sensor_str, shape_str,
                                                 coords[0].attrs['name'],
                                                 coords[1].attrs['name'])
                return sdef
            else:
                raise ValueError(
                    'Coordinates info object missing standard_name key: ' +
                    str(coords))
        elif len(coords) != 0:
            raise NameError("Don't know what to do with coordinates " +
                            str(coords))

    def _load_dataset_area(self, dsid, file_handlers, coords):
        """Get the area for *dsid*."""
        try:
            return self._load_area_def(dsid, file_handlers)
        except NotImplementedError:
            if any(x is None for x in coords):
                logger.warning(
                    "Failed to load coordinates for '{}'".format(dsid))
                return None

            area = self._make_area_from_coords(coords)
            if area is None:
                logger.debug("No coordinates found for %s", str(dsid))
            return area

    # TODO: move this out of here.
    def _get_slices(self, area):
        """Get the slices of raw data covering area.

        Args:
            area: the area to slice.

        Returns:
            slice_kwargs: kwargs to pass on to loading giving the span of the
                data to load.
            area: the trimmed area corresponding to the slices.
        """
        slice_kwargs = {}

        if area is not None and self.filter_parameters.get('area') is not None:
            try:
                slices = get_area_slices(area, self.filter_parameters['area'])
                area = get_sub_area(area, *slices)
                slice_kwargs['xslice'], slice_kwargs['yslice'] = slices
            except (NotImplementedError, AttributeError):
                logger.info("Cannot compute specific slice of data to load.")

        return slice_kwargs, area

    def _load_dataset_with_area(self, dsid, coords):
        """Loads *dsid* and it's area if available."""
        file_handlers = self._get_file_handlers(dsid)
        if not file_handlers:
            return

        area = self._load_dataset_area(dsid, file_handlers, coords)
        slice_kwargs, area = self._get_slices(area)

        try:
            ds = self._load_dataset_data(file_handlers, dsid, **slice_kwargs)
        except (KeyError, ValueError) as err:
            logger.exception("Could not load dataset '%s': %s", dsid, str(err))
            return None

        if area is not None:
            ds.attrs['area'] = area
            if (('x' not in ds.coords) or ('y' not in ds.coords)) and \
                    hasattr(area, 'get_proj_vectors_dask'):
                ds['x'], ds['y'] = area.get_proj_vectors_dask(CHUNK_SIZE)
        return ds

    def _load_ancillary_variables(self, datasets):
        """Load the ancillary variables of `datasets`."""
        all_av_ids = set()
        for dataset in datasets.values():
            ancillary_variables = dataset.attrs.get('ancillary_variables', [])
            if not isinstance(ancillary_variables, (list, tuple, set)):
                ancillary_variables = ancillary_variables.split(' ')
            av_ids = []
            for key in ancillary_variables:
                try:
                    av_ids.append(self.get_dataset_key(key))
                except KeyError:
                    logger.warning("Can't load ancillary dataset %s", str(key))

            all_av_ids |= set(av_ids)
            dataset.attrs['ancillary_variables'] = av_ids
        loadable_av_ids = [
            av_id for av_id in all_av_ids if av_id not in datasets
        ]
        if not all_av_ids:
            return
        if loadable_av_ids:
            self.load(loadable_av_ids, previous_datasets=datasets)

        for dataset in datasets.values():
            new_vars = []
            for av_id in dataset.attrs.get('ancillary_variables', []):
                if isinstance(av_id, DatasetID):
                    new_vars.append(datasets[av_id])
                else:
                    new_vars.append(av_id)
            dataset.attrs['ancillary_variables'] = new_vars

    def load(self, dataset_keys, previous_datasets=None):
        """Load `dataset_keys`.

        If `previous_datasets` is provided, do not reload those.
        """
        all_datasets = previous_datasets or DatasetDict()
        datasets = DatasetDict()

        # Include coordinates in the list of datasets to load
        dsids = [self.get_dataset_key(ds_key) for ds_key in dataset_keys]
        coordinates = self._get_coordinates_for_dataset_keys(dsids)
        all_dsids = list(set().union(*coordinates.values())) + dsids

        for dsid in all_dsids:
            if dsid in all_datasets:
                continue
            coords = [
                all_datasets.get(cid, None)
                for cid in coordinates.get(dsid, [])
            ]
            ds = self._load_dataset_with_area(dsid, coords)
            if ds is not None:
                all_datasets[dsid] = ds
                if dsid in dsids:
                    datasets[dsid] = ds
        self._load_ancillary_variables(all_datasets)

        return datasets
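
The reader above is organized as a chain of generators: filenames are parsed,
filtered on filename metadata, turned into file handlers, then filtered again
on file metadata. A minimal self-contained sketch of that pipeline shape
(made-up filenames and stand-in helpers, no satpy dependencies):

from datetime import datetime

def parse_names(filenames):
    # stand-in for filename_items_for_filetype: pull a start time out of
    # each name
    for fn in filenames:
        yield fn, {'start_time': datetime.strptime(fn[:8], '%Y%m%d')}

def filter_by_time(items, start, end):
    # stand-in for filter_filenames_by_info / time_matches
    for fn, info in items:
        if start <= info['start_time'] <= end:
            yield fn, info

names = ['20190521_a.nc', '20190522_b.nc', '20200101_c.nc']
kept = list(filter_by_time(parse_names(names),
                           datetime(2019, 5, 1), datetime(2019, 6, 1)))
# kept holds the two May 2019 files; no file had to be opened to decide that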
Example No. 38
class User(object):
    EVENT_CHAT_MESSAGE = 'chat_message'
    """On new private chat message

    :param user: steam user
    :type user: :class:`.SteamUser`
    :param message: message text
    :type message: str
    """

    persona_state = EPersonaState.Online    #: current persona state
    user = None                             #: :class:`.SteamUser` instance once logged on
    current_games_played = []               #: :class:`list` of app ids currently being played

    def __init__(self, *args, **kwargs):
        super(User, self).__init__(*args, **kwargs)

        self._user_cache = WeakValueDictionary()

        self.on(self.EVENT_DISCONNECTED, self.__handle_disconnect)
        self.on(self.EVENT_LOGGED_ON, self.__handle_set_persona)
        self.on(EMsg.ClientPersonaState, self.__handle_persona_state)
        self.on(EMsg.ClientFriendMsgIncoming, self.__handle_message_incoming)

    def __handle_message_incoming(self, msg):
        if msg.body.chat_entry_type == EChatEntryType.ChatMsg:
            user = self.get_user(msg.body.steamid_from)
            self.emit("chat_message", user, msg.body.message.decode('utf-8'))

    def __handle_disconnect(self):
        self.user = None
        self.current_games_played = []

    def __handle_set_persona(self):
        self.user = self.get_user(self.steam_id, False)

        if self.persona_state != EPersonaState.Offline:
            self.change_status(persona_state=self.persona_state)

    def __handle_persona_state(self, message):
        for friend in message.body.friends:
            steamid = friend.friendid

            if steamid in self._user_cache:
                suser = self._user_cache[steamid]
                suser._pstate = friend
                suser._pstate_ready.set()

    def change_status(self, **kwargs):
        """
        Set name, persona state, flags

        .. note::
            Changing persona state will also change :attr:`persona_state`

        :param persona_state: persona state (Online/Offline/Away/etc)
        :type persona_state: :class:`.EPersonaState`
        :param player_name: profile name
        :type player_name: :class:`str`
        :param persona_state_flags: persona state flags
        :type persona_state_flags: :class:`.EPersonaStateFlag`
        """
        if not kwargs:
            return

        self.persona_state = kwargs.get('persona_state', self.persona_state)

        message = MsgProto(EMsg.ClientChangeStatus)
        proto_fill_from_dict(message.body, kwargs)
        self.send(message)

    def request_persona_state(self, steam_ids, state_flags=863):
        """Request persona state data

        :param steam_ids: list of steam ids
        :type  steam_ids: :class:`list`
        :param state_flags: client state flags
        :type  state_flags: :class:`.EClientPersonaStateFlag`
        """
        m = MsgProto(EMsg.ClientRequestFriendData)
        m.body.persona_state_requested = state_flags
        m.body.friends.extend(steam_ids)
        self.send(m)

    def get_user(self, steam_id, fetch_persona_state=True):
        """Get :class:`.SteamUser` instance for ``steam id``

        :param steam_id: steam id
        :type steam_id: :class:`int`, :class:`.SteamID`
        :param fetch_persona_state: whether to request persona state when necessary
        :type fetch_persona_state: :class:`bool`
        :return: SteamUser instance
        :rtype: :class:`.SteamUser`
        """
        steam_id = int(steam_id)
        suser = self._user_cache.get(steam_id, None)

        if suser is None:
            suser = SteamUser(steam_id, self)
            self._user_cache[steam_id] = suser

            if fetch_persona_state:
                self.request_persona_state([steam_id])

        return suser

    def games_played(self, app_ids):
        """
        Set the apps being played by the user

        :param app_ids: a list of application ids
        :type app_ids: :class:`list`

        These app ids will be recorded in :attr:`current_games_played`.
        """
        if not isinstance(app_ids, list):
            raise ValueError("Expected app_ids to be of type list")

        self.current_games_played = app_ids = list(map(int, app_ids))

        self.send(MsgProto(EMsg.ClientGamesPlayed),
                  {'games_played': [{'game_id': app_id} for app_id in app_ids]}
                  )
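
A minimal usage sketch for the event-driven API above. `SteamClient` is
assumed here to be the concrete class that mixes in this `User` class and
provides the `on`/`emit`/`send` machinery the methods above rely on:

def handle_chat(user, message_text):
    # called with the sending SteamUser and the decoded message text
    print(user, message_text)

client = SteamClient()  # hypothetical concrete client class
client.on(User.EVENT_CHAT_MESSAGE, handle_chat)
client.games_played([570])  # report one app id as currently being played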
Example No. 39
class FileYAMLReader(AbstractYAMLReader):
    """Primary reader base class that is configured by a YAML file.

    This class uses the idea of per-file "file handler" objects to read file
    contents and determine what is available in the file. This differs from
    the base :class:`AbstractYAMLReader` which does not depend on individual
    file handler objects. In almost all cases this class should be used
    instead of its base class; it can be used as a reader by itself and
    requires no subclassing.

    """

    def __init__(self,
                 config_files,
                 filter_parameters=None,
                 filter_filenames=True,
                 **kwargs):
        """Set up initial internal storage for loading file data."""
        super(FileYAMLReader, self).__init__(config_files)

        self.file_handlers = {}
        self.available_ids = {}
        self.filter_filenames = self.info.get('filter_filenames', filter_filenames)
        self.filter_parameters = filter_parameters or {}
        self.coords_cache = WeakValueDictionary()

    @property
    def sensor_names(self):
        """Names of sensors whose data is being loaded by this reader."""
        if not self.file_handlers:
            return self.info['sensors']

        file_handlers = (handlers[0] for handlers in
                         self.file_handlers.values())
        sensor_names = set()
        for fh in file_handlers:
            try:
                sensor_names.update(fh.sensor_names)
            except NotImplementedError:
                continue
        if not sensor_names:
            return self.info['sensors']
        return sorted(sensor_names)

    @property
    def available_dataset_ids(self):
        """Get DataIDs that are loadable by this reader."""
        return self.available_ids.keys()

    @property
    def start_time(self):
        """Start time of the earlier file used by this reader."""
        if not self.file_handlers:
            raise RuntimeError("Start time unknown until files are selected")
        return min(x[0].start_time for x in self.file_handlers.values())

    @property
    def end_time(self):
        """End time of the latest file used by this reader."""
        if not self.file_handlers:
            raise RuntimeError("End time unknown until files are selected")
        return max(x[-1].end_time for x in self.file_handlers.values())

    @staticmethod
    def check_file_covers_area(file_handler, check_area):
        """Check if the file covers the current area.

        If the file doesn't provide any bounding box information or 'area'
        was not provided in `filter_parameters`, the check returns True.
        """
        try:
            gbb = Boundary(*file_handler.get_bounding_box())
        except NotImplementedError as err:
            logger.debug("Bounding box computation not implemented: %s",
                         str(err))
        else:
            abb = AreaDefBoundary(get_area_def(check_area), frequency=1000)

            intersection = gbb.contour_poly.intersection(abb.contour_poly)
            if not intersection:
                return False
        return True

    def find_required_filehandlers(self, requirements, filename_info):
        """Find the necessary file handlers for the given requirements.

        We assume here requirements are available.

        Raises:
            KeyError, if no handler for the given requirements is available.
            RuntimeError, if there is a handler for the given requirements,
            but it doesn't match the filename info.

        """
        req_fh = []
        filename_info = set(filename_info.items())
        if requirements:
            for requirement in requirements:
                for fhd in self.file_handlers[requirement]:
                    if set(fhd.filename_info.items()).issubset(filename_info):
                        req_fh.append(fhd)
                        break
                else:
                    raise RuntimeError("No matching requirement file of type "
                                       "{}".format(requirement))
                    # break everything and continue to next
                    # filetype!
        return req_fh

    def sorted_filetype_items(self):
        """Sort the instance's filetypes in using order."""
        processed_types = []
        file_type_items = deque(self.config['file_types'].items())
        while len(file_type_items):
            filetype, filetype_info = file_type_items.popleft()

            requirements = filetype_info.get('requires')
            if requirements is not None:
                # requirements have not been processed yet -> wait
                missing = [req for req in requirements
                           if req not in processed_types]
                if missing:
                    file_type_items.append((filetype, filetype_info))
                    continue

            processed_types.append(filetype)
            yield filetype, filetype_info

    @staticmethod
    def filename_items_for_filetype(filenames, filetype_info):
        """Iterate over the filenames matching *filetype_info*."""
        if not isinstance(filenames, set):
            # we perform set operations later on to improve performance
            filenames = set(filenames)
        for pattern in filetype_info['file_patterns']:
            matched_files = set()
            matches = _match_filenames(filenames, pattern)
            for filename in matches:
                try:
                    filename_info = parse(
                        pattern, _get_filebase(filename, pattern))
                except ValueError:
                    logger.debug("Can't parse %s with %s.", filename, pattern)
                    continue
                matched_files.add(filename)
                yield filename, filename_info
            filenames -= matched_files
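
    # A hedged illustration of the pattern parsing above, assuming
    # trollsift-style patterns (the file name and values are made up):
    #   parse('{platform}_{start_time:%Y%m%d%H%M}.nc',
    #         'NOAA20_201905211200.nc')
    #   -> {'platform': 'NOAA20', 'start_time': datetime(2019, 5, 21, 12, 0)}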

    def _new_filehandler_instances(self, filetype_info, filename_items, fh_kwargs=None):
        """Generate new filehandler instances."""
        requirements = filetype_info.get('requires')
        filetype_cls = filetype_info['file_reader']

        if fh_kwargs is None:
            fh_kwargs = {}

        for filename, filename_info in filename_items:
            try:
                req_fh = self.find_required_filehandlers(requirements,
                                                         filename_info)
            except KeyError as req:
                msg = "No handler for reading requirement {} for {}".format(
                    req, filename)
                warnings.warn(msg)
                continue
            except RuntimeError as err:
                warnings.warn(str(err) + ' for {}'.format(filename))
                continue

            yield filetype_cls(filename, filename_info, filetype_info, *req_fh, **fh_kwargs)

    def time_matches(self, fstart, fend):
        """Check that a file's start and end time mtach filter_parameters of this reader."""
        start_time = self.filter_parameters.get('start_time')
        end_time = self.filter_parameters.get('end_time')
        fend = fend or fstart
        if start_time and fend and fend < start_time:
            return False
        if end_time and fstart and fstart > end_time:
            return False
        return True

    def metadata_matches(self, sample_dict, file_handler=None):
        """Check that file metadata matches filter_parameters of this reader."""
        # special handling of start/end times
        if not self.time_matches(
                sample_dict.get('start_time'), sample_dict.get('end_time')):
            return False
        for key, val in self.filter_parameters.items():
            if key != 'area' and key not in sample_dict:
                continue

            if key in ['start_time', 'end_time']:
                continue
            elif key == 'area' and file_handler:
                if not self.check_file_covers_area(file_handler, val):
                    logger.info('Filtering out %s based on area',
                                file_handler.filename)
                    break
            elif key in sample_dict and val != sample_dict[key]:
                # don't use this file
                break
        else:
            # all the metadata keys are equal
            return True
        return False

    def filter_filenames_by_info(self, filename_items):
        """Filter out file using metadata from the filenames.

        Currently only uses start and end time. If only start time is available
        from the filename, keep all the filename that have a start time before
        the requested end time.
        """
        for filename, filename_info in filename_items:
            fend = filename_info.get('end_time')
            fstart = filename_info.setdefault('start_time', fend)
            if fend and fend < fstart:
                # correct for filenames with 1 date and 2 times
                fend = fend.replace(year=fstart.year,
                                    month=fstart.month,
                                    day=fstart.day)
                filename_info['end_time'] = fend
            if self.metadata_matches(filename_info):
                yield filename, filename_info

    def filter_fh_by_metadata(self, filehandlers):
        """Filter out filehandlers using provide filter parameters."""
        for filehandler in filehandlers:
            filehandler.metadata['start_time'] = filehandler.start_time
            filehandler.metadata['end_time'] = filehandler.end_time
            if self.metadata_matches(filehandler.metadata, filehandler):
                yield filehandler

    def filter_selected_filenames(self, filenames):
        """Filter provided files based on metadata in the filename."""
        if not isinstance(filenames, set):
            # we perform set operations later on to improve performance
            filenames = set(filenames)
        for _, filetype_info in self.sorted_filetype_items():
            filename_iter = self.filename_items_for_filetype(filenames,
                                                             filetype_info)
            if self.filter_filenames:
                filename_iter = self.filter_filenames_by_info(filename_iter)

            for fn, _ in filename_iter:
                yield fn

    def _new_filehandlers_for_filetype(self, filetype_info, filenames, fh_kwargs=None):
        """Create filehandlers for a given filetype."""
        filename_iter = self.filename_items_for_filetype(filenames,
                                                         filetype_info)
        if self.filter_filenames:
            # preliminary filter of filenames based on start/end time
            # to reduce the number of files to open
            filename_iter = self.filter_filenames_by_info(filename_iter)
        filehandler_iter = self._new_filehandler_instances(filetype_info,
                                                           filename_iter,
                                                           fh_kwargs=fh_kwargs)
        filtered_iter = self.filter_fh_by_metadata(filehandler_iter)
        return list(filtered_iter)

    def create_filehandlers(self, filenames, fh_kwargs=None):
        """Organize the filenames into file types and create file handlers."""
        filenames = list(OrderedDict.fromkeys(filenames))
        logger.debug("Assigning to %s: %s", self.info['name'], filenames)

        self.info.setdefault('filenames', []).extend(filenames)
        filename_set = set(filenames)
        created_fhs = {}
        # load files that we know about by creating the file handlers
        for filetype, filetype_info in self.sorted_filetype_items():
            filehandlers = self._new_filehandlers_for_filetype(filetype_info,
                                                               filename_set,
                                                               fh_kwargs=fh_kwargs)

            if filehandlers:
                created_fhs[filetype] = filehandlers
                self.file_handlers[filetype] = sorted(
                    self.file_handlers.get(filetype, []) + filehandlers,
                    key=lambda fhd: (fhd.start_time, fhd.filename))

        # load any additional dataset IDs determined dynamically from the file
        # and update any missing metadata that only the file knows
        self.update_ds_ids_from_file_handlers()
        return created_fhs

    def _file_handlers_available_datasets(self):
        """Generate a series of available dataset information.

        This is done by chaining file handler's
        :meth:`satpy.readers.file_handlers.BaseFileHandler.available_datasets`
        together. See that method's documentation for more information.

        Returns:
            Generator of (bool, dict) where the boolean tells whether the
            current dataset is available from any of the file handlers. The
            boolean can also be None in the case where no loaded file handler
            is configured to load the dataset. The
            dictionary is the metadata provided either by the YAML
            configuration files or by the file handler itself if it is a new
            dataset. The file handler may have also supplemented or modified
            the information.

        """
        # flatten all file handlers in to one list
        flat_fhs = (fh for fhs in self.file_handlers.values() for fh in fhs)
        id_values = list(self.all_ids.values())
        configured_datasets = ((None, ds_info) for ds_info in id_values)
        for fh in flat_fhs:
            # chain the 'available_datasets' methods together by calling the
            # current file handler's method with the previous ones result
            configured_datasets = fh.available_datasets(configured_datasets=configured_datasets)
        return configured_datasets
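
    # Hedged sketch of the chaining above: each handler receives the previous
    # stream of (is_avail, ds_info) pairs and may pass entries through, mark
    # them available, or append new ones. `self._can_load` below is a
    # hypothetical predicate, not a real satpy method:
    #
    #   def available_datasets(self, configured_datasets=None):
    #       for is_avail, ds_info in (configured_datasets or []):
    #           yield is_avail or self._can_load(ds_info), ds_info
    #       # ...then yield (True, new_info) for datasets found in the file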

    def update_ds_ids_from_file_handlers(self):
        """Add or modify available dataset information.

        Each file handler is consulted on whether or not it can load the
        dataset with the provided information dictionary.
        See
        :meth:`satpy.readers.file_handlers.BaseFileHandler.available_datasets`
        for more information.

        """
        avail_datasets = self._file_handlers_available_datasets()
        new_ids = {}
        for is_avail, ds_info in avail_datasets:
            coordinates = ds_info.get('coordinates')
            if isinstance(coordinates, list):
                # xarray doesn't like concatenating attributes that are
                # lists: https://github.com/pydata/xarray/issues/2060
                ds_info['coordinates'] = tuple(ds_info['coordinates'])

            ds_info.setdefault('modifiers', tuple())  # default to no mods

            # Create DataID for this dataset
            ds_id = DataID(self._id_keys, **ds_info)
            # all datasets
            new_ids[ds_id] = ds_info
            # available datasets
            # False == we have the file type but it doesn't have this dataset
            # None == we don't have the file type object to ask
            if is_avail:
                self.available_ids[ds_id] = ds_info
        self.all_ids = new_ids

    @staticmethod
    def _load_dataset(dsid, ds_info, file_handlers, dim='y', **kwargs):
        """Load only a piece of the dataset."""
        slice_list = []
        failure = True
        for fh in file_handlers:
            try:
                projectable = fh.get_dataset(dsid, ds_info)
                if projectable is not None:
                    slice_list.append(projectable)
                    failure = False
            except KeyError:
                logger.warning("Failed to load {} from {}".format(dsid, fh),
                               exc_info=True)

        if failure:
            raise KeyError(
                "Could not load {} from any provided files".format(dsid))

        if dim not in slice_list[0].dims:
            return slice_list[0]
        res = xr.concat(slice_list, dim=dim)

        combined_info = file_handlers[0].combine_info(
            [p.attrs for p in slice_list])

        res.attrs = combined_info
        return res

    def _load_dataset_data(self, file_handlers, dsid, **kwargs):
        ds_info = self.all_ids[dsid]
        proj = self._load_dataset(dsid, ds_info, file_handlers, **kwargs)
        # FIXME: areas could be concatenated here
        # Update the metadata
        proj.attrs['start_time'] = file_handlers[0].start_time
        proj.attrs['end_time'] = file_handlers[-1].end_time
        return proj

    def _preferred_filetype(self, filetypes):
        """Get the preferred filetype out of the *filetypes* list.

        At the moment, it just returns the first filetype that has been loaded.
        """
        if not isinstance(filetypes, list):
            filetypes = [filetypes]

        # look through the file types and use the first one that we have loaded
        for filetype in filetypes:
            if filetype in self.file_handlers:
                return filetype
        return None

    def _load_area_def(self, dsid, file_handlers, **kwargs):
        """Load the area definition of *dsid*."""
        return _load_area_def(dsid, file_handlers)

    def _get_coordinates_for_dataset_key(self, dsid):
        """Get the coordinate dataset keys for *dsid*."""
        ds_info = self.all_ids[dsid]
        cids = []

        for cinfo in ds_info.get('coordinates', []):
            if not isinstance(cinfo, dict):
                cinfo = {'name': cinfo}
            for key in self._co_keys:
                if key == 'name':
                    continue
                if ds_info.get(key) is not None:
                    cinfo[key] = ds_info[key]
            cid = DataQuery.from_dict(cinfo)
            cids.append(self.get_dataset_key(cid))

        return cids

    def _get_coordinates_for_dataset_keys(self, dsids):
        """Get all coordinates."""
        coordinates = {}
        for dsid in dsids:
            cids = self._get_coordinates_for_dataset_key(dsid)
            coordinates.setdefault(dsid, []).extend(cids)
        return coordinates

    def _get_file_handlers(self, dsid):
        """Get the file handler to load this dataset."""
        ds_info = self.all_ids[dsid]

        filetype = self._preferred_filetype(ds_info['file_type'])
        if filetype is None:
            logger.warning("Required file type '%s' not found or loaded for "
                           "'%s'", ds_info['file_type'], dsid['name'])
        else:
            return self.file_handlers[filetype]

    def _make_area_from_coords(self, coords):
        """Create an appropriate area with the given *coords*."""
        if len(coords) == 2:
            lon_sn = coords[0].attrs.get('standard_name')
            lat_sn = coords[1].attrs.get('standard_name')
            if lon_sn == 'longitude' and lat_sn == 'latitude':
                key = None
                try:
                    key = (coords[0].data.name, coords[1].data.name)
                    sdef = self.coords_cache.get(key)
                except AttributeError:
                    sdef = None
                if sdef is None:
                    sdef = SwathDefinition(*coords)
                    sensor_str = '_'.join(self.info['sensors'])
                    shape_str = '_'.join(map(str, coords[0].shape))
                    sdef.name = "{}_{}_{}_{}".format(sensor_str, shape_str,
                                                     coords[0].attrs['name'],
                                                     coords[1].attrs['name'])
                    if key is not None:
                        self.coords_cache[key] = sdef
                return sdef
            else:
                raise ValueError(
                    'Coordinates info object missing standard_name key: ' +
                    str(coords))
        elif len(coords) != 0:
            raise NameError("Don't know what to do with coordinates " + str(
                coords))

    def _load_dataset_area(self, dsid, file_handlers, coords, **kwargs):
        """Get the area for *dsid*."""
        try:
            return self._load_area_def(dsid, file_handlers, **kwargs)
        except NotImplementedError:
            if any(x is None for x in coords):
                logger.warning(
                    "Failed to load coordinates for '{}'".format(dsid))
                return None

            area = self._make_area_from_coords(coords)
            if area is None:
                logger.debug("No coordinates found for %s", str(dsid))
            return area

    def _load_dataset_with_area(self, dsid, coords, **kwargs):
        """Load *dsid* and its area if available."""
        file_handlers = self._get_file_handlers(dsid)
        if not file_handlers:
            return

        area = self._load_dataset_area(dsid, file_handlers, coords, **kwargs)

        try:
            ds = self._load_dataset_data(file_handlers, dsid, **kwargs)
        except (KeyError, ValueError) as err:
            logger.exception("Could not load dataset '%s': %s", dsid, str(err))
            return None

        if area is not None:
            ds.attrs['area'] = area
            ds = add_crs_xy_coords(ds, area)
        return ds

    def _load_ancillary_variables(self, datasets, **kwargs):
        """Load the ancillary variables of `datasets`."""
        all_av_ids = set()
        for dataset in datasets.values():
            ancillary_variables = dataset.attrs.get('ancillary_variables', [])
            if not isinstance(ancillary_variables, (list, tuple, set)):
                ancillary_variables = ancillary_variables.split(' ')
            av_ids = []
            for key in ancillary_variables:
                try:
                    av_ids.append(self.get_dataset_key(key))
                except KeyError:
                    logger.warning("Can't load ancillary dataset %s", str(key))

            all_av_ids |= set(av_ids)
            dataset.attrs['ancillary_variables'] = av_ids
        loadable_av_ids = [av_id for av_id in all_av_ids if av_id not in datasets]
        if not all_av_ids:
            return
        if loadable_av_ids:
            self.load(loadable_av_ids, previous_datasets=datasets, **kwargs)

        for dataset in datasets.values():
            new_vars = []
            for av_id in dataset.attrs.get('ancillary_variables', []):
                if isinstance(av_id, DataID):
                    new_vars.append(datasets[av_id])
                else:
                    new_vars.append(av_id)
            dataset.attrs['ancillary_variables'] = new_vars

    def get_dataset_key(self, key, available_only=False, **kwargs):
        """Get the fully qualified `DataID` matching `key`.

        This will first search through available DataIDs, datasets that
        should be possible to load, and fallback to "known" datasets, those
        that are configured but aren't loadable from the provided files.
        Providing ``available_only=True`` will stop this fallback behavior
        and raise a ``KeyError`` exception if no available dataset is found.

        Args:
            key (str, float, DataID, DataQuery): Key to search for in this reader.
            available_only (bool): Search only loadable datasets for the
                provided key. Loadable datasets are always searched first,
                but if ``available_only=False`` (default) then all known
                datasets will be searched.
            kwargs: See :func:`satpy.readers.get_key` for more information about
                kwargs.

        Returns:
            Best matching DataID to the provided ``key``.

        Raises:
            KeyError: if no key match is found.

        """
        try:
            return get_key(key, self.available_ids.keys(), **kwargs)
        except KeyError:
            if available_only:
                raise
            return get_key(key, self.all_ids.keys(), **kwargs)
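
    # E.g. (hypothetical dataset name): reader.get_dataset_key('I01') returns
    # the best matching loadable DataID and only falls back to datasets that
    # are configured but not loadable; with available_only=True the fallback
    # is skipped and a KeyError is raised instead.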

    def load(self, dataset_keys, previous_datasets=None, **kwargs):
        """Load `dataset_keys`.

        If `previous_datasets` is provided, do not reload those.
        """
        all_datasets = previous_datasets or DatasetDict()
        datasets = DatasetDict()

        # Include coordinates in the list of datasets to load
        dsids = [self.get_dataset_key(ds_key) for ds_key in dataset_keys]
        coordinates = self._get_coordinates_for_dataset_keys(dsids)
        all_dsids = list(set().union(*coordinates.values())) + dsids
        for dsid in all_dsids:
            if dsid in all_datasets:
                continue
            coords = [all_datasets.get(cid, None)
                      for cid in coordinates.get(dsid, [])]
            ds = self._load_dataset_with_area(dsid, coords, **kwargs)
            if ds is not None:
                all_datasets[dsid] = ds
                if dsid in dsids:
                    datasets[dsid] = ds
        self._load_ancillary_variables(all_datasets, **kwargs)

        return datasets
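
`sorted_filetype_items` above keeps deferring any file type whose
requirements have not been processed yet. The same idea as a standalone
function (illustrative only; like the method, it assumes requirements are
never circular):

from collections import deque

def order_by_requirements(file_types):
    """Yield (name, info) pairs so that anything named in info['requires']
    comes out before its dependents."""
    done = []
    queue = deque(file_types.items())
    while queue:
        name, info = queue.popleft()
        if any(req not in done for req in info.get('requires', ())):
            queue.append((name, info))  # requirements pending, retry later
            continue
        done.append(name)
        yield name, info

file_types = {'l1b': {'requires': ['geo']}, 'geo': {}}
print([name for name, _ in order_by_requirements(file_types)])
# -> ['geo', 'l1b']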
Example No. 40
class PresenceService(ExportedGObject):
    __gtype_name__ = "PresenceService"

    def _create_owner(self):
        # Overridden by TestPresenceService
        return ShellOwner(self, self._session_bus)

    def __init__(self):
        self._next_object_id = 0

        # all Buddy objects
        # identifier -> Buddy, GC'd when no more refs exist
        self._buddies = WeakValueDictionary()

        # the online buddies for whom we know the full public key
        # base64 public key -> Buddy
        self._buddies_by_pubkey = {}

        # The online buddies (those who're available via some CM)
        # TP plugin -> (handle -> Buddy)
        self._handles_buddies = {}

        # activity id -> Activity
        self._activities_by_id = {}
        #: Tp plugin -> (handle -> Activity)
        self._activities_by_handle = {}

        #: Connection -> list of SignalMatch
        self._conn_matches = {}

        self._session_bus = dbus.SessionBus()
        self._session_bus.add_signal_receiver(self._connection_disconnected_cb,
                signal_name="Disconnected",
                dbus_interface="org.freedesktop.DBus")

        # Create the Owner object
        self._owner = self._create_owner()
        key = self._owner.props.key
        keyid = pubkey_to_keyid(key)
        self._buddies['keyid/' + keyid] = self._owner
        self._buddies_by_pubkey[key] = self._owner

        self._registry = ManagerRegistry()
        self._registry.LoadManagers()

        # Set up the Telepathy plugins
        self._plugins = []
        debug_flags = set(environ.get('PRESENCE_SERVICE_DEBUG', '').split(','))
        _logger.debug('Debug flags: %r', debug_flags)
        if 'disable-gabble' in debug_flags:
            self._server_plugin = None
        else:
            server = self._owner.get_server()
            if server and len(server):
                self._server_plugin = ServerPlugin(self._registry, self._owner)
                self._plugins.append(self._server_plugin)
            else:
                self._server_plugin = None
        if 'disable-salut' in debug_flags:
            self._ll_plugin = None
        else:
            self._ll_plugin = LinkLocalPlugin(self._registry, self._owner)
            self._plugins.append(self._ll_plugin)
        self._connected_plugins = set()

        for tp in self._plugins:
            self._handles_buddies[tp] = {}
            self._activities_by_handle[tp] = {}

            tp.connect('status', self._tp_status_cb)
            tp.connect('contacts-online', self._contacts_online)
            tp.connect('contacts-offline', self._contacts_offline)
            tp.connect('activity-invitation',
                                        self._activity_invitation)
            tp.connect('private-invitation',
                                        self._private_invitation)
            tp.connect('want-to-connect', self._want_to_connect)

            connection = tp.get_connection()
            if connection is not None:
                status = connection.GetStatus()
                self._tp_status_cb(tp, status, CONNECTION_STATUS_REASON_NONE_SPECIFIED)

        self._contacts_online_queue = []

        ExportedGObject.__init__(self, self._session_bus, _PRESENCE_PATH)

        # for activation to work in a race-free way, we should really
        # export the bus name only after we export our initial object;
        # so this comes after the parent __init__
        self._bus_name = dbus.service.BusName(_PRESENCE_SERVICE,
                                              bus=self._session_bus)

    @property
    def owner(self):
        return self._owner

    def _connection_disconnected_cb(self, data=None):
        """Log event when D-Bus kicks us off the bus for some reason"""
        _logger.debug("Disconnected from session bus!!!")

    def _tp_status_cb(self, plugin, status, reason):
        if status == CONNECTION_STATUS_CONNECTED:
            self._tp_connected(plugin)
        else:
            self._tp_disconnected(plugin)

    def _tp_connected(self, tp):
        self._connected_plugins.add(tp)
        self._handles_buddies[tp][tp.self_handle] = self._owner
        self._owner.add_telepathy_handle(tp, tp.self_handle,
                                         tp.self_identifier)

        conn = tp.get_connection()

        self._conn_matches[conn] = []

        if CONN_INTERFACE_ACTIVITY_PROPERTIES in conn:
            def activity_properties_changed(room, properties):
                self._activity_properties_changed(tp, room, properties)
            m = conn[CONN_INTERFACE_ACTIVITY_PROPERTIES].connect_to_signal(
                    'ActivityPropertiesChanged',
                    activity_properties_changed)
            self._conn_matches[conn].append(m)
        else:
            _logger.warning('Connection %s does not support OLPC activity '
                            'properties', conn.object_path)

        if CONN_INTERFACE_BUDDY_INFO in conn:
            def buddy_activities_changed(contact, activities):
                _logger.debug('ActivitiesChanged on %s: (%u, %r)', tp,
                              contact, activities)
                self._buddy_activities_changed(tp, contact, activities)
            m = conn[CONN_INTERFACE_BUDDY_INFO].connect_to_signal(
                    'ActivitiesChanged', buddy_activities_changed)
            self._conn_matches[conn].append(m)

            def buddy_properties_changed(contact, properties):
                buddy = self._handles_buddies[tp].get(contact)
                if buddy is not None and buddy is not self._owner:
                    buddy.update_buddy_properties(tp, properties)
            m = conn[CONN_INTERFACE_BUDDY_INFO].connect_to_signal(
                'PropertiesChanged', buddy_properties_changed,
                byte_arrays=True)
            self._conn_matches[conn].append(m)

            def buddy_curact_changed(contact, act_id, room):
                if (act_id == '' or not util.validate_activity_id(act_id) or
                    room == 0):
                    act_id = ''
                    room = 0
                buddy = self._handles_buddies[tp].get(contact)
                if buddy is not None and buddy is not self._owner:
                    buddy.update_current_activity(tp, act_id)
                # FIXME: do something useful with the room handle?
            m = conn[CONN_INTERFACE_BUDDY_INFO].connect_to_signal(
                'CurrentActivityChanged', buddy_curact_changed)
            self._conn_matches[conn].append(m)
        else:
            _logger.warning('Connection %s does not support OLPC buddy info',
                            conn.object_path)

        if 1:
            # FIXME: Avatars have been disabled for Trial-2 due to performance
            # issues in the avatar cache. Revisit this afterwards
            pass
        elif CONN_INTERFACE_AVATARS in conn:
            def avatar_retrieved(contact, avatar_token, avatar, mime_type):
                self._avatar_updated(tp, contact, avatar_token, avatar,
                                     mime_type)
            m = conn[CONN_INTERFACE_AVATARS].connect_to_signal(
                    'AvatarRetrieved', avatar_retrieved)
            self._conn_matches[conn].append(m)

            def avatar_updated(contact, avatar_token):
                self._avatar_updated(tp, contact, avatar_token)
            m = conn[CONN_INTERFACE_AVATARS].connect_to_signal('AvatarUpdated',
                    avatar_updated)
            self._conn_matches[conn].append(m)
        else:
            _logger.warning('Connection %s does not support avatars',
                            conn.object_path)

        if CONN_INTERFACE_ALIASING in conn:
            def aliases_changed(aliases):
                for contact, alias in aliases:
                    buddy = self._handles_buddies[tp].get(contact)
                    if buddy is not None and buddy is not self._owner:
                        buddy.update_alias(tp, alias)
            m = conn[CONN_INTERFACE_ALIASING].connect_to_signal(
                    'AliasesChanged', aliases_changed)
            self._conn_matches[conn].append(m)
        else:
            _logger.warning('Connection %s does not support aliasing',
                            conn.object_path)

    def _tp_disconnected(self, tp):
        self._connected_plugins.discard(tp)
        if tp.self_handle is not None:
            self._handles_buddies.setdefault(tp, {}).pop(
                    tp.self_handle, None)
        self._owner.remove_telepathy_handle(tp)

        conn = tp.get_connection()

        matches = self._conn_matches.get(conn)
        try:
            del self._conn_matches[conn]
        except KeyError:
            pass
        if matches is not None:
            for match in matches:
                match.remove()

    def get_buddy_by_path(self, path):
        """Get the Buddy object corresponding to an object-path, or None.

        :Parameters:
            path : dbus.ObjectPath
                The object-path of a buddy
        :Returns: a Buddy object or None
        """
        if not path.startswith(BUDDY_PATH):
            return None
        return self._buddies.get(path[len(BUDDY_PATH):])

    def get_buddy(self, objid):
        buddy = self._buddies.get(objid)
        if buddy is None:
            _logger.debug('Creating new buddy at .../%s', objid)
            # we don't know yet this buddy
            buddy = Buddy(self._session_bus, objid)
            buddy.connect("validity-changed", self._buddy_validity_changed_cb)
            buddy.connect("disappeared", self._buddy_disappeared_cb)
            self._buddies[objid] = buddy
        return buddy

    def _contacts_online(self, tp, objids, handles, identifiers):
        # we'll iterate over handles many times, so make sure that will
        # work
        if not isinstance(handles, (list, tuple)):
            handles = tuple(handles)

        for objid, handle, identifier in izip(objids, handles, identifiers):
            _logger.debug('Handle %u, .../%s is now online', handle, objid)
            buddy = self.get_buddy(objid)

            self._handles_buddies[tp][handle] = buddy
            # Store the handle of the buddy for this CM. This doesn't
            # fetch anything over D-Bus, to avoid reaching the pending-call
            # limit.
            buddy.add_telepathy_handle(tp, handle, identifier)

        conn = tp.get_connection()

        if not self._contacts_online_queue:
            gobject.idle_add(self._run_contacts_online_queue)

        def handle_error(e, when):
            gobject.idle_add(self._run_contacts_online_queue)
            _logger.warning('Error %s: %s', when, e)

        if CONN_INTERFACE_ALIASING in conn:
            def got_aliases(aliases):
                gobject.idle_add(self._run_contacts_online_queue)
                for contact, alias in izip(handles, aliases):
                    buddy = self._handles_buddies[tp].get(contact)
                    if buddy is not None and buddy is not self._owner:
                        buddy.update_alias(tp, alias)
            def request_aliases():
                try:
                    conn[CONN_INTERFACE_ALIASING].RequestAliases(handles,
                        reply_handler=got_aliases,
                        error_handler=lambda e:
                            handle_error(e, 'fetching aliases'))
                except Exception as e:
                    gobject.idle_add(self._run_contacts_online_queue)
                    handle_error(e, 'fetching aliases')
            self._contacts_online_queue.append(request_aliases)

        for handle in handles:
            self._queue_contact_online(tp, handle)

        if 1:
            # FIXME: Avatars have been disabled for Trial-2 due to performance
            # issues in the avatar cache. Revisit this afterwards
            pass
        elif CONN_INTERFACE_AVATARS in conn:
            def got_avatar_tokens(tokens):
                gobject.idle_add(self._run_contacts_online_queue)
                for contact, token in izip(handles, tokens):
                    self._avatar_updated(tp, contact, token)
            def get_avatar_tokens():
                try:
                    conn[CONN_INTERFACE_AVATARS].GetAvatarTokens(handles,
                        reply_handler=got_avatar_tokens,
                        error_handler=lambda e:
                            handle_error(e, 'fetching avatar tokens'))
                except Exception as e:
                    gobject.idle_add(self._run_contacts_online_queue)
                    handle_error(e, 'fetching avatar tokens')
            self._contacts_online_queue.append(get_avatar_tokens)
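
Several classes in these examples (`_buddies` here, `_user_cache` in the
Steam example, `coords_cache` in the reader) use `WeakValueDictionary` as an
identity cache: one live object per key, reclaimed once nothing else holds a
reference. A small self-contained demonstration of the pattern:

from weakref import WeakValueDictionary

class Buddy(object):  # stand-in for the cached objects above
    def __init__(self, objid):
        self.objid = objid

_cache = WeakValueDictionary()

def get_buddy(objid):
    buddy = _cache.get(objid)
    if buddy is None:
        buddy = Buddy(objid)
        _cache[objid] = buddy
    return buddy

a = get_buddy('keyid/abc')
assert get_buddy('keyid/abc') is a  # same object while a reference is alive
del a
# with no strong references left, the cache entry can be reclaimed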
Example No. 41
class Service(object):
    """I manage a set of related :class:`ILocalCore`'s.

    All the resources in a service are stored in the same
    `rdflib.store.Store`:class:.

    :param classes: a list of classes to be used by this service (see below)
    :param service_config: kTBS configuration
    :param init_with: a callable to initialize the store if necessary (i.e. at
        least populate the root resource); it will be passed this service as
        its sole argument.

    Attributes:

    * ``root_uri`` (str): the URI of the root resource of this service
    * ``store`` (:class:`rdflib.store.Store`): the RDF store containing the
      data of this service

    The classes passed to this service should all be subclasses of
    :class:`ILocalCore`, and all have an attribute `RDF_MAIN_TYPE`
    indicating the RDF type they implement.
    """
    # too few public methods (1/2) #pylint: disable=R0903

    def __init__(self, classes, service_config=None, init_with=None):
        """I create a local RDF-REST service around the given store.
        """
        if service_config is None:
            service_config = get_service_configuration()

        self.config = service_config
        root_uri = build_service_root_uri(service_config)

        assert urisplit(root_uri)[3:] == (None, None), \
            "Invalid URI <%s>" % root_uri
        self.root_uri = coerce_to_uri(root_uri)

        apply_logging_config(service_config)

        init_repo = False
        repository = service_config.get('rdf_database', 'repository', 1)
        if not repository:
            init_repo = True
            repository = ":IOMemory:"
        elif repository[0] != ":":
            init_repo = not exists(repository)
            repository = ":Sleepycat:%s" % repository

        # Whether we should force data repository initialization
        if service_config.getboolean('rdf_database', 'force-init'):
            init_repo = True

        _, store_type, config_str = repository.split(":", 2)
        store = rdflib_plugin.get(store_type, Store)(config_str)

        self.store = store
        self.class_map = class_map = {}
        for cls in classes:
            assert issubclass(cls, ILocalCore)
            assert cls.RDF_MAIN_TYPE not in class_map, \
                "duplicate RDF_MAIN_TYPE <%s>" % cls.RDF_MAIN_TYPE
            class_map[cls.RDF_MAIN_TYPE] = cls

        # about self._resource_cache: this is not per se a cache,
        # but ensures that we will not generate multiple instances for the
        # same resource.
        self._resource_cache = WeakValueDictionary()
        self._context_level = 0

        metadata_graph = self.get_metadata_graph(root_uri)
        initialized = list(metadata_graph.triples((self.root_uri,
                                                   NS.hasImplementation,
                                                   None)))
        if not initialized and init_repo:
            assert init_with, \
                "Store is not initialized, and no initializer was provided"
            init_with(self)
            assert (list(metadata_graph.triples((self.root_uri,
                                                 NS.hasImplementation,
                                                 None)))) # correctly init'ed
            
        register_service(self)

    def __del__(self):
        try:
            unregister_service(self)
        except BaseException:
            pass

    @HostedCore.handle_fragments
    def get(self, uri, rdf_types=None, _no_spawn=False):
        """Get a resource from this service.

        :param uri:      the URI of the resource
        :type  uri:      :class:`~rdflib.URIRef`
        :param rdf_types: if provided, a list of expected RDF types of the resource
        :type  rdf_types: list of :class:`rdflib.term.URIRef`
        :param _no_spawn: if True, only *pre-existing* python objects will be
                          returned
        :type  _no_spawn: bool

        :return: the resource, or None
        :rtype:  :class:`ILocalCore` or :class:`~.cores.hosted.HostedCore`

        TODO NOW: if no resource is found, try to get it from parent resource

        NB: if uri contains a fragment-id, the returned resource will be a
        `~rdfrest.cores.hosted.HostedCore`:class: hosted by a resource from this
        service.

        When using this function, it is a good practice to indicate the expected
        return type, either informally (with a comment) or formally, with a
        statement of the form::
    
            assert isinstance(returned_object, expected_class)
        """
        assert isinstance(uri, URIRef)
        assert rdf_types is None  or  isinstance(rdf_types, list)

        querystr, fragid = urisplit(uri)[3:]
        if querystr is not None  or  fragid is not None:
            # fragid is managed by the decorator HostedCore.handle_fragments
            return None
        resource = self._resource_cache.get(uri)
        if resource is None  and  not _no_spawn:
            # find base rdf:type
            metadata = self.get_metadata_graph(uri)
            if len(metadata) == 0:
                return None
            types = list(
                metadata.objects(uri, NS.hasImplementation))
            assert len(types) == 1, types
            typ = types[0]

            # find base python class and wrap it
            py_class = self.class_map.get(typ)
            if py_class is None:
                raise ValueError("No implementation for type <%s> of <%s>"
                                 % (types[0], uri))
            if rdf_types:
                py_class = get_wrapped(py_class, rdf_types)

            # make resource and store it in "cache"
            resource = py_class(self, uri)
            self._resource_cache[uri] = resource
        return resource

    def get_metadata_graph(self, uri):
        """Return the metadata graph for the resource identified by uri

        :param uri: the URI of the resource
        :return: the metadata graph
        :rtype: :class:`rdflib.graph.Graph`
        """
        return Graph(self.store, URIRef(uri + '#metadata'))

    def __enter__(self):
        """Start to modify this service.

        The role of using a service as a context is to ensure that data is
        correctly handled by the underlying RDF store:

        * on normal exit,
          :meth:`self.store.commit <rdflib.store.Store.commit>` will be
          called;

        * if an exception is raised,
          :meth:`self.store.rollback <rdflib.store.Store.rollback>` will be
          called;

        * if several contexts are nested (e.g. by calling a function that
          itself uses the service context), the commit/rollback will only
          occur when exiting the *outermost* context, ensuring that only
          globally consistent states are committed.

        Note that the implementations provided in this module already take
        care of using the service context, so implementors relying on them
        should not have to worry about it. It may still be necessary to use
        the service context explicitly, though, to make a set of resource
        modifications atomic.

        .. warning::

            For the moment (2012-07), most implementations of
            :class:`rdflib.store.Store` do not support rollback (which simply
            does nothing). So unless you know for sure that the store you are
            using does support rollback, you should assume that the store is
            corrupted when exiting abnormally from the service context.
        """
        if self._context_level == 0 and self.store.transaction_aware:
            self.store.transaction()
        self._context_level += 1

    def __exit__(self, typ, _value, _traceback):
        """Ends modifications to this service.
        """
        level = self._context_level - 1
        self._context_level = level
        if level == 0:
            if typ is None:
                self.store.commit()
            else:
                self.store.rollback()
                # we rollback *in case* the store supports it,
                # to try to restore it in a consistent state.
                # However there is no guarantee that this works,
                # as not all stores support rollback.
                # This is therefore a best-effort to limit damages,
                # rather than a safe handling of the exception
                # (at least, until all stores support rollback).
                return False
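A short usage sketch of the nesting behaviour documented in `__enter__`, assuming `service` is an initialized Service instance; the helper below is hypothetical:

def create_resource(service):
    with service:   # nested use: only bumps _context_level, no commit here
        pass        # ... modify graphs in service.store ...

def create_two_resources(service):
    with service:   # outermost use: commit() runs when this block exits
        create_resource(service)
        create_resource(service)
    # both modifications became visible in a single, atomic commit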
Example No. 42
class base:
    """
    Maintains the cache information about eclasses available to an ebuild.
    """

    def __init__(self, location=None, eclassdir=None):
        self._eclass_data_inst_cache = WeakValueDictionary()
        # generate this.
        # self.eclasses = {} # {"Name": ("location", "_mtime_")}
        self.location = location
        self.eclassdir = eclassdir

    def get_eclass_data(self, inherits):
        """Return the cachable entries from a list of inherited eclasses.

        Only make get_eclass_data calls for data you know came from
        this eclass_cache, otherwise be ready to catch a KeyError
        exception for any eclass that was requested, but not known to
        this cache.
        """

        keys = OrderedFrozenSet(inherits)
        o = self._eclass_data_inst_cache.get(keys)
        if o is None:
            o = ImmutableDict((k, self.eclasses[k]) for k in keys)
            self._eclass_data_inst_cache[keys] = o
        return o

    def get_eclass(self, eclass):
        o = self.eclasses.get(eclass)
        if o is None:
            return None
        return local_source(o.path)

    eclasses = jit_attr_ext_method("_load_eclasses", "_eclasses")

    def rebuild_cache_entry(self, entry_eclasses):
        """Check if eclass data is still valid.

        Given a dict as returned by get_eclass_data, walk it comparing
        it to the internal eclass view.

        :return: a dict of the eclass data if it is still up to date,
            otherwise None
        """
        ec = self.eclasses
        d = {}

        for eclass, chksums in entry_eclasses:
            data = ec.get(eclass)
            if any(val != getattr(data, chf, None) for chf, val in chksums):
                return None
            d[eclass] = data

        return d

    def __getstate__(self):
        d = self.__dict__.copy()
        del d['_eclass_data_inst_cache']
        return d

    def __setstate__(self, state):
        self.__dict__ = state.copy()
        self.__dict__['_eclass_data_inst_cache'] = WeakValueDictionary()
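`OrderedFrozenSet` and `ImmutableDict` come from snakeoil; the key point of the cache above is that the key is hashable and the value can be weakly referenced. A minimal sketch of the same idiom using only the standard library (the dict subclass is an assumption standing in for `ImmutableDict`, since plain dicts cannot be weakly referenced):

from weakref import WeakValueDictionary

class _WeakrefableDict(dict):
    # plain dict instances cannot be weakly referenced; a trivial
    # subclass can, which lets instances live in the weak cache below
    pass

_cache = WeakValueDictionary()

def get_eclass_data(inherits, eclasses):
    key = frozenset(inherits)
    o = _cache.get(key)
    if o is None:
        o = _WeakrefableDict((k, eclasses[k]) for k in key)
        _cache[key] = o
    return o

eclasses = {'eutils': ('path', 1), 'multilib': ('path', 2)}
d1 = get_eclass_data(['eutils', 'multilib'], eclasses)
d2 = get_eclass_data(['multilib', 'eutils'], eclasses)
assert d1 is d2  # same shared entry while a strong reference exists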
Example No. 43
class _WeakValueDictionary(object):
    # Maps from OID -> Persistent object, but
    # only weakly references the Persistent object. This is similar
    # to ``weakref.WeakValueDictionary``, but is customized depending on the
    # platform. On PyPy, all objects can cheaply use a WeakRef, so that's
    # what we actually use. On CPython, though, ``PersistentPy`` cannot be weakly
    # referenced, so we rely on the fact that the ``id()`` of an object is its
    # memory location, and we use ``ctypes`` to cast that integer back to
    # the object.
    #
    # To remove stale addresses, we rely on the ``ffi.gc()`` object with the exact
    # same lifetime as the ``PersistentPy`` object. It calls us, we get the ``id``
    # back out of the CData, and clean up.
    if PYPY: # pragma: no cover
        def __init__(self):
            self._data = WeakValueDictionary()

        def _from_addr(self, addr):
            return addr

        def _save_addr(self, oid, obj):
            return obj

        cleanup_hook = None
    else:
        def __init__(self):
            # careful not to require ctypes at import time; most likely the
            # C implementation is in use.
            import ctypes

            self._data = {}
            self._addr_to_oid = {}
            self._cast = ctypes.cast
            self._py_object = ctypes.py_object

        def _save_addr(self, oid, obj):
            i = id(obj)
            self._addr_to_oid[i] = oid
            return i

        def _from_addr(self, addr):
            return self._cast(addr, self._py_object).value

        def cleanup_hook(self, cdata):
            # This is called during GC, possibly at interpreter shutdown
            # when the __dict__ of this object may have already been cleared.
            try:
                addr_to_oid = self._addr_to_oid
            except AttributeError:
                return
            oid = addr_to_oid.pop(cdata.pobj_id, None)
            self._data.pop(oid, None)

    def __contains__(self, oid):
        return oid in self._data

    def __len__(self):
        return len(self._data)

    def __setitem__(self, key, value):
        addr = self._save_addr(key, value)
        self._data[key] = addr

    def pop(self, oid):
        return self._from_addr(self._data.pop(oid))

    def items(self):
        from_addr = self._from_addr
        for oid, addr in self._data.items():
            yield oid, from_addr(addr)

    def get(self, oid, default=None):
        addr = self._data.get(oid, self)
        if addr is self:
            return default
        return self._from_addr(addr)

    def __getitem__(self, oid):
        addr = self._data[oid]
        return self._from_addr(addr)
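The CPython branch works because, in CPython, `id()` happens to be the object's memory address, and `ctypes.cast` can turn that address back into the object, exactly as `_from_addr` does. A minimal demonstration (CPython only; the object must still be alive, or the address dangles):

import ctypes

class Obj(object):
    pass

obj = Obj()
addr = id(obj)  # in CPython, the object's memory address
back = ctypes.cast(addr, ctypes.py_object).value
assert back is obj  # same object, recovered from its bare address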
Example No. 44
class CacheBlock (MutableMapping,HtmlPlugin):
  r"""
Instances of this class implement blocks of cells sharing the same functor.

:param db: specification of the cache repository where the block resides
:param functor: functor of the block
:type functor: :class:`Functor`
:param cacheonly: if :const:`True`, cell creation is disallowed
:type cacheonly: :class:`bool`

A :class:`CacheBlock` instance is callable, and calls take a single argument. Method :meth:`__call__` implements the cross-process caching mechanism which produces and reuses cache cells. It also implements a weak cache for local calls (within its process).

Furthermore, a :class:`CacheBlock` instance acts as a mapping where the keys are cell identifiers (:class:`int`) and values are tuples of meta-information about the cells (i.e. not the values of the cells: these are only accessible through calling).

Finally, :class:`CacheBlock` instances have an HTML IPython display.

Attributes:

.. attribute:: db

   the :class:`CacheDB` instance this block belongs to

.. attribute:: functor

   the functor for this block (field ``functor`` in the ``Block`` table of the index is the functor's pickle)

.. attribute:: block

   the :class:`int` identifier of this block (field ``oid`` in the ``Block`` table of the index)

.. attribute:: cacheonly

   whether cell creation is disabled

.. attribute:: memory

   a :class:`weakref.WeakValueDictionary` implementing a local cache of calls within the current process

Methods:

.. automethod:: __call__
  """
#==================================================================================================

  def __init__(self,db=None,functor=None,block=None,cacheonly=False):
    self.db = db = CacheDB(db)
    self.functor = functor
    self.block = db.getblock(functor) if block is None else block
    self.cacheonly = cacheonly
    self.memory = WeakValueDictionary()

  def __hash__(self): return hash((self.db,self.block))
  def __eq__(self,other): return isinstance(other,CacheBlock) and self.db is other.db and self.block == other.block

  def clear_error(self,dry_run=False):
    r"""
Clears all the cells from this block which cache an exception.
    """
    with self.db.connect() as conn:
      if dry_run: return [cell for cell, in conn.execute('SELECT oid FROM Cell WHERE block=? AND size<0',(self.block,))]
      conn.execute('DELETE FROM Cell WHERE block=? AND size<0',(self.block,))
      deleted = conn.total_changes
    if deleted>0: logger.info('%s DELETED(%s)',self,deleted)
    return deleted

  def clear_overflow(self,n,dry_run=False):
    r"""
Clears all the cells from this block except the *n* most recent (LRU policy).
    """
    assert isinstance(n,int) and n>=1
    with self.db.connect() as conn:
      if dry_run: return [cell for cell, in conn.execute('SELECT oid FROM Cell WHERE block=? AND size>0 ORDER BY tstamp DESC, oid DESC LIMIT -1 OFFSET ?',(self.block,n))]
      conn.execute('DELETE FROM Cell WHERE oid IN (SELECT oid FROM Cell WHERE block=? AND size>0 ORDER BY tstamp DESC, oid DESC LIMIT -1 OFFSET ?)',(self.block,n))
      deleted = conn.total_changes
    if deleted>0: logger.info('%s DELETED(%s)',self,deleted)
    return deleted

  def info(self,typ=namedtuple('BlockInfo',('hits','ncell','ncell_error','ncell_pending'))):
    r"""
Returns information about this block. Available attributes:
:attr:`hits`, :attr:`ncell`, :attr:`ncell_error`, :attr:`ncell_pending`
    """
    with self.db.connect(detect_types=sqlite3.PARSE_DECLTYPES) as conn:
      ncell = dict(conn.execute('SELECT CASE WHEN size ISNULL THEN \'pending\' WHEN size<0 THEN \'error\' ELSE \'\' END AS status, count(*) FROM Cell WHERE block=? GROUP BY status',(self.block,)))
      hits, = conn.execute('SELECT sum(hits) FROM Cell WHERE block=?',(self.block,)).fetchone()
    return typ((hits or 0),sum(ncell.values()),*(ncell.get(k,0) for k in ('error','pending')))

#--------------------------------------------------------------------------------------------------
  def __call__(self,arg):
    """
:param arg: argument of the call

Implements caching as follows:

- Method :meth:`getkey` of the functor is invoked with argument *arg* to obtain a ``ckey``.
- If that ``ckey`` is present in the (local) memory mapping of this block, its associated value is returned.
- Otherwise, a transaction is begun on the index database.

  - If there already exists a cell with the same ``ckey``, method :meth:`lookup` of the storage is invoked to obtain a getter for that cell, then the transaction is terminated and the result is extracted, using the obtained getter. The cell's hit count is incremented.
  - If there does not exist a cell with the same ``ckey``, a cell with that ``ckey`` is created, and method :meth:`insert` of the storage is invoked to obtain a setter for that cell, then the transaction is terminated. Then, method :meth:`getval` of the functor is invoked with argument *arg* and its result is stored, even if it is an exception, using the obtained setter.

- If the result is an exception, it is raised.
- Otherwise, the memory mapping of this block is updated at key ``ckey`` with the result (if possible), and the result is returned.
    """
#--------------------------------------------------------------------------------------------------
    ckey = self.functor.getkey(arg)
    cval = self.memory.get(ckey)
    if cval is not None: return cval
    with self.db.connect() as conn:
      conn.execute('BEGIN IMMEDIATE TRANSACTION')
      row = conn.execute('SELECT oid,size FROM Cell WHERE block=? AND ckey=?',(self.block,ckey)).fetchone()
      if row is None:
        if self.cacheonly: raise Exception('Cache cell creation disallowed')
        cell = conn.execute('INSERT INTO Cell (block,ckey) VALUES (?,?)',(self.block,ckey)).lastrowid
        setval = self.db.storage.insert(cell)
      else:
        cell,size = row
        getval = self.db.storage.lookup(cell,size==0)
    if row is None:
      logger.info('%s MISS(%s)',self,cell)
      tm = process_time(),perf_counter()
      try: cval = self.functor.getval(arg)
      except BaseException as e: cval = e; size = -1
      else: size = 1
      tm = process_time()-tm[0],perf_counter()-tm[1]
      try: size *= setval(cval)
      except:
        with self.db.connect() as conn:
          conn.execute('DELETE FROM Cell WHERE oid=?',(cell,))
        raise
      with self.db.connect() as conn:
        conn.execute('UPDATE Cell SET size=?, tprc=?, ttot=?, tstamp=datetime(\'now\') WHERE oid=?',(size,tm[0],tm[1],cell))
        if not conn.total_changes: logger.info('%s LOST(%s)',self,cell)
      if size<0: raise cval
    else:
      if size==0: logger.info('%s WAIT(%s)',self,cell)
      cval = getval()
      logger.info('%s HIT(%s)',self,cell)
      with self.db.connect() as conn:
        conn.execute('UPDATE Cell SET hits=hits+1, tstamp=datetime(\'now\') WHERE oid=?',(cell,))
      if isinstance(cval,BaseException): raise cval
    try: self.memory[ckey] = cval
    except: pass
    return cval

#--------------------------------------------------------------------------------------------------
# CacheBlock as Mapping
#--------------------------------------------------------------------------------------------------

  def __getitem__(self,cell):
    with self.db.connect() as conn:
      r = conn.execute('SELECT ckey, tstamp, hits, size, tprc, ttot FROM Cell WHERE oid=?',(cell,)).fetchone()
    if r is None: raise KeyError(cell)
    return r

  def __delitem__(self,cell):
    with self.db.connect() as conn:
      conn.execute('DELETE FROM Cell WHERE oid=?',(cell,))
      if not conn.total_changes: raise KeyError(cell)

  def __setitem__(self,cell,v):
    raise Exception('Direct create/update not permitted on Cell')

  def __iter__(self):
    with self.db.connect() as conn:
      for cell, in conn.execute('SELECT oid FROM Cell WHERE block=?',(self.block,)): yield cell

  def __len__(self):
    with self.db.connect() as conn:
      return conn.execute('SELECT count(*) FROM Cell WHERE block=?',(self.block,)).fetchone()[0]

  def items(self):
    with self.db.connect() as conn:
      for row in conn.execute('SELECT oid, ckey, tstamp, hits, size, tprc, ttot FROM Cell WHERE block=?',(self.block,)):
        yield row[0],row[1:]

  def clear(self):
    with self.db.connect() as conn:
      conn.execute('DELETE FROM Cell WHERE block=?',(self.block,))

#--------------------------------------------------------------------------------------------------
# Display
#--------------------------------------------------------------------------------------------------

  def as_html(self,incontext,size_fmt_=(lambda sz: '*'+size_fmt(-sz) if sz<0 else size_fmt(sz)),time_fmt_=(lambda t: '' if t is None else time_fmt(t))):
    n = len(self)-self._html_limit
    L = self.items(); closing = None
    if n>0: L = islice(L,self._html_limit); closing = '{} more'.format(n)
    return html_table(sorted(L),hdrs=('ckey','tstamp','hits','size','tprc','ttot'),fmts=((lambda ckey,h=self.functor.html: h(ckey,incontext)),str,str,size_fmt_,time_fmt_,time_fmt_),opening='{}: {}'.format(self.block,self.functor),closing=closing)
  def __repr__(self): return 'Cache<{}:{}>'.format(self.db.path,self.functor)
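One detail worth noting in `__call__` above: the final `self.memory[ckey] = cval` is wrapped in `try`/`except` because `memory` is a `WeakValueDictionary`, and many common result types (ints, strings, tuples) cannot be weakly referenced. A minimal sketch of that fallback, with hypothetical names:

from weakref import WeakValueDictionary

_memory = WeakValueDictionary()

def remember(ckey, cval):
    # keep a local weak cache entry when the value supports weak
    # references; silently skip values (e.g. int, str) that do not
    try:
        _memory[ckey] = cval
    except TypeError:
        pass
    return cval

class Result:
    pass

remember('k1', Result())  # cached until the value is garbage collected
remember('k2', 42)        # TypeError swallowed: ints are not weakref-able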
Example No. 45
class LargeCache:
    '''File-backed cache for large objects (particularly images).  Uses a
    WeakValueDictionary for in-memory storage, and a page-based format for
    on-disk storage.'''
    def __init__(self, data_file, index_file):
        # Dict storing currently loaded values
        self.cache = WeakValueDictionary()

        # Data file, and index mapping key to data offset
        self.data_file = data_file
        self.data_total = os.fstat(data_file.fileno()).st_size

        self.index_file = index_file
        self.index = {}

        self.used = set()

        # Read index data into dict
        self.index_file.seek(0)
        for line in self.index_file.readlines():
            parts = line.strip().split()
            if len(parts) != 2:
                continue
            offset_str, key_str = parts
            offset = int(offset_str)
            key = pickle.loads(base64.decodebytes(key_str.encode('ascii')))
            self.index[key] = offset

        # Seek both to EOF
        self.data_file.seek(0, os.SEEK_END)
        self.index_file.seek(0, os.SEEK_END)

    def contains(self, key):
        return key in self.cache or key in self.index

    def get(self, key):
        # Record the key use here, regardless of the outcome.  This assumes the
        # caller runs `add` only on keys for which it first ran `get`.
        self.used.add(key)

        # Try to fetch from in-memory cache
        value = self.cache.get(key)
        if value is not None:
            return value

        # Try to load from file
        if key in self.index:
            offset = self.index[key]
            self.data_file.seek(offset)
            value = _safe_load(self.data_file)
            self.cache[key] = value
            return value

        # No cached copy of this image
        return None

    def add(self, key, value):
        # Add to in-memory cache
        self.cache[key] = value

        # Write pickled value to next available page
        page = CACHE_PAGE
        offset = (self.data_total + page - 1) & ~(page - 1)
        self.data_file.seek(offset)
        _safe_dump(value, self.data_file)
        self.data_total = self.data_file.tell()

        # Write index line
        self.index[key] = offset
        key_str = base64.encodebytes(pickle.dumps(key)).decode('ascii')
        self.index_file.write('%d %s\n' % (offset, key_str.replace('\n', '')))

    def size(self):
        return len(self.index)

    def save(self):
        pass
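The offset computation in `add` rounds the current end of the data file up to the next multiple of `CACHE_PAGE` (defined elsewhere; assumed here to be a power of two) with the usual bit trick. A small worked check, with an assumed page size of 4096:

CACHE_PAGE = 4096  # assumed value; must be a power of two for the mask trick

def page_align(n, page=CACHE_PAGE):
    # round n up to the next multiple of page
    return (n + page - 1) & ~(page - 1)

assert page_align(0) == 0
assert page_align(1) == 4096
assert page_align(4096) == 4096
assert page_align(4097) == 8192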
Example No. 46
File: memory.py Project: oeway/WorQ
class TaskQueue(AbstractTaskQueue):
    """Simple in-memory task queue implementation"""

    @classmethod
    def factory(cls, url, name=const.DEFAULT, *args, **kw):
        obj = _REFS.get((url, name))
        if obj is None:
            obj = _REFS[(url, name)] = cls(url, name, *args, **kw)
        return obj

    def __init__(self, *args, **kw):
        super(TaskQueue, self).__init__(*args, **kw)
        self.queue = Queue()
        self.results = WeakValueDictionary()
        self.results_lock = Lock()

    def _init_result(self, result, status, message):
        with self.results_lock:
            if result.id in self.results:
                return False
            self.results[result.id] = result
        result.__status = status
        result.__value = Queue()
        result.__task = message
        result.__args = {}
        result.__lock = Lock()
        result.__for = None
        return True

    def enqueue_task(self, result, message):
        if self._init_result(result, const.ENQUEUED, message):
            self.queue.put(result)
            return True
        return False

    def defer_task(self, result, message, args):
        if self._init_result(result, const.PENDING, message):
            results = self.results
            # keep references to results to prevent GC
            result.__refs = [results.get(arg) for arg in args]
            return True
        return False

    def undefer_task(self, task_id):
        result = self.results[task_id]
        self.queue.put(result)

    def get(self, timeout=None):
        try:
            result = self.queue.get(timeout=timeout)
        except Empty:
            return None
        result.__status = const.PROCESSING
        return result.id, result.__task

    def size(self):
        return len(self.results)

    def discard_pending(self):
        with self.results_lock:
            while True:
                try:
                    self.queue.get_nowait()
                except Empty:
                    break
            self.results.clear()

    def reserve_argument(self, argument_id, deferred_id):
        result = self.results.get(argument_id)
        if result is None:
            return (False, None)
        with result.__lock:
            if result.__for is not None:
                return (False, None)
            result.__for = deferred_id
            try:
                message = result.__value.get_nowait()
            except Empty:
                message = None
            if message is not None:
                with self.results_lock:
                    self.results.pop(argument_id, None)
            return (True, message)

    def set_argument(self, task_id, argument_id, message):
        result = self.results[task_id]
        with self.results_lock:
            self.results.pop(argument_id, None)
        with result.__lock:
            result.__args[argument_id] = message
            return len(result.__args) == len(result.__refs)

    def get_arguments(self, task_id):
        try:
            return self.results[task_id].__args
        except KeyError:
            return {}

    def set_task_timeout(self, task_id, timeout):
        pass

    def get_status(self, task_id):
        result = self.results.get(task_id)
        return None if result is None else result.__status

    def set_result(self, task_id, message, timeout):
        result = self.results.get(task_id)
        if result is not None:
            with result.__lock:
                result.__value.put(message)
                return result.__for

    def pop_result(self, task_id, timeout):
        result = self.results.get(task_id)
        if result is None:
            return const.TASK_EXPIRED
#        with result.__lock:
#            if result.__for is not None:
#                raise NotImplementedError
#                #return const.RESERVED
#            result.__for = task_id
        try:
            if timeout == 0:
                value = result.__value.get_nowait()
            else:
                value = result.__value.get(timeout=timeout)
        except Empty:
            value = None
        else:
            self.results.pop(task_id, None)
        return value

    def discard_result(self, task_id, task_expired_token):
        result = self.results.pop(task_id, None)
        if result is not None:
            result.__value.put(task_expired_token)
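The `factory` classmethod memoizes queues in the module-level `_REFS` mapping (not shown in this excerpt; presumably a `WeakValueDictionary` keyed by `(url, name)`), so repeated lookups share one queue object while it is referenced. A hedged usage sketch, with illustrative URL and name values:

q1 = TaskQueue.factory('memory://', 'default')
q2 = TaskQueue.factory('memory://', 'default')
assert q1 is q2       # same queue object for the same (url, name)

q3 = TaskQueue.factory('memory://', 'other')
assert q3 is not q1   # a different name yields a different queue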
Example No. 47
class TaurusFactory(object):
    """The base class for valid Factories in Taurus."""

    schemes = ()  # reimplement in derived classes to declare supported schemes
    caseSensitive = True  # reimplement if your scheme is case insensitive

    elementTypesMap = None  # reimplement in derived classes to profit from
    # generic implementations of getAuthority,
    # getDevice, getAttribute, findObjectClass, etc.
    # see findObjectClass for more details

    DefaultPollingPeriod = 3000

    def __init__(self):
        atexit.register(self.cleanUp)
        self._polling_period = self.DefaultPollingPeriod
        self.polling_timers = {}
        self._polling_enabled = True
        self._attrs = WeakValueDictionary()
        self._devs = WeakValueDictionary()
        self._auths = WeakValueDictionary()

        from . import taurusmanager
        manager = taurusmanager.TaurusManager()
        self._serialization_mode = manager.getSerializationMode()

    #-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-
    # API for cleanUp at exit
    #-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-

    def cleanUp(self):
        """Reimplement if you need to execute code on program execution exit.
        Default implementation does nothing.
        """
        pass

    #-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-
    # API for serialization
    #-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-

    def setSerializationMode(self, mode):
        """Sets the serialization mode for the system.

        :param mode: (TaurusSerializationMode) the new serialization mode
        """
        self._serialization_mode = mode

    def getSerializationMode(self):
        """Gives the serialization operation mode.

        :return: (TaurusSerializationMode) the current serialization mode
        """
        return self._serialization_mode

    #-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-
    # API to get objects. Generic implementation. You may want to reimplement
    # it in your scheme factory
    #-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-

    def getAuthority(self, name=None):
        """Obtain the model object corresponding to the given authority name.
        If the corresponding authority already exists, the existing instance
        is returned. Otherwise a new instance is stored and returned.

        :param name: (str) authority name

        :return: a taurus.core.taurusauthority.TaurusAuthority object
        :raises: :TaurusException: if the given name is invalid.
        """
        v = self.getAuthorityNameValidator()
        if not v.isValid(name):
            msg = "Invalid {scheme} authority name '{name}'".format(
                scheme=self.schemes[0], name=name)
            raise TaurusException(msg)

        fullname, _, _ = v.getNames(name)
        auth = self._auths.get(fullname)
        if auth is not None:
            return auth

        cls = self.elementTypesMap[TaurusElementType.Authority]
        auth = cls(name=fullname)
        self._auths[fullname] = auth
        return auth

    def getDevice(self, name, **kw):
        """Obtain the model object corresponding to the given device name.
        If the corresponding device already exists, the existing instance
        is returned. Otherwise a new instance is stored and returned.

        :param name: (str) device name

        :return: a taurus.core.taurusdevice.TaurusDevice object
        :raises: :TaurusException: if the given name is invalid.
        """
        v = self.getDeviceNameValidator()
        if not v.isValid(name):
            msg = "Invalid {scheme} device name '{name}'".format(
                scheme=self.schemes[0], name=name)
            raise TaurusException(msg)

        fullname, _, _ = v.getNames(name)
        dev = self._devs.get(fullname)
        if dev is not None:
            return dev

        try:
            # this works if the authority name is present in the dev full name
            # (which in principle should always be the case)
            authname = v.getUriGroups(fullname)['authority']
            auth = self.getAuthority(authname)
        except:
            self.debug('Cannot get device parent from name "%s"', fullname)
            auth = None

        cls = self.elementTypesMap[TaurusElementType.Device]
        dev = cls(name=fullname, parent=auth)
        self._devs[fullname] = dev
        return dev

    def getAttribute(self, name):
        """ Obtain the model object corresponding to the given attribute name.
        If the corresponding attribute already exists, the existing instance
        is returned. Otherwise a new instance is stored and returned.

        :param name: (str) attribute name

        :return: a taurus.core.taurusattribute.TaurusAttribute object
        :raises: :TaurusException: if the given name is invalid.
        """
        v = self.getAttributeNameValidator()
        if not v.isValid(name):
            msg = "Invalid {scheme} attribute name '{name}'".format(
                scheme=self.schemes[0], name=name)
            raise TaurusException(msg)

        fullname, _, _ = v.getNames(name)
        attr = self._attrs.get(fullname)
        if attr is not None:
            return attr

        try:
            # this works only if the devname is present in the attr full name
            # (not all schemes are constructed in this way)
            devname = v.getUriGroups(fullname)['devname']
            dev = self.getDevice(devname)
        except:
            self.debug('Cannot get attribute parent from name "%s"', fullname)
            dev = None

        cls = self.elementTypesMap[TaurusElementType.Attribute]
        attr = cls(name=fullname, parent=dev)
        self._attrs[fullname] = attr
        return attr

    #-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-
    # Methods that must be implemented by the specific Factory
    #-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-

    def getAuthorityNameValidator(self):
        raise NotImplementedError("getAuthorityNameValidator cannot be called"
                                  " for abstract TaurusFactory")

    def getDeviceNameValidator(self):
        raise NotImplementedError("getDeviceNameValidator cannot be called"
                                  " for abstract TaurusFactory")

    def getAttributeNameValidator(self):
        raise NotImplementedError("getAttributeNameValidator cannot be called"
                                  " for abstract TaurusFactory")

    #-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-
    # Factory extension API
    # Override the following methods if you need to provide special classes for
    # special object types
    #-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-

    def registerAttributeClass(self, attr_name, attr_klass):
        pass

    def unregisterAttributeClass(self, attr_name):
        pass

    def registerDeviceClass(self, dev_klass_name, dev_klass):
        pass

    def unregisterDeviceClass(self, dev_klass_name):
        pass

    #-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-
    # Generic methods
    #-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-

    def supportsScheme(self, scheme):
        """Returns whether the given scheme is supported by this factory

        :param scheme: (str) the name of the scheme to be checked

        :return: (bool) True if the scheme is supported (False otherwise)
        """
        return scheme in self.schemes

    def findObject(self, absolute_name):
        """ Must give an absolute name"""
        if not absolute_name:
            return None
        obj = None
        cls = self.findObjectClass(absolute_name)
        if cls:
            obj = self.getObject(cls, absolute_name)
        return obj

    def getObject(self, cls, name):
        t4_msg = 'The TaurusConfiguration classes are deprecated in tep14'
        if issubclass(cls, TaurusAuthority):
            return self.getAuthority(name)
        elif issubclass(cls, TaurusDevice):
            return self.getDevice(name)
        elif issubclass(cls, TaurusAttribute):
            return self.getAttribute(name)
        # For backward compatibility
        elif issubclass(cls, TaurusConfiguration):
            self.deprecated(dep='TaurusConfiguration',
                            alt='TaurusAttribute',
                            rel='4.0',
                            dbg_msg=t4_msg)
            return self.getAttribute(name)
        elif issubclass(cls, TaurusConfigurationProxy):
            self.deprecated(dep='TaurusConfigurationProxy',
                            alt='TaurusAttribute',
                            rel='4.0',
                            dbg_msg=t4_msg)
            return self.getAttribute(name)
        else:
            return None

    def changeDefaultPollingPeriod(self, period):
        if period > 0:
            self._polling_period = period

    def getDefaultPollingPeriod(self):
        return self._polling_period

    def isPollingEnabled(self):
        """Tells if the Taurus polling is enabled

           :return: (bool) whether or not the polling is enabled
        """
        return self._polling_enabled

    def disablePolling(self):
        """Disable the application tango polling"""
        if not self.isPollingEnabled():
            return
        self._polling_enabled = False
        for period, timer in self.polling_timers.items():
            timer.stop()

    def enablePolling(self):
        """Enable the application tango polling"""
        if self.isPollingEnabled():
            return
        for period, timer in self.polling_timers.items():
            timer.start()
        self._polling_enabled = True

    def addAttributeToPolling(self, attribute, period, unsubscribe_evts=False):
        """Activates the polling (client side) for the given attribute with the
           given period (seconds).

           :param attribute: (taurus.core.tango.TangoAttribute) attribute name.
           :param period: (float) polling period (in seconds)
           :param unsubscribe_evts: (bool) whether or not to unsubscribe from events
        """
        tmr = self.polling_timers.get(period, TaurusPollingTimer(period))
        self.polling_timers[period] = tmr
        tmr.addAttribute(attribute, self.isPollingEnabled())

    def removeAttributeFromPolling(self, attribute):
        """Deactivate the polling (client side) for the given attribute. If the
           polling of the attribute was not previously enabled, nothing happens.

           :param attribute: (str) attribute name.
        """
        p = None
        for period, timer in self.polling_timers.items():
            if timer.containsAttribute(attribute):
                timer.removeAttribute(attribute)
                if timer.getAttributeCount() == 0:
                    p = period
                break
        if p:
            del self.polling_timers[period]

    def __str__(self):
        return '{0}()'.format(self.__class__.__name__)

    def __repr__(self):
        return '{0}(schemes={1})'.format(self.__class__.__name__,
                                         ", ".join(self.schemes))

    def getValidTypesForName(self, name, strict=None):
        '''
        Returns a list of all Taurus element types for which `name` is a valid
        model name (while in many cases a name may only be valid for one
        element type, this is not necessarily true in general)

        In this base implementation, name is checked first for Attribute, then
        for Device and finally for Authority, and the return value is sorted in
        that same order.

        If a given scheme requires a different ordering, reimplement this method

        :param name: (str) taurus model name

        :return: (list<TaurusElementType.element>) where element can be one of:
                 `Attribute`, `Device` or `Authority`
        '''
        ret = []
        if self.getAttributeNameValidator().isValid(name, strict=strict):
            ret.append(TaurusElementType.Attribute)
        if self.getDeviceNameValidator().isValid(name, strict=strict):
            ret.append(TaurusElementType.Device)
        if self.getAuthorityNameValidator().isValid(name, strict=strict):
            ret.append(TaurusElementType.Authority)
        return ret

    def getValidatorFromName(self, name):
        """
        Obtain the validator object corresponding to the given model
        name. If the model name is not valid for any TaurusModel class,
        it returns None
        """
        modeltypes = self.getValidTypesForName(name)
        if not modeltypes:
            return None
        return self.elementTypesMap[modeltypes[0]].getNameValidator()

    def findObjectClass(self, absolute_name):
        """
        Obtain the class object corresponding to the given name.

        Note, this generic implementation expects that derived classes provide
        an attribute called elementTypesMap consisting of a dictionary whose
        keys are TaurusElementTypes and whose values are the corresponding
        specific object classes. e.g., the FooFactory should provide::

          class FooFactory(TaurusFactory):
              elementTypesMap = {TaurusElementType.Authority: FooAuthority,
                                 TaurusElementType.Device: FooDevice,
                                 TaurusElementType.Attribute: FooAttribute,
                                 }
              (...)


        :param absolute_name: (str) the object absolute name string

        :return: (taurus.core.taurusmodel.TaurusModel or None) a TaurusModel
                 class derived type or None if the name is not valid

        """
        try:
            elementTypesMap = self.elementTypesMap
        except AttributeError:
            msg = ('generic findObjectClass called but %s does ' +
                   'not define elementTypesMap.') % self.__class__.__name__
            raise RuntimeError(msg)
        for t in self.getValidTypesForName(absolute_name):
            ret = elementTypesMap.get(t, None)
            if ret is not None:
                return ret
        return None
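getAuthority, getDevice and getAttribute above all follow the same get-or-create idiom: validate the name, normalize it to a full name, then return the cached instance or build, cache and return a new one. A condensed, hypothetical sketch of just that idiom:

from weakref import WeakValueDictionary

class _ModelRegistry(object):
    """Hypothetical helper isolating the caching idiom used above."""

    def __init__(self, normalize, build):
        self._items = WeakValueDictionary()
        self._normalize = normalize  # name -> full name (raises if invalid)
        self._build = build          # full name -> new model object

    def get(self, name):
        fullname = self._normalize(name)
        obj = self._items.get(fullname)
        if obj is None:
            obj = self._build(fullname)
            self._items[fullname] = obj
        return obj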
Example No. 48
class ChannelManager(object):
    """ High level interface for channels

    This class handles:

    * configuration of channels
    * high level api to create and remove jobs (notify, remove_job, remove_db)
    * get jobs to run

    Here is how the runner will use it.

    Let's create a channel manager and configure it.

    >>> from pprint import pprint as pp
    >>> cm = ChannelManager()
    >>> cm.simple_configure('root:4,A:4,B:1')
    >>> db = 'db'

    Add a few jobs in channel A with priority 10

    >>> cm.notify(db, 'A', 'A1', 1, 0, 10, None, 'pending')
    >>> cm.notify(db, 'A', 'A2', 2, 0, 10, None, 'pending')
    >>> cm.notify(db, 'A', 'A3', 3, 0, 10, None, 'pending')
    >>> cm.notify(db, 'A', 'A4', 4, 0, 10, None, 'pending')
    >>> cm.notify(db, 'A', 'A5', 5, 0, 10, None, 'pending')
    >>> cm.notify(db, 'A', 'A6', 6, 0, 10, None, 'pending')

    Add a few jobs in channel B with priority 5

    >>> cm.notify(db, 'B', 'B1', 1, 0, 5, None, 'pending')
    >>> cm.notify(db, 'B', 'B2', 2, 0, 5, None, 'pending')

    We must now run one job from queue B which has a capacity of 1
    and 3 jobs from queue A so the root channel capacity of 4 is filled.

    >>> pp(list(cm.get_jobs_to_run(now=100)))
    [<ChannelJob B1>, <ChannelJob A1>, <ChannelJob A2>, <ChannelJob A3>]

    Job A2 is done. Next job to run is A4, even though we have a
    higher priority job in channel B, because channel B has a capacity of 1.

    >>> cm.notify(db, 'A', 'A2', 2, 0, 10, None, 'done')
    >>> pp(list(cm.get_jobs_to_run(now=100)))
    [<ChannelJob A4>]

    Job B1 is done. Next job to run is B2 because it has higher priority.

    >>> cm.notify(db, 'B', 'B1', 1, 0, 5, None, 'done')
    >>> pp(list(cm.get_jobs_to_run(now=100)))
    [<ChannelJob B2>]

    Let's say A1 is done and A6 gets a higher priority. A6 will run next.

    >>> cm.notify(db, 'A', 'A1', 1, 0, 10, None, 'done')
    >>> cm.notify(db, 'A', 'A6', 6, 0, 5, None, 'pending')
    >>> pp(list(cm.get_jobs_to_run(now=100)))
    [<ChannelJob A6>]

    Let's test the throttling mechanism. Configure a 2-second delay
    on channel A, and enqueue two jobs.

    >>> cm = ChannelManager()
    >>> cm.simple_configure('root:4,A:4:throttle=2')
    >>> cm.notify(db, 'A', 'A1', 1, 0, 10, None, 'pending')
    >>> cm.notify(db, 'A', 'A2', 2, 0, 10, None, 'pending')

    We have only one job to run, because of the throttle.

    >>> pp(list(cm.get_jobs_to_run(now=100)))
    [<ChannelJob A1>]
    >>> cm.get_wakeup_time()
    102

    We have no job to run, because of the throttle.

    >>> pp(list(cm.get_jobs_to_run(now=101)))
    []
    >>> cm.get_wakeup_time()
    102

    2 seconds later, we can run the other job (even though the first one
    is still running, because we have enough capacity).

    >>> pp(list(cm.get_jobs_to_run(now=102)))
    [<ChannelJob A2>]
    >>> cm.get_wakeup_time()
    104

    Let's test throttling in combination with a queue reaching full capacity.

    >>> cm = ChannelManager()
    >>> cm.simple_configure('root:4,T:2:throttle=2')
    >>> cm.notify(db, 'T', 'T1', 1, 0, 10, None, 'pending')
    >>> cm.notify(db, 'T', 'T2', 2, 0, 10, None, 'pending')
    >>> cm.notify(db, 'T', 'T3', 3, 0, 10, None, 'pending')

    >>> pp(list(cm.get_jobs_to_run(now=100)))
    [<ChannelJob T1>]
    >>> pp(list(cm.get_jobs_to_run(now=102)))
    [<ChannelJob T2>]

    Channel is now full, so no job to run even though throttling
    delay is over.

    >>> pp(list(cm.get_jobs_to_run(now=103)))
    []
    >>> cm.get_wakeup_time()  # no wakeup time, since queue is full
    0
    >>> pp(list(cm.get_jobs_to_run(now=104)))
    []
    >>> cm.get_wakeup_time()  # queue is still full
    0

    >>> cm.notify(db, 'T', 'T1', 1, 0, 10, None, 'done')
    >>> pp(list(cm.get_jobs_to_run(now=105)))
    [<ChannelJob T3>]
    >>> cm.get_wakeup_time()  # queue is full
    0
    >>> cm.notify(db, 'T', 'T2', 1, 0, 10, None, 'done')
    >>> cm.get_wakeup_time()
    107

    Test wakeup time behaviour in presence of eta.

    >>> cm = ChannelManager()
    >>> cm.simple_configure('root:4,E:1')
    >>> cm.notify(db, 'E', 'E1', 1, 0, 10, None, 'pending')
    >>> cm.notify(db, 'E', 'E2', 2, 0, 10, None, 'pending')
    >>> cm.notify(db, 'E', 'E3', 3, 0, 10, None, 'pending')

    >>> pp(list(cm.get_jobs_to_run(now=100)))
    [<ChannelJob E1>]
    >>> pp(list(cm.get_jobs_to_run(now=101)))
    []
    >>> cm.notify(db, 'E', 'E1', 1, 0, 10, 105, 'pending')
    >>> cm.get_wakeup_time()  # wakeup at eta
    105
    >>> pp(list(cm.get_jobs_to_run(now=102)))  # but there is capacity
    [<ChannelJob E2>]
    >>> pp(list(cm.get_jobs_to_run(now=106)))  # no capacity anymore
    []
    >>> cm.get_wakeup_time()  # no timed wakeup because no capacity
    0
    >>> cm.notify(db, 'E', 'E2', 1, 0, 10, None, 'done')
    >>> cm.get_wakeup_time()
    105
    >>> pp(list(cm.get_jobs_to_run(now=107)))  # eta reached, capacity free
    [<ChannelJob E1>]
    >>> cm.get_wakeup_time()
    0

    Test wakeup time behaviour in a sequential queue.

    >>> cm = ChannelManager()
    >>> cm.simple_configure('root:4,S:1:sequential')
    >>> cm.notify(db, 'S', 'S1', 1, 0, 10, None, 'pending')
    >>> cm.notify(db, 'S', 'S2', 2, 0, 10, None, 'pending')
    >>> cm.notify(db, 'S', 'S3', 3, 0, 10, None, 'pending')

    >>> pp(list(cm.get_jobs_to_run(now=100)))
    [<ChannelJob S1>]
    >>> cm.notify(db, 'S', 'S1', 1, 0, 10, None, 'failed')
    >>> pp(list(cm.get_jobs_to_run(now=101)))
    []
    >>> cm.notify(db, 'S', 'S2', 2, 0, 10, 105, 'pending')
    >>> pp(list(cm.get_jobs_to_run(now=102)))
    []

    No wakeup time despite the eta, because the sequential queue
    is waiting for a failed job.

    >>> cm.get_wakeup_time()
    0
    >>> cm.notify(db, 'S', 'S1', 1, 0, 10, None, 'pending')
    >>> cm.get_wakeup_time()
    105
    >>> pp(list(cm.get_jobs_to_run(now=102)))
    [<ChannelJob S1>]
    >>> pp(list(cm.get_jobs_to_run(now=103)))
    []
    >>> cm.notify(db, 'S', 'S1', 1, 0, 10, None, 'done')

    At this stage, we have S2 with an eta of 105 and since the
    queue is sequential, we wait for it.

    >>> pp(list(cm.get_jobs_to_run(now=103)))
    []
    >>> pp(list(cm.get_jobs_to_run(now=105)))
    [<ChannelJob S2>]
    >>> cm.notify(db, 'S', 'S2', 2, 0, 10, 105, 'done')
    >>> pp(list(cm.get_jobs_to_run(now=105)))
    [<ChannelJob S3>]
    >>> cm.notify(db, 'S', 'S3', 3, 0, 10, None, 'done')
    >>> pp(list(cm.get_jobs_to_run(now=105)))
    []

    """
    def __init__(self):
        self._jobs_by_uuid = WeakValueDictionary()
        self._root_channel = Channel(name='root', parent=None, capacity=1)
        self._channels_by_name = WeakValueDictionary(root=self._root_channel)

    @classmethod
    def parse_simple_config(cls, config_string):
        """Parse a simple channels configuration string.

        The general form is as follow:
        channel(.subchannel)*(:capacity(:key(=value)?)*)? [, ...]

        If capacity is absent, it defaults to 1.
        If a key is present without a value, it gets True as its value.
        When declaring subchannels, the root channel may be omitted
        (i.e. sub:4 is the same as root.sub:4).

        Returns a list of channel configuration dictionaries.

        >>> from pprint import pprint as pp
        >>> pp(ChannelManager.parse_simple_config('root:4'))
        [{'capacity': 4, 'name': 'root'}]
        >>> pp(ChannelManager.parse_simple_config('root:4,root.sub:2'))
        [{'capacity': 4, 'name': 'root'}, {'capacity': 2, 'name': 'root.sub'}]
        >>> pp(ChannelManager.parse_simple_config('root:4,root.sub:2:'
        ...                                       'sequential:k=v'))
        [{'capacity': 4, 'name': 'root'},
         {'capacity': 2, 'k': 'v', 'name': 'root.sub', 'sequential': True}]
        >>> pp(ChannelManager.parse_simple_config('root'))
        [{'capacity': 1, 'name': 'root'}]
        >>> pp(ChannelManager.parse_simple_config('sub:2'))
        [{'capacity': 2, 'name': 'sub'}]

        It ignores whitespace around values, and drops empty entries which
        would be generated by trailing commas or commented lines in the Odoo
        config file.

        >>> pp(ChannelManager.parse_simple_config('''
        ...     root : 4,
        ...     ,
        ...     foo bar:1: k=va lue,
        ... '''))
        [{'capacity': 4, 'name': 'root'},
         {'capacity': 1, 'k': 'va lue', 'name': 'foo bar'}]

        It's also possible to replace commas with line breaks, which is more
        readable if the channel configuration comes from the Odoo config file.

        >>> pp(ChannelManager.parse_simple_config('''
        ...     root : 4
        ...     foo bar:1: k=va lue
        ...     baz
        ... '''))
        [{'capacity': 4, 'name': 'root'},
         {'capacity': 1, 'k': 'va lue', 'name': 'foo bar'},
         {'capacity': 1, 'name': 'baz'}]
        """
        res = []
        config_string = config_string.replace("\n", ",")
        for channel_config_string in split_strip(config_string, ','):
            if not channel_config_string:
                # ignore empty entries (commented lines, trailing commas)
                continue
            config = {}
            config_items = split_strip(channel_config_string, ':')
            name = config_items[0]
            if not name:
                raise ValueError('Invalid channel config %s: '
                                 'missing channel name' % config_string)
            config['name'] = name
            if len(config_items) > 1:
                capacity = config_items[1]
                try:
                    config['capacity'] = int(capacity)
                except ValueError:
                    raise ValueError('Invalid channel config %s: '
                                     'invalid capacity %s' %
                                     (config_string, capacity))
                for config_item in config_items[2:]:
                    kv = split_strip(config_item, '=')
                    if len(kv) == 1:
                        k, v = kv[0], True
                    elif len(kv) == 2:
                        k, v = kv
                    else:
                        raise ValueError('Invalid channel config %s: '
                                         'incorrect config item %s' %
                                         (config_string, config_item))
                    if k in config:
                        raise ValueError('Invalid channel config %s: '
                                         'duplicate key %s' %
                                         (config_string, k))
                    config[k] = v
            else:
                config['capacity'] = 1
            res.append(config)
        return res

    def simple_configure(self, config_string):
        """Configure the channel manager from a simple configuration string

        >>> cm = ChannelManager()
        >>> c = cm.get_channel_by_name('root')
        >>> c.capacity
        1
        >>> cm.simple_configure('root:4,autosub.sub:2,seq:1:sequential')
        >>> cm.get_channel_by_name('root').capacity
        4
        >>> cm.get_channel_by_name('root').sequential
        False
        >>> cm.get_channel_by_name('root.autosub').capacity
        >>> cm.get_channel_by_name('root.autosub.sub').capacity
        2
        >>> cm.get_channel_by_name('root.autosub.sub').sequential
        False
        >>> cm.get_channel_by_name('autosub.sub').capacity
        2
        >>> cm.get_channel_by_name('seq').capacity
        1
        >>> cm.get_channel_by_name('seq').sequential
        True
        """
        for config in ChannelManager.parse_simple_config(config_string):
            self.get_channel_from_config(config)

    def get_channel_from_config(self, config):
        """Return a Channel object from a parsed configuration.

        If the channel does not exist it is created.
        The configuration is applied on the channel before returning it.
        If some of the parent channels are missing when creating a subchannel,
        the parent channels are auto created with an infinite capacity
        (except for the root channel, which defaults to a capacity of 1
        when not configured explicitly).
        """
        channel = self.get_channel_by_name(config['name'], autocreate=True)
        channel.configure(config)
        _logger.info("Configured channel: %s", channel)
        return channel

    def get_channel_by_name(self, channel_name, autocreate=False):
        """Return a Channel object by its name.

        If it does not exist and autocreate is True, it is created
        with a default configuration and inserted in the Channels structure.
        If autocreate is False and the channel does not exist, an exception
        is raised.

        >>> cm = ChannelManager()
        >>> c = cm.get_channel_by_name('root', autocreate=False)
        >>> c.name
        'root'
        >>> c.fullname
        'root'
        >>> c = cm.get_channel_by_name('root.sub', autocreate=True)
        >>> c.name
        'sub'
        >>> c.fullname
        'root.sub'
        >>> c = cm.get_channel_by_name('sub', autocreate=True)
        >>> c.name
        'sub'
        >>> c.fullname
        'root.sub'
        >>> c = cm.get_channel_by_name('autosub.sub', autocreate=True)
        >>> c.name
        'sub'
        >>> c.fullname
        'root.autosub.sub'
        >>> c = cm.get_channel_by_name(None)
        >>> c.fullname
        'root'
        >>> c = cm.get_channel_by_name('root.sub')
        >>> c.fullname
        'root.sub'
        >>> c = cm.get_channel_by_name('sub')
        >>> c.fullname
        'root.sub'
        """
        if not channel_name or channel_name == self._root_channel.name:
            return self._root_channel
        if not channel_name.startswith(self._root_channel.name + '.'):
            channel_name = self._root_channel.name + '.' + channel_name
        if channel_name in self._channels_by_name:
            return self._channels_by_name[channel_name]
        if not autocreate:
            raise ChannelNotFound('Channel %s not found' % channel_name)
        parent = self._root_channel
        for subchannel_name in channel_name.split('.')[1:]:
            subchannel = parent.get_subchannel_by_name(subchannel_name)
            if not subchannel:
                subchannel = Channel(subchannel_name, parent, capacity=None)
                self._channels_by_name[subchannel.fullname] = subchannel
            parent = subchannel
        return parent

    def notify(self, db_name, channel_name, uuid, seq, date_created, priority,
               eta, state):
        try:
            channel = self.get_channel_by_name(channel_name)
        except ChannelNotFound:
            _logger.warning(
                'unknown channel %s, '
                'using root channel for job %s', channel_name, uuid)
            channel = self._root_channel
        job = self._jobs_by_uuid.get(uuid)
        if job:
            # db_name is invariant
            assert job.db_name == db_name
            # date_created is invariant
            assert job.date_created == date_created
            # if one of the job properties that influence
            # scheduling order has changed, we remove the job
            # from the queues and create a new job object
            if (seq != job.seq or priority != job.priority or eta != job.eta
                    or channel != job.channel):
                _logger.debug("job %s properties changed, rescheduling it",
                              uuid)
                self.remove_job(uuid)
                job = None
        if not job:
            job = ChannelJob(db_name, channel, uuid, seq, date_created,
                             priority, eta)
            self._jobs_by_uuid[uuid] = job
        # state transitions
        if not state or state == DONE:
            job.channel.set_done(job)
        elif state == PENDING:
            job.channel.set_pending(job)
        elif state in (ENQUEUED, STARTED):
            job.channel.set_running(job)
        elif state == FAILED:
            job.channel.set_failed(job)
        else:
            _logger.error("unexpected state %s for job %s", state, job)

    def remove_job(self, uuid):
        job = self._jobs_by_uuid.get(uuid)
        if job:
            job.channel.remove(job)
            del self._jobs_by_uuid[job.uuid]

    def remove_db(self, db_name):
        for job in self._jobs_by_uuid.values():
            if job.db_name == db_name:
                job.channel.remove(job)
                del self._jobs_by_uuid[job.uuid]

    def get_jobs_to_run(self, now):
        return self._root_channel.get_jobs_to_run(now)

    def get_wakeup_time(self):
        return self._root_channel.get_wakeup_time()
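The parse_simple_config variant above calls a split_strip helper that is not defined in this snippet. A minimal sketch consistent with how it is called (split_strip(config_item, '=')), assuming it only splits and strips whitespace:

def split_strip(s, sep, maxsplit=-1):
    # Split `s` on `sep` and strip surrounding whitespace from each part.
    # A hypothetical stand-in for the helper the snippet imports elsewhere.
    return [part.strip() for part in s.split(sep, maxsplit)]

With such a helper, ' sequential ' and 'sequential' parse to the same config key.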
Ejemplo n.º 49
0
class ValueCache:
    """Checksum-to-value cache.
    Every value is referred to by one or more expressions; non-expression values are
    cached elsewhere.

    Memory intensive. Like any other cache, does not persist unless offloaded.
    Cache comes in two flavors: buffer cache and object cache.
    Buffer cache items are directly serializable, and can be shared over the
    network, or offloaded to Redis.
     Keys are straightforward buffer checksums.
     Each item has two refcounts, authoritative and non-authoritative,
     corresponding to the authority of the referring expression(s).
     Trees referring to structured-cells-with-partial-authority count as
     authoritative.
    Object cache items contain Python objects. They are strictly local.
     Keys are *semantic keys*, consisting of a *semantic checksum* (which
     differs from the buffer checksum for CSON and Python code expressions),
     an access mode, and a content type.
     They are maintained as a WeakValueDictionary, i.e. they get auto-cleaned-up
     if Python holds no reference to them.
     A worker that accesses them gets a Python reference.
     cell.value gets a Python reference too, for non-structured cells.
     Structured cells (.value, .data, .handle) do NOT get a Python reference;
      instead, the underlying Monitor is backed by an API that retrieves
      the value from object cache when needed (and does not store it).
    There are two ways an item can be requested:
    1. Explicitly from object cache or buffer cache, using a semantic key or
      a buffer checksum, respectively.
    2. Using an expression, from which the buffer checksum can be extracted
     and a semantic key generated or read from cache.
    Whenever an item requested from object cache is a partial miss
     (a miss from object cache but a hit from buffer cache), the item gets
     regenerated from buffer cache.
     In this case, and also when a new object item is added explicitly,
     a temporary reference to the item is added, which expires after 20 seconds.

    For cells, there is normally only one buffer cache item, because all expressions
     of the same cell._storage_type are mapped to it.
    In contrast, every expression typically has its own semantic checksum (reflecting
     both cell type and subpath), and therefore its own object cache item.
    """
    def __init__(self, manager):
        value_caches.add(self)
        self.manager = weakref.ref(manager) if manager is not None else lambda: None
        self._buffer_cache = {} #buffer-checksum-to-(refcount, refcount, value)
        self._object_cache = WeakValueDictionary() #semantic-key-to-value

        # TODO: caches for annotation purposes: reverse caches, ...

    def incref(self, buffer_checksum, buffer, *, has_auth):
        """Increase refcount for buffer checksum
        Add an extra non-auth refcount that expires after 20 secs (unless smaller than 100k).
        
        Returns True if the full buffer has been successfully inserted
        If buffer is not None, this is always the case
        If buffer is None:
            - If the checksum is unknown, a dummy item is inserted
              (returns False)
            - If a dummy item is found, it is incref'ed
              (returns False)
            - If a full item (with non-None buffer) is found, it is incref'ed
              (returns True)
        """
        #print("INCREF", buffer_checksum.hex(), buffer)        
        item = self._buffer_cache.get(buffer_checksum)        
        if item is None:            
            if has_auth:
                item = 1, 1, buffer
            else:
                item = 0, 2, buffer
        else:
            if item[2] is not None:
                buffer = item[2] # *should* be equal, 
                                # if buffer is not None, 
                                # and everyone is honest 
                                #   (i.e. checksum is not spoofed)
            if has_auth:
                item = item[0] + 1, item[1] + 1, buffer
            else:
                item = item[0], item[1] + 2, buffer
        success = (buffer is not None)                        
        mgr = self.manager()
        if mgr is not None:
            tempref = functools.partial(self.decref, buffer_checksum, has_auth=False)
            mgr.temprefmanager.add_ref(tempref, 20.0) 
        self._buffer_cache[buffer_checksum] = item        
        redis_sinks.set_value(buffer_checksum, buffer)

        return success

    def decref(self, buffer_checksum, *, has_auth):
        item = self._buffer_cache[buffer_checksum]
        refcount_auth, refcount_nauth, buffer = item
        # Dummy items (buffer is None) and buffers at or below
        # NO_EXPIRE_SIZE_LIMIT are never evicted.
        if buffer is None or len(buffer) <= NO_EXPIRE_SIZE_LIMIT:
            return
        if has_auth:
            assert refcount_auth > 0
            if refcount_auth == 1 and refcount_nauth == 0:
                self._buffer_cache.pop(buffer_checksum)
                return
            item = refcount_auth - 1, refcount_nauth, buffer
            self._buffer_cache[buffer_checksum] = item
        else:
            assert refcount_nauth > 0
            if refcount_auth == 0 and refcount_nauth == 1:
                self._buffer_cache.pop(buffer_checksum)
                return
            item = refcount_auth, refcount_nauth - 1, buffer
            self._buffer_cache[buffer_checksum] = item

    def add_semantic_key(self, semantic_key, value):
        assert isinstance(semantic_key, SemanticKey)
        try:
            # WeakValueDictionary values must support weak references; the
            # hash() probe additionally forces wrapping of unhashable values.
            # Either failure raises TypeError.
            self._object_cache[semantic_key] = value
            hash(value)
        except TypeError:
            value = WeakrefableWrapper(value)
            self._object_cache[semantic_key] = value
        mgr = self.manager()
        if mgr is not None:  # keep a temporary strong reference alive
            mgr.temprefmanager.add_ref(value, 20.0)

    def get_object(self, semantic_key):
        assert isinstance(semantic_key, SemanticKey)
        item = self._object_cache.get(semantic_key)
        if isinstance(item, WeakrefableWrapper):
            return item.value
        else:
            return item

    def get_buffer(self, checksum):
        # Returns the full (refcount_auth, refcount_nauth, buffer) item,
        # falling back to Redis on a local miss, or None.
        if checksum is None:
            return None
        item = self._buffer_cache.get(checksum)
        if item is None or item[2] is None:
            item = None
            value = redis_caches.get_value(checksum)
            if value is not None:
                item = 1, 1, value
        return item

    def value_check(self, checksum):
        """For the communionserver..."""
        if checksum in self._buffer_cache:
            return True
        return redis_caches.has_value(checksum)
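ValueCache keeps its object cache in a WeakValueDictionary, whose values must support weak references; the WeakrefableWrapper it falls back to is not defined in this snippet. A minimal sketch of such a wrapper (the real class may carry more bookkeeping):

class WeakrefableWrapper:
    # Wraps values such as dicts and lists, which support neither weak
    # references nor hash(), so they can live in a WeakValueDictionary.
    __slots__ = ("value", "__weakref__")

    def __init__(self, value):
        self.value = value

get_object unwraps these transparently, so callers never see the wrapper.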
Ejemplo n.º 50
0
class ChannelManager(object):
    """ High level interface for channels

    This class handles:

    * configuration of channels
    * high level api to create and remove jobs (notify, remove_job, remove_db)
    * get jobs to run

    Here is how the runner will use it.

    Let's create a channel manager and configure it.

    >>> from pprint import pprint as pp
    >>> cm = ChannelManager()
    >>> cm.simple_configure('root:4,A:4,B:1')
    >>> db = 'db'

    Add a few jobs in channel A with priority 10

    >>> cm.notify(db, 'A', 'A1', 1, 0, 10, None, 'pending')
    >>> cm.notify(db, 'A', 'A2', 2, 0, 10, None, 'pending')
    >>> cm.notify(db, 'A', 'A3', 3, 0, 10, None, 'pending')
    >>> cm.notify(db, 'A', 'A4', 4, 0, 10, None, 'pending')
    >>> cm.notify(db, 'A', 'A5', 5, 0, 10, None, 'pending')
    >>> cm.notify(db, 'A', 'A6', 6, 0, 10, None, 'pending')

    Add a few jobs in channel B with priority 5

    >>> cm.notify(db, 'B', 'B1', 1, 0, 5, None, 'pending')
    >>> cm.notify(db, 'B', 'B2', 2, 0, 5, None, 'pending')

    We must now run one job from queue B which has a capacity of 1
    and 3 jobs from queue A so the root channel capacity of 4 is filled.

    >>> pp(list(cm.get_jobs_to_run(now=100)))
    [<ChannelJob B1>, <ChannelJob A1>, <ChannelJob A2>, <ChannelJob A3>]

    Job A2 is done. The next job to run is A4, even though there is a
    higher priority job in channel B, because channel B has a capacity of 1.

    >>> cm.notify(db, 'A', 'A2', 2, 0, 10, None, 'done')
    >>> pp(list(cm.get_jobs_to_run(now=100)))
    [<ChannelJob A4>]

    Job B1 is done. Next job to run is B2 because it has higher priority.

    >>> cm.notify(db, 'B', 'B1', 1, 0, 5, None, 'done')
    >>> pp(list(cm.get_jobs_to_run(now=100)))
    [<ChannelJob B2>]

    Let's say A1 is done and A6 gets a higher priority. A6 will run next.

    >>> cm.notify(db, 'A', 'A1', 1, 0, 10, None, 'done')
    >>> cm.notify(db, 'A', 'A6', 6, 0, 5, None, 'pending')
    >>> pp(list(cm.get_jobs_to_run(now=100)))
    [<ChannelJob A6>]
    """

    def __init__(self):
        self._jobs_by_uuid = WeakValueDictionary()
        self._root_channel = Channel(name='root', parent=None, capacity=1)
        self._channels_by_name = WeakValueDictionary(root=self._root_channel)

    @classmethod
    def parse_simple_config(cls, config_string):
        """Parse a simple channels configuration string.

        The general form is as follows:
        channel(.subchannel)*(:capacity(:key(=value)?)*)?,...

        If capacity is absent, it defaults to 1.
        If a key is present without value, it gets True as value.
        When declaring subchannels, the root channel may be omitted
        (i.e. sub:4 is the same as root.sub:4).

        Returns a list of channel configuration dictionaries.

        >>> from pprint import pprint as pp
        >>> pp(ChannelManager.parse_simple_config('root:4'))
        [{'capacity': 4, 'name': 'root'}]
        >>> pp(ChannelManager.parse_simple_config('root:4,root.sub:2'))
        [{'capacity': 4, 'name': 'root'}, {'capacity': 2, 'name': 'root.sub'}]
        >>> pp(ChannelManager.parse_simple_config('root:4,root.sub:2:'
        ...                                       'sequential:k=v'))
        [{'capacity': 4, 'name': 'root'},
         {'capacity': 2, 'k': 'v', 'name': 'root.sub', 'sequential': True}]
        >>> pp(ChannelManager.parse_simple_config('root'))
        [{'capacity': 1, 'name': 'root'}]
        >>> pp(ChannelManager.parse_simple_config('sub:2'))
        [{'capacity': 2, 'name': 'sub'}]
        """
        res = []
        for channel_config_string in config_string.split(','):
            config = {}
            config_items = channel_config_string.split(':')
            name = config_items[0]
            if not name:
                raise ValueError('Invalid channel config %s: '
                                 'missing channel name' % config_string)
            config['name'] = name
            if len(config_items) > 1:
                capacity = config_items[1]
                try:
                    config['capacity'] = int(capacity)
                except ValueError:
                    raise ValueError('Invalid channel config %s: '
                                     'invalid capacity %s' %
                                     (config_string, capacity))
                for config_item in config_items[2:]:
                    kv = config_item.split('=')
                    if len(kv) == 1:
                        k, v = kv[0], True
                    elif len(kv) == 2:
                        k, v = kv
                    else:
                        raise ValueError('Invalid channel config %s: '
                                         'incorrect config item %s' %
                                         (config_string, config_item))
                    if k in config:
                        raise ValueError('Invalid channel config %s: '
                                         'duplicate key %s' %
                                         (config_string, k))
                    config[k] = v
            else:
                config['capacity'] = 1
            res.append(config)
        return res

    def simple_configure(self, config_string):
        """Configure the channel manager from a simple configuration string

        >>> cm = ChannelManager()
        >>> c = cm.get_channel_by_name('root')
        >>> c.capacity
        1
        >>> cm.simple_configure('root:4,autosub.sub:2')
        >>> cm.get_channel_by_name('root').capacity
        4
        >>> cm.get_channel_by_name('root.autosub').capacity
        >>> cm.get_channel_by_name('root.autosub.sub').capacity
        2
        >>> cm.get_channel_by_name('autosub.sub').capacity
        2
        """
        for config in ChannelManager.parse_simple_config(config_string):
            self.get_channel_from_config(config)

    def get_channel_from_config(self, config):
        """Return a Channel object from a parsed configuration.

        If the channel does not exist it is created.
        The configuration is applied on the channel before returning it.
        If some of the parent channels are missing when creating a subchannel,
        the parent channels are auto created with an infinite capacity
        (except for the root channel, which defaults to a capacity of 1
        when not configured explicitly).
        """
        channel = self.get_channel_by_name(config['name'], autocreate=True)
        channel.configure(config)
        return channel

    def get_channel_by_name(self, channel_name, autocreate=False):
        """Return a Channel object by its name.

        If it does not exist and autocreate is True, it is created
        with a default configuration and inserted in the Channels structure.
        If autocreate is False and the channel does not exist, an exception
        is raised.

        >>> cm = ChannelManager()
        >>> c = cm.get_channel_by_name('root', autocreate=False)
        >>> c.name
        'root'
        >>> c.fullname
        'root'
        >>> c = cm.get_channel_by_name('root.sub', autocreate=True)
        >>> c.name
        'sub'
        >>> c.fullname
        'root.sub'
        >>> c = cm.get_channel_by_name('sub', autocreate=True)
        >>> c.name
        'sub'
        >>> c.fullname
        'root.sub'
        >>> c = cm.get_channel_by_name('autosub.sub', autocreate=True)
        >>> c.name
        'sub'
        >>> c.fullname
        'root.autosub.sub'
        >>> c = cm.get_channel_by_name(None)
        >>> c.fullname
        'root'
        >>> c = cm.get_channel_by_name('root.sub')
        >>> c.fullname
        'root.sub'
        >>> c = cm.get_channel_by_name('sub')
        >>> c.fullname
        'root.sub'
        """
        if not channel_name or channel_name == self._root_channel.name:
            return self._root_channel
        if not channel_name.startswith(self._root_channel.name + '.'):
            channel_name = self._root_channel.name + '.' + channel_name
        if channel_name in self._channels_by_name:
            return self._channels_by_name[channel_name]
        if not autocreate:
            raise ChannelNotFound('Channel %s not found' % channel_name)
        parent = self._root_channel
        for subchannel_name in channel_name.split('.')[1:]:
            subchannel = parent.get_subchannel_by_name(subchannel_name)
            if not subchannel:
                subchannel = Channel(subchannel_name, parent, capacity=None)
                self._channels_by_name[subchannel.fullname] = subchannel
            parent = subchannel
        return parent

    def notify(self, db_name, channel_name, uuid,
               seq, date_created, priority, eta, state):
        try:
            channel = self.get_channel_by_name(channel_name)
        except ChannelNotFound:
            _logger.warning('unknown channel %s, '
                            'using root channel for job %s',
                            channel_name, uuid)
            channel = self._root_channel
        job = self._jobs_by_uuid.get(uuid)
        if job:
            # db_name is invariant
            assert job.db_name == db_name
            # date_created is invariant
            assert job.date_created == date_created
            # if one of the job properties that influence
            # scheduling order has changed, we remove the job
            # from the queues and create a new job object
            if (seq != job.seq or
                    priority != job.priority or
                    eta != job.eta or
                    channel != job.channel):
                _logger.debug("job %s properties changed, rescheduling it",
                              uuid)
                self.remove_job(uuid)
                job = None
        if not job:
            job = ChannelJob(db_name, channel, uuid,
                             seq, date_created, priority, eta)
            self._jobs_by_uuid[uuid] = job
        # state transitions
        if not state or state == DONE:
            job.channel.set_done(job)
        elif state == PENDING:
            job.channel.set_pending(job)
        elif state in (ENQUEUED, STARTED):
            job.channel.set_running(job)
        elif state == FAILED:
            job.channel.set_failed(job)
        else:
            _logger.error("unexpected state %s for job %s", state, job)

    def remove_job(self, uuid):
        job = self._jobs_by_uuid.get(uuid)
        if job:
            job.channel.remove(job)
            del self._jobs_by_uuid[job.uuid]

    def remove_db(self, db_name):
        for job in self._jobs_by_uuid.values():
            if job.db_name == db_name:
                job.channel.remove(job)
                del self._jobs_by_uuid[job.uuid]

    def get_jobs_to_run(self, now):
        return self._root_channel.get_jobs_to_run(now)
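A runner built on this ChannelManager would poll for job state changes, feed them to notify(), and dispatch whatever get_jobs_to_run() returns. A hedged sketch of such a loop; fetch_job_events and enqueue_job are hypothetical placeholders for the real persistence and dispatch layer:

import time

def runner_loop(cm, db_name, poll_interval=1.0):
    while True:
        # Hypothetical: yields tuples matching notify()'s argument order
        # (channel_name, uuid, seq, date_created, priority, eta, state).
        for event in fetch_job_events(db_name):
            cm.notify(db_name, *event)
        for job in cm.get_jobs_to_run(now=time.time()):
            enqueue_job(job.db_name, job.uuid)  # hypothetical dispatch
        time.sleep(poll_interval)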
Ejemplo n.º 51
0
class JSONRedis:
    """Extended :class:`Redis` client for convenient use with JSON objects.

    Objects are stored as JSON-encoded strings in the Redis database and en-/decoding is handled
    transparently.

    The translation from an arbitrary object to a JSON-serializable form is carried out by a given
    ``encode(object)`` function. A JSON-serializable object is one that only consists of the types
    given in https://docs.python.org/3/library/json.html#py-to-json-table . *encode* is passed as
    *default* argument to :func:`json.dumps()`.

    The reverse translation is done by a given ``decode(json)`` function. *decode* is passed as
    *object_hook* argument to :func:`json.loads()`.

    When *caching* is enabled, objects loaded from the Redis database are cached and subsequently
    retrieved from the cache. An object stays in the cache as long as there is a reference to it and
    it is automatically removed when the Python interpreter destroys it. Thus, it is guaranteed that
    getting the same key multiple times will yield the identical object.

    .. attribute:: r

       Underlying :class:`Redis` client.

    .. attribute:: encode

       Function to encode an object to a JSON-serializable form.

    .. attribute:: decode

       Function to decode an object from a JSON-serializable form.

    .. attribute:: caching

        Switch to enable / disable object caching.
    """

    def __init__(self, r, encode=None, decode=None, caching=True):
        self.r = r
        self.encode = encode
        self.decode = decode
        self.caching = caching
        self._cache = WeakValueDictionary()

    def oget(self, key):
        """Return the object at *key*."""
        object = self._cache.get(key) if self.caching else None
        if not object:
            value = self.get(key)
            if value:
                try:
                    object = json.loads(value.decode(), object_hook=self.decode)
                except ValueError:
                    raise ResponseError()
                if self.caching:
                    self._cache[key] = object
        return object

    def oset(self, key, object):
        """Set *key* to hold *object*."""
        if self.caching:
            self._cache[key] = object
        self.set(key, json.dumps(object, default=self.encode))

    def omget(self, keys):
        """Return a list of objects for the given *keys*."""
        # TODO: make atomic
        return [self.oget(k) for k in keys]

    def omset(self, mapping):
        """Set each key in *mapping* to its corresponding object."""
        # TODO: make atomic
        for key, object in mapping.items():
            self.oset(key, object)

    def __getattr__(self, name):
        # proxy
        return getattr(self.r, name)
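A hedged usage sketch for JSONRedis; the Point class and the '__point__' tag are illustrative, and a reachable Redis server is assumed. It shows how the encode/decode hooks plug into json.dumps()/json.loads(), and how caching makes repeated oget() calls yield the identical object:

from redis import Redis

class Point:
    def __init__(self, x, y):
        self.x, self.y = x, y

def encode(obj):
    # Passed as `default` to json.dumps(): called for non-serializable objects
    if isinstance(obj, Point):
        return {'__point__': True, 'x': obj.x, 'y': obj.y}
    raise TypeError(obj)

def decode(d):
    # Passed as `object_hook` to json.loads(): called for every JSON object
    return Point(d['x'], d['y']) if d.get('__point__') else d

db = JSONRedis(Redis(), encode=encode, decode=decode)
db.oset('p', Point(1, 2))
assert db.oget('p') is db.oget('p')  # identity guaranteed by the weak cache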
Ejemplo n.º 52
0
class Manager:

    def __init__(self):
        self.listeners = {}
        self.cell_aliases = {}
        self.cell_rev_aliases = {}
        self.macro_listeners = {}
        self.observers = {}
        self.registrar_listeners = WeakKeyDictionary()
        self.rev_registrar_listeners = WeakKeyDictionary()
        self.pin_to_cells = {}
        self.cells = WeakValueDictionary()
        self.cell_to_output_pin = WeakKeyDictionary()
        self._childids = WeakValueDictionary()
        self.registrar_items = []
        self.unstable_workers = WeakSet()
        super().__init__()

    def set_stable(self, worker, value):
        assert value in (True, False), value
        if not value:
            #print("UNSTABLE", worker)
            self.unstable_workers.add(worker)
        else:
            #print("STABLE", worker)
            self.unstable_workers.discard(worker)

    def add_cell_alias(self, source, target):
        from .cell import Cell
        assert isinstance(source, Cell)
        assert isinstance(target, Cell)
        assert source is not target
        cell_id = self.get_cell_id(source)
        target_ref = weakref.ref(target)

        try:
            aliases = self.cell_aliases[cell_id]
            if target_ref not in aliases:
                aliases.append(target_ref)

        except KeyError:
            self.cell_aliases[cell_id] = [target_ref]

        if cell_id not in self.cells:
            self.cells[cell_id] = source

        #reverse alias
        cell_id = self.get_cell_id(target)
        source_ref = weakref.ref(source)

        try:
            rev_aliases = self.cell_rev_aliases[cell_id]
            if source_ref not in rev_aliases:
                rev_aliases.append(source_ref)

        except KeyError:
            self.cell_rev_aliases[cell_id] = [source_ref]

        if cell_id not in self.cells:
            self.cells[cell_id] = target

    def add_registrar_item(self, registrar_name, dtype, data, data_name):
        item = registrar_name, dtype, data, data_name
        for curr_item in self.registrar_items:
            if data_name is None:
                exists = (curr_item[:3] == item[:3])
            else:
                exists = (curr_item[:2] == item[:2]) and \
                  curr_item[3] == data_name
            if exists:
                raise ValueError("Registrar item already exists")
        self.registrar_items.append(item)

    def remove_registrar_item(self, registrar_name, dtype, data, data_name):
        item = registrar_name, dtype, data, data_name
        self.registrar_items.remove(item)

    def add_listener(self, cell, input_pin):
        cell_id = self.get_cell_id(cell)
        pin_ref = weakref.ref(input_pin)

        try:
            listeners = self.listeners[cell_id]
            assert pin_ref not in listeners
            # TODO: tolerate (silently ignore) a connection that exists already?
            listeners.append(pin_ref)

        except KeyError:
            self.listeners[cell_id] = [pin_ref]

        try:
            curr_pin_to_cells = self.pin_to_cells[input_pin.get_pin_id()]
            assert cell_id not in curr_pin_to_cells
            # TODO: tolerate (append) multiple inputs?
            curr_pin_to_cells.append(cell_id)

        except KeyError:
            self.pin_to_cells[input_pin.get_pin_id()] = [cell_id]

        if cell_id not in self.cells:
            self.cells[cell_id] = cell

    def _remove_listener(self, cell_id, input_pin, worker):
        input_pin_id = input_pin.get_pin_id()
        l = self.listeners[cell_id]
        l[:] = [ref for ref in l if ref().get_pin_id() != input_pin_id]
        if not len(l):
            self.listeners.pop(cell_id)
            cell = self.cells.get(cell_id, None)
            if cell is not None:
                cell._on_disconnect(input_pin, worker, False)

    def remove_listener(self, cell, input_pin):
        worker = input_pin.worker_ref()
        self.pin_to_cells.pop(input_pin.get_pin_id(), None)
        cell_id = self.get_cell_id(cell)
        self._remove_listener(cell_id, input_pin, worker)

    def remove_listeners_pin(self, input_pin):
        worker = input_pin.worker_ref()
        cell_ids = self.pin_to_cells.pop(input_pin.get_pin_id(), [])
        for cell_id in cell_ids:
            self._remove_listener(cell_id, input_pin, worker)

    def remove_aliases(self, cell):
        cell_id = self.get_cell_id(cell)
        cell_ref = weakref.ref(cell)
        targets = self.cell_aliases.pop(cell_id, [])

        for target_ref in targets:
            target = target_ref()
            if target is None:
                continue
            target._on_disconnect(cell, None, incoming=True)
            target_id = self.get_cell_id(target)
            r = self.cell_rev_aliases[target_id]
            r[:] = [rr for rr in r if rr is not cell_ref]
            if not len(r):
                self.cell_rev_aliases.pop(target_id)

        #rev_aliases
        targets = self.cell_rev_aliases.pop(cell_id, [])

        for target_ref in targets:
            target = target_ref()
            if target is None:
                continue
            target_id = self.get_cell_id(target)
            r = self.cell_aliases[target_id]
            r[:] = [rr for rr in r if rr is not cell_ref]
            if not len(r):
                self.cell_aliases.pop(target_id)

    def remove_listeners_cell(self, cell):
        cell_id = self.get_cell_id(cell)
        listeners = self.listeners.pop(cell_id, [])
        for listener in listeners:
            pin = listener()
            if pin is None:
                continue
            pin_id = pin.get_pin_id()
            if pin_id not in self.pin_to_cells:
                continue
            self.pin_to_cells[pin_id][:] = \
                [c for c in self.pin_to_cells[pin_id] if c != cell_id ]


    def add_macro_listener(self, cell, macro_object, macro_arg):
        cell_id = self.get_cell_id(cell)
        m = (macro_object, macro_arg)

        try:
            macro_listeners = self.macro_listeners[cell_id]
            assert m not in macro_listeners
            macro_listeners.append(m)

        except KeyError:
            self.macro_listeners[cell_id] = [m]
            if cell_id not in self.cells:
                self.cells[cell_id] = cell

    def remove_macro_listener(self, cell, macro_object, macro_arg):
        cell_id = self.get_cell_id(cell)
        m = (macro_object, macro_arg)

        if cell_id in self.macro_listeners:
            l = self.macro_listeners[cell_id]
            if m in l:
                l.remove(m)

    def remove_macro_listeners_cell(self, cell):
        cell_id = self.get_cell_id(cell)
        self.macro_listeners.pop(cell_id, None)


    def add_registrar_listener(self, registrar, key, target, namespace_name):
        if registrar not in self.registrar_listeners:
            self.registrar_listeners[registrar] = {}
        d = self.registrar_listeners[registrar]
        if key not in d:
            d[key] = []
        d[key].append((weakref.ref(target), namespace_name))

        if target not in self.rev_registrar_listeners:
            self.rev_registrar_listeners[target] = {}
        r = self.rev_registrar_listeners[target]
        if key not in r:
            r[key] = []
        r[key].append(weakref.ref(registrar))

    def remove_registrar_listeners(self, target):
        if target not in self.rev_registrar_listeners:
            return
        rev = self.rev_registrar_listeners.pop(target)
        for key in rev:
            for registrar_ref in rev[key]:
                registrar = registrar_ref()
                if registrar not in self.registrar_listeners:
                    continue
                r = self.registrar_listeners[registrar]
                t = r[key]
                t[:] = [tt for tt in t if tt[0]() is not None and tt[0]() is not target]
                if not len(t):
                    r.pop(key)
                    if not len(r):
                        self.registrar_listeners.pop(registrar)


    def add_observer(self, cell, observer):
        cell_id = self.get_cell_id(cell)
        obs_ref = weakref.ref(observer)

        try:
            observers = self.observers[cell_id]
            assert obs_ref not in observers
            observers.append(obs_ref)
        except KeyError:
            self.observers[cell_id] = [obs_ref]
        if cell_id not in self.cells:
            self.cells[cell_id] = cell

    def remove_observer(self, cell, observer):
        cell_id = self.get_cell_id(cell)
        obs_ref = weakref.ref(observer)

        if cell_id in self.observers:
            l = self.observers[cell_id]
            if obs_ref in l:
                l.remove(obs_ref)

    def remove_observers_cell(self, cell):
        cell_id = self.get_cell_id(cell)
        self.observers.pop(cell_id, None)

    def _update(self, cell, dtype, value, *,
            worker=None, only_last=False):
        import threading
        assert threading.current_thread() is threading.main_thread()
        from .cell import Signal
        cell_id = self.get_cell_id(cell)

        observers = self.observers.get(cell_id, [])
        for observer in observers:
            obs = observer()
            if obs is not None:
                obs(value)

        macro_listeners = self.macro_listeners.get(cell_id, [])
        if not only_last:
            for macro_object, macro_arg in macro_listeners:
                try:
                    updated = macro_object.update_cell(macro_arg)
                except Exception:
                    #TODO: proper logging
                    import traceback
                    traceback.print_exc()

        aliases = self.cell_aliases.get(cell_id, [])
        for target_cell_ref in aliases:
            target_cell = target_cell_ref()
            if target_cell is not None:
                if isinstance(target_cell, Signal):
                    #print("cell-cell alias", cell, "=>", target_cell)
                    self._update(target_cell, None, None,
                        worker=worker, only_last=only_last)
                else:
                    value2 = value
                    if dtype is not None and \
                      (dtype == "cson" or dtype[0] == "cson") and \
                      target_cell.dtype is not None and \
                      (target_cell.dtype == "json" or target_cell.dtype[0] == "json"):
                        if isinstance(value, (str, bytes)):
                            value2 = cson2json(value)

                    target_cell._update(value2, propagate=True)

        listeners = self.listeners.get(cell_id, [])
        if only_last:
            listeners = listeners[-1:]

        resource_name0 = None
        if cell.resource is not None:
            resource_name0 = cell.resource.filepath
        for input_pin_ref in listeners:
            input_pin = input_pin_ref()

            if input_pin is None:
                continue #TODO: error?

            if worker is not None and input_pin.worker_ref() is worker:
                continue
            value2 = value
            if dtype is not None and \
              (dtype == "cson" or dtype[0] == "cson") and \
              input_pin.dtype is not None and \
              (input_pin.dtype == "json" or input_pin.dtype[0] == "json"):
                if isinstance(value, (str, bytes)):
                    value2 = cson2json(value)
            resource_name = "pin: " + str(input_pin)
            if resource_name0 is not None:
                resource_name = resource_name0 + " in " + resource_name
            input_pin.receive_update(value2, resource_name)

    def update_from_code(self, cell, only_last=False):
        import seamless
        if cell.dtype == "array":
            value = TransportedArray(cell._data, cell._store)
        else:
            value = cell._data
        #print("manager.update_from_code", cell, head(value))
        self._update(cell, cell.dtype, value, only_last=only_last)
        from .. import run_work
        from .macro import get_macro_mode
        if not get_macro_mode():
            run_work()

    def update_from_worker(self, cell_id, value, worker, *, preliminary):
        import seamless
        from .cell import Signal
        cell = self.cells.get(cell_id, None)
        if cell is None or cell._destroyed:
            return #cell has died...
        #print("manager.update_from_worker", cell, head(value), worker)

        if isinstance(cell, Signal):
            assert value is None
            self._update(cell, None, None, worker=worker)
        else:
            changed = cell._update(value,propagate=False,
                preliminary=preliminary)
            if changed:
                if cell.dtype == "array":
                    value = TransportedArray(value, cell._store)
                self._update(cell, cell.dtype, value, worker=worker)

    def update_registrar_key(self, registrar, key):
        from .worker import Worker
        from .macro import MacroObject
        if registrar not in self.registrar_listeners:
            return
        d = self.registrar_listeners[registrar]
        if key not in d:
            return
        for t in list(d[key]):
            target = t[0]()
            if target is None:
                continue
            if isinstance(target, Worker):
                namespace_name = t[1]
                target.receive_registrar_update(registrar.name, key, namespace_name)
            elif isinstance(target, MacroObject):
                target.update_cell((registrar.name, key))
            else:
                raise TypeError(target)

    @classmethod
    def get_cell_id(cls, cell):
        return id(cell)

    def disconnect(self, source, target):
        from .transformer import Transformer
        from .cell import Cell, CellLike
        from .context import Context
        from .worker import EditPinBase, ExportedEditPin, \
            InputPinBase, ExportedInputPin, OutputPinBase, ExportedOutputPin
        if isinstance(source, EditPinBase):
            source, target = target, source
        if isinstance(source, CellLike) and source._like_cell:
            if isinstance(target, ExportedInputPin):
                target = target.get_pin()
            if isinstance(source, Context):
                assert "_output" in source._pins
                source = source._pins["_output"]
            self.remove_listener(source, target)
            worker = target.worker_ref()
            if worker is not None:
                source._on_disconnect(target, worker, incoming = False)

        elif isinstance(source, OutputPinBase):
            if isinstance(target, Context):
                assert "_input" in target._pins
                target = target._pins["_input"]
            if isinstance(source, ExportedOutputPin):
                source = source.get_pin()

            cell_id = self.get_cell_id(target)

            ok = False
            if cell_id in self.cells and \
              target in self.cell_to_output_pin:
                if cell_id not in source._cell_ids:
                    ok = False
                else:
                    for ref in self.cell_to_output_pin[target]:
                        if ref() is source:
                            self.cell_to_output_pin[target].remove(ref)
                            source._cell_ids.remove(cell_id)
                            ok = True
            if not ok:
                raise ValueError("Connection does not exist")

            worker = source.worker_ref()
            if worker is not None:
                if isinstance(worker, Transformer):
                    worker._on_disconnect_output()
                target._on_disconnect(source, worker, incoming = True)

        else:
            raise TypeError

    def connect(self, source, target):
        from .transformer import Transformer
        from .cell import Cell, CellLike
        from .context import Context
        from .worker import EditPinBase, ExportedEditPin, \
            InputPinBase, ExportedInputPin, OutputPinBase, ExportedOutputPin
        if isinstance(source, EditPinBase):
            source, target = target, source
        if isinstance(source, CellLike) and source._like_cell:
            assert isinstance(target, (InputPinBase, EditPinBase, CellLike))
            assert source._get_manager() is self
            assert target._get_manager() is self
            if isinstance(target, ExportedInputPin):
                target = target.get_pin()

            if isinstance(target, Cell):
                self.add_cell_alias(source, target)
                target._on_connect(source, None, incoming = True)
                if source._status == Cell.StatusFlags.OK:
                    value = source._data
                    if source.dtype is not None and \
                      (source.dtype == "cson" or source.dtype[0] == "cson") and \
                      target.dtype is not None and \
                      (target.dtype == "json" or target.dtype[0] == "json"):
                        if isinstance(value, (str, bytes)):
                            value = cson2json(value)
                    target._update(value,propagate=True)

                return
            assert not isinstance(target, Context) #TODO?
            worker = target.worker_ref()
            assert worker is not None #weakref may not be dead
            source._on_connect(target, worker, incoming = False)
            self.add_listener(source, target)

            if source._status == Cell.StatusFlags.OK:
                self.update_from_code(source, only_last=True)
            else:
                if isinstance(target, EditPinBase) and target.last_value is not None:
                    self.update_from_worker(
                        self.get_cell_id(source),
                        target.last_value,
                        worker, preliminary=False
                    )

        elif isinstance(source, OutputPinBase):
            assert isinstance(target, CellLike) and target._like_cell
            if isinstance(target, Context):
                assert "_input" in target._pins
                target = target._pins["_input"]
            if isinstance(source, ExportedOutputPin):
                source = source.get_pin()
            worker = source.worker_ref()
            assert worker is not None #weakref may not be dead
            target._on_connect(source, worker, incoming = True)
            cell_id = self.get_cell_id(target)
            if cell_id not in self.cells:
                self.cells[cell_id] = target

            if cell_id not in source._cell_ids:
                source._cell_ids.append(cell_id)
                if target not in self.cell_to_output_pin:
                    self.cell_to_output_pin[target] = []
                self.cell_to_output_pin[target].append(weakref.ref(source))

            if isinstance(worker, Transformer):
                worker._on_connect_output()
            elif source.last_value is not None:
                self.update_from_worker(
                    cell_id,
                    source.last_value,
                    worker,
                    preliminary=False
                )

        else:
            raise TypeError
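Manager keys its cells WeakValueDictionary by id(cell) (see get_cell_id). A minimal sketch of why this pattern cleans up after itself: the entry disappears once the last strong reference to the cell is gone, which also matters because id() values can be reused after an object dies:

import gc
from weakref import WeakValueDictionary

class Cell:
    pass

cells = WeakValueDictionary()
c = Cell()
cells[id(c)] = c
assert id(c) in cells

del c          # drop the last strong reference
gc.collect()   # immediate under CPython refcounting; collect() for safety
assert len(cells) == 0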
Ejemplo n.º 53
0
class Client(object):
    # TODO optional HTTP/2 support: this would allow making multiple queries simultaneously.

    def __init__(self, api_root_url, schema_path='/schema', fetch_schema=True, **session_kwargs):
        self._instances = WeakValueDictionary()
        self._resources = {}

        self.session = session = requests.Session()
        for key, value in session_kwargs.items():
            setattr(session, key, value)

        parse_result = urlparse(api_root_url)
        self._root_url = '{}://{}'.format(parse_result.scheme, parse_result.netloc)
        self._api_root_url = api_root_url  # '{}://{}'.format(parse_result.scheme, parse_result.netloc)
        self._root_path = parse_result.path
        self._schema_url = api_root_url + schema_path

        if fetch_schema:
            self._fetch_schema()

    def _fetch_schema(self):
        schema = self.session \
            .get(self._schema_url) \
            .json(cls=PotionJSONSchemaDecoder,
                  referrer=self._schema_url,
                  client=self)

        # NOTE these should perhaps be definitions in Flask-Potion
        for name, resource_schema in schema['properties'].items():
            resource = self.resource_factory(name, resource_schema)
            setattr(self, upper_camel_case(name), resource)

    def instance(self, uri, cls=None, default=None, **kwargs):
        instance = self._instances.get(uri, None)

        if instance is None:
            if cls is None:
                try:
                    cls = self._resources[uri[:uri.rfind('/')]]
                except KeyError:
                    cls = Reference

            if isinstance(default, Resource) and default._uri is None:
                default._status = 200
                default._uri = uri
                instance = default
            else:
                instance = cls(uri=uri, **kwargs)
            self._instances[uri] = instance
        return instance

    def fetch(self, uri, cls=PotionJSONDecoder, **kwargs):
        # TODO handle URL fragments (#properties/id etc.)
        response = self.session \
            .get(urljoin(self._root_url, uri, True))

        response.raise_for_status()

        return response.json(cls=cls,
                             client=self,
                             referrer=uri,
                             **kwargs)

    def resource_factory(self, name, schema, resource_cls=None):
        """
        Registers a new resource with a given schema. The schema must not have any unresolved references
        (such as `{"$ref": "#"}` for self-references, or otherwise). A subclass of :class:`Resource`
        may be provided to add specific functionality to the resulting :class:`Resource`.

        :param str name:
        :param dict schema:
        :param Resource resource_cls: a subclass of :class:`Resource` or None
        :return: The new :class:`Resource`.
        """
        cls = type(str(upper_camel_case(name)), (resource_cls or Resource, collections.abc.MutableMapping), {
            '__doc__': schema.get('description', '')
        })

        cls._schema = schema
        cls._client = self
        cls._links = links = {}

        for link_schema in schema['links']:
            link = Link(self,
                        rel=link_schema['rel'],
                        href=link_schema['href'],
                        method=link_schema['method'],
                        schema=link_schema.get('schema', None),
                        target_schema=link_schema.get('targetSchema', None))

            # Set Resource._self, etc. for the special methods as they are managed by the Resource class
            if link.rel in ('self', 'instances', 'create', 'update', 'destroy'):
                setattr(cls, '_{}'.format(link.rel), link)
            links[link.rel] = link

            if link.rel != 'update':  # 'update' is a special case because of MutableMapping.update()
                setattr(cls, snake_case(link.rel), link)

        # TODO routes (instance & non-instance)

        for property_name, property_schema in schema.get('properties', {}).items():
            # skip $uri and $id as these are already implemented in Resource and overriding them causes unnecessary
            # fetches.
            if property_name.startswith('$'):
                continue

            if property_schema.get('readOnly', False):
                # TODO better error message. Raises AttributeError("can't set attribute")
                setattr(cls,
                        property_name,
                        property(fget=partial((lambda name, obj: getitem(obj, name)), property_name),
                                 doc=property_schema.get('description', None)))
            else:
                setattr(cls,
                        property_name,
                        property(fget=partial((lambda name, obj: getitem(obj, name)), property_name),
                                 fset=partial((lambda name, obj, value: setitem(obj, name, value)), property_name),
                                 fdel=partial((lambda name, obj: delitem(obj, name)), property_name),
                                 doc=property_schema.get('description', None)))

        root = None
        if 'instances' in links:
            root = cls._instances.href
        elif 'self' in links:
            root = cls._self.href[:cls._self.href.rfind('/')]
        else:
            root = self._root_path + '/' + name.replace('_', '-')

        self._resources[root] = cls
        return cls
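A hedged sketch of the identity guarantee Client.instance provides; the URL and URI are illustrative, fetch_schema=False avoids network access at construction time, and the fallback to the Reference class (from the same module) is assumed for unknown resource paths:

client = Client('https://example.com/api', fetch_schema=False)

a = client.instance('/user/1')
b = client.instance('/user/1')
assert a is b  # one weakly-held object per URI while references exist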
Ejemplo n.º 54
0
class Container:
    """
        Container used for registering and resolving components. Ideally no
        component will depend on a reference to the container.
    """

    def __init__(self):
        self._key_to_registration = dict()
        self._singleton_instances = dict()
        self._singleton_instances_lock = threading.Lock()
        self._weak_references = WeakValueDictionary()
        self._weak_references_lock = threading.Lock()
        self._thread_local = threading.local()

    def register_instance(self, instance: object, register_key=None):
        key = register_key if register_key else instance.__class__.__name__.lower()
        registration = Registration(instance.__class__)
        registration.usage = RegistrationUsage.singleton
        self._key_to_registration[key] = registration
        with self._singleton_instances_lock:
            self._singleton_instances[key] = instance

    def register(self, type_ref: type, register_key=None) -> Registration:
        key = register_key if register_key else type_ref.__name__.lower()
        registration = Registration(type_ref)
        self._key_to_registration[key] = registration
        return registration

    def resolve_by_type(self, type_ref: type):
        reg = Registration(type_ref)
        return self.__get_instance_for("type::"+type_ref.__name__.lower(), reg)

    def resolve_by_key(self, register_key):
        try:
            found = self._key_to_registration[register_key]
        except KeyError:
            raise RegistrationNotFoundError(register_key)
        return self.__get_instance_for(register_key, found)

    def __resolve_by_parameter(self, parameter: Parameter):
        try:
            if parameter.annotation != Parameter.empty:
                return self.resolve_by_key(parameter.annotation)
            return self.resolve_by_key(parameter.name.lower())
        except RegistrationNotFoundError:
            # fixme non registered defaults are expensive...
            if parameter.default is Parameter.empty:
                raise
            return parameter.default

    def __get_instance_for(self, cache_key, registration: Registration):
        def create_func():
            params = signature(registration.type_ref).parameters
            args = {name: self.__resolve_by_parameter(param)
                    for name, param in params.items()}
            return registration.type_ref(**args)

        if registration.usage is RegistrationUsage.temporal:
            return create_func()
        elif registration.usage is RegistrationUsage.singleton:
            return self.__get_singleton(cache_key, create_func)
        elif registration.usage is RegistrationUsage.weak_reference:
            return self.__get_weak(cache_key, create_func)
        elif registration.usage is RegistrationUsage.thread_local:
            return self.__get_thread_local(cache_key, create_func)

    def __get_weak(self, cache_key, create_func):
        result = self._weak_references.get(cache_key, None)
        if result is not None:
            return result
        with self._weak_references_lock:
            result = self._weak_references.get(cache_key, None)
            if result is not None:
                return result
            result = create_func()
            self._weak_references[cache_key] = result
            return result

    def __get_singleton(self, cache_key, create_func):
        if cache_key in self._singleton_instances:
            return self._singleton_instances[cache_key]
        with self._singleton_instances_lock:
            if cache_key in self._singleton_instances:
                return self._singleton_instances[cache_key]
            result = create_func()
            self._singleton_instances[cache_key] = result
            return result

    def __get_thread_local(self, cache_key, create_func):
        local = self._thread_local
        if 'registration_dictionary' not in local.__dict__:
            local.registration_dictionary = dict()
        if cache_key in local.registration_dictionary:
            return local.registration_dictionary[cache_key]
        result = create_func()
        local.registration_dictionary[cache_key] = result
        return result
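A hedged usage sketch for Container; Registration and RegistrationUsage come from the same module as the class above. Usages are set explicitly here because Registration's default is not shown, and the dependency is resolved by constructor parameter name:

class Database:
    pass

class Service:
    def __init__(self, database):  # resolved by the parameter name 'database'
        self.db = database

container = Container()
container.register(Database).usage = RegistrationUsage.singleton
container.register(Service).usage = RegistrationUsage.temporal

svc = container.resolve_by_key('service')  # keys default to lowercased names
assert isinstance(svc.db, Database)
assert svc.db is container.resolve_by_key('database')  # singleton is reused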
Ejemplo n.º 55
0
class Boss:
    def __init__(self, os_window_id, opts, args, cached_values):
        self.window_id_map = WeakValueDictionary()
        self.cached_values = cached_values
        self.os_window_map = {}
        self.cursor_blinking = True
        self.shutting_down = False
        talk_fd = getattr(single_instance, 'socket', None)
        talk_fd = -1 if talk_fd is None else talk_fd.fileno()
        self.child_monitor = ChildMonitor(
            self.on_child_death,
            DumpCommands(args)
            if args.dump_commands or args.dump_bytes else None, talk_fd)
        set_boss(self)
        self.current_font_size = opts.font_size
        set_font_family(opts)
        self.opts, self.args = opts, args
        initialize_renderer()
        startup_session = create_session(opts, args)
        self.add_os_window(startup_session, os_window_id=os_window_id)

    def add_os_window(self,
                      startup_session,
                      os_window_id=None,
                      wclass=None,
                      wname=None,
                      size=None,
                      startup_id=None):
        dpi_changed = False
        if os_window_id is None:
            w, h = initial_window_size(
                self.opts, self.cached_values) if size is None else size
            cls = wclass or self.args.cls or appname
            os_window_id = create_os_window(w, h, appname, wname
                                            or self.args.name or cls, cls)
            if startup_id:
                ctx = init_startup_notification(os_window_id, startup_id)
            dpi_changed = show_window(os_window_id)
            if startup_id:
                end_startup_notification(ctx)
        tm = TabManager(os_window_id, self.opts, self.args, startup_session)
        self.os_window_map[os_window_id] = tm
        if dpi_changed:
            self.on_dpi_change(os_window_id)

    def list_os_windows(self):
        for os_window_id, tm in self.os_window_map.items():
            yield {
                'id': os_window_id,
                'tabs': list(tm.list_tabs()),
            }

    def match_windows(self, match):
        field, exp = match.split(':', 1)
        pat = re.compile(exp)
        for tm in self.os_window_map.values():
            for tab in tm:
                for window in tab:
                    if window.matches(field, pat):
                        yield window

    def tab_for_window(self, window):
        for tm in self.os_window_map.values():
            for tab in tm:
                for w in tab:
                    if w.id == window.id:
                        return tab

    def match_tabs(self, match):
        field, exp = match.split(':', 1)
        pat = re.compile(exp)
        tms = tuple(self.os_window_map.values())
        found = False
        if field in ('title', 'id'):
            for tm in tms:
                for tab in tm:
                    if tab.matches(field, pat):
                        yield tab
                        found = True
        if not found:
            tabs = {self.tab_for_window(w) for w in self.match_windows(match)}
            for tab in tabs:
                if tab:
                    yield tab

    def set_active_window(self, window):
        for tm in self.os_window_map.values():
            for tab in tm:
                for w in tab:
                    if w.id == window.id:
                        if tab is not self.active_tab:
                            tm.set_active_tab(tab)
                        tab.set_active_window(w)
                        return

    def _new_os_window(self, args, cwd_from=None):
        sw = self.args_to_special_window(args, cwd_from) if args else None
        startup_session = create_session(self.opts,
                                         special_window=sw,
                                         cwd_from=cwd_from)
        self.add_os_window(startup_session)

    def new_os_window(self, *args):
        self._new_os_window(args)

    def new_os_window_with_cwd(self, *args):
        w = self.active_window
        cwd_from = w.child.pid if w is not None else None
        self._new_os_window(args, cwd_from)

    def add_child(self, window):
        self.child_monitor.add_child(window.id, window.child.pid,
                                     window.child.child_fd, window.screen)
        self.window_id_map[window.id] = window

    def peer_messages_received(self, messages):
        import json
        for msg in messages:
            msg = json.loads(msg.decode('utf-8'))
            if isinstance(msg, dict) and msg.get('cmd') == 'new_instance':
                startup_id = msg.get('startup_id')
                args, rest = parse_args(msg['args'][1:])
                args.args = rest
                opts = create_opts(args)
                session = create_session(opts, args)
                self.add_os_window(session,
                                   wclass=args.cls,
                                   wname=args.name,
                                   size=initial_window_size(opts),
                                   startup_id=startup_id)
            else:
                safe_print('Unknown message received from peer, ignoring')

    def handle_remote_cmd(self, cmd, window=None):
        response = None
        if self.opts.allow_remote_control:
            try:
                response = handle_cmd(self, window, cmd)
            except Exception as err:
                import traceback
                response = {
                    'ok': False,
                    'error': str(err),
                    'tb': traceback.format_exc()
                }
        else:
            response = {
                'ok': False,
                'error': 'Remote control is disabled. '
                         'Add allow_remote_control yes to your kitty.conf'
            }
        if response is not None:
            if window is not None:
                window.send_cmd_response(response)

    def on_child_death(self, window_id):
        window = self.window_id_map.pop(window_id, None)
        if window is None:
            return
        if window.action_on_close:
            try:
                window.action_on_close(window)
            except Exception:
                import traceback
                traceback.print_exc()
        os_window_id = window.os_window_id
        window.destroy()
        tm = self.os_window_map.get(os_window_id)
        if tm is None:
            return
        for tab in tm:
            if window in tab:
                break
        else:
            return
        tab.remove_window(window)
        if len(tab) == 0:
            tm.remove(tab)
            tab.destroy()
            if len(tm) == 0:
                if not self.shutting_down:
                    mark_os_window_for_close(os_window_id)
                    glfw_post_empty_event()

    def close_window(self, window=None):
        if window is None:
            window = self.active_window
        self.child_monitor.mark_for_close(window.id)

    def close_tab(self, tab=None):
        if tab is None:
            tab = self.active_tab
        for window in tab:
            self.close_window(window)

    def toggle_fullscreen(self):
        toggle_fullscreen()

    def start(self):
        if not getattr(self, 'io_thread_started', False):
            self.child_monitor.start()
            self.io_thread_started = True

    def activate_tab_at(self, os_window_id, x):
        tm = self.os_window_map.get(os_window_id)
        if tm is not None:
            tm.activate_tab_at(x)

    def on_window_resize(self, os_window_id, w, h, dpi_changed):
        tm = self.os_window_map.get(os_window_id)
        if tm is not None:
            if dpi_changed:
                if set_dpi_from_os_window(os_window_id):
                    self.on_dpi_change(os_window_id)
                else:
                    tm.resize()
            else:
                tm.resize()

    def increase_font_size(self):
        self.change_font_size(
            min(self.opts.font_size * 5,
                self.current_font_size + self.opts.font_size_delta))

    def decrease_font_size(self):
        self.change_font_size(
            max(MINIMUM_FONT_SIZE,
                self.current_font_size - self.opts.font_size_delta))

    def restore_font_size(self):
        self.change_font_size(self.opts.font_size)

    def _change_font_size(self, new_size=None, on_dpi_change=False):
        if new_size is not None:
            self.current_font_size = new_size
        old_cell_width, old_cell_height = viewport_for_window()[-2:]
        windows = tuple(filter(None, self.window_id_map.values()))
        resize_fonts(self.current_font_size, on_dpi_change=on_dpi_change)
        layout_sprite_map()
        prerender()
        for window in windows:
            window.screen.rescale_images(old_cell_width, old_cell_height)
            window.screen.refresh_sprite_positions()
        for tm in self.os_window_map.values():
            tm.resize()
            tm.refresh_sprite_positions()
        glfw_post_empty_event()

    def change_font_size(self, new_size):
        if new_size == self.current_font_size:
            return
        self._change_font_size(new_size)

    def on_dpi_change(self, os_window_id):
        self._change_font_size()

    @property
    def active_tab_manager(self):
        os_window_id = current_os_window()
        return self.os_window_map.get(os_window_id)

    @property
    def active_tab(self):
        tm = self.active_tab_manager
        if tm is not None:
            return tm.active_tab

    @property
    def active_window(self):
        t = self.active_tab
        if t is not None:
            return t.active_window

    def dispatch_special_key(self, key, scancode, action, mods):
        # Handle shortcuts; return True if the key was consumed
        key_action = get_shortcut(self.opts.keymap, mods, key, scancode)
        self.current_key_press_info = key, scancode, action, mods
        return self.dispatch_action(key_action)

    def dispatch_action(self, key_action):
        if key_action is not None:
            f = getattr(self, key_action.func, None)
            if f is not None:
                passthrough = f(*key_action.args)
                if passthrough is not True:
                    return True
        tab = self.active_tab
        if tab is None:
            return False
        window = self.active_window
        if window is None:
            return False
        if key_action is not None:
            f = getattr(tab, key_action.func,
                        getattr(window, key_action.func, None))
            if f is not None:
                passthrough = f(*key_action.args)
                if passthrough is not True:
                    return True
        return False

    def combine(self, *actions):
        for key_action in actions:
            self.dispatch_action(key_action)

    def on_focus(self, os_window_id, focused):
        tm = self.os_window_map.get(os_window_id)
        if tm is not None:
            w = tm.active_window
            if w is not None:
                w.focus_changed(focused)

    def on_drop(self, os_window_id, paths):
        tm = self.os_window_map.get(os_window_id)
        if tm is not None:
            w = tm.active_window
            if w is not None:
                w.paste('\n'.join(paths))

    def on_os_window_closed(self, os_window_id, viewport_width,
                            viewport_height):
        self.cached_values['window-size'] = viewport_width, viewport_height
        tm = self.os_window_map.pop(os_window_id, None)
        if tm is not None:
            tm.destroy()
        for window_id in tuple(
                w.id for w in self.window_id_map.values()
                if getattr(w, 'os_window_id', None) == os_window_id):
            self.window_id_map.pop(window_id, None)

    def display_scrollback(self, window, data):
        tab = self.active_tab
        if tab is not None and window.overlay_for is None:
            tab.new_special_window(
                SpecialWindow(self.opts.scrollback_pager,
                              data,
                              _('History'),
                              overlay_for=window.id))

    def input_unicode_character(self):
        w = self.active_window
        tab = self.active_tab
        if w is not None and tab is not None and w.overlay_for is None:
            overlay_window = tab.new_special_window(
                SpecialWindow([
                    'kitty', '+runpy',
                    'from kittens.unicode_input.main import main; main()'
                ],
                              overlay_for=w.id))
            overlay_window.action_on_close = partial(
                self.send_unicode_character, w.id)

    def send_unicode_character(self, target_window_id, source_window):
        w = self.window_id_map.get(target_window_id)
        if w is not None:
            output = str(source_window.screen.linebuf.line(0))
            if output.startswith('OK: '):
                try:
                    text = chr(int(output.partition(' ')[2], 16))
                except Exception:
                    import traceback
                    traceback.print_exc()
                else:
                    w.paste(text)

    def switch_focus_to(self, window_idx):
        tab = self.active_tab
        tab.set_active_window_idx(window_idx)
        old_focus = tab.active_window
        if not old_focus.destroyed:
            old_focus.focus_changed(False)
        tab.active_window.focus_changed(True)

    def open_url(self, url):
        if url:
            open_url(url, self.opts.open_url_with)

    def open_url_lines(self, lines):
        self.open_url(''.join(lines))

    def destroy(self):
        self.shutting_down = True
        self.child_monitor.shutdown_monitor()
        wakeup()
        self.child_monitor.join()
        del self.child_monitor
        for tm in self.os_window_map.values():
            tm.destroy()
        self.os_window_map = {}
        destroy_sprite_map()
        destroy_global_data()

    def paste_to_active_window(self, text):
        if text:
            w = self.active_window
            if w is not None:
                w.paste(text)

    def paste_from_clipboard(self):
        text = get_clipboard_string()
        self.paste_to_active_window(text)

    def paste_from_selection(self):
        text = get_primary_selection()
        self.paste_to_active_window(text)

    def set_primary_selection(self):
        w = self.active_window
        if w is not None and not w.destroyed:
            text = w.text_for_selection()
            if text:
                set_primary_selection(text)
                if self.opts.copy_on_select:
                    set_clipboard_string(text)

    def goto_tab(self, tab_num):
        tm = self.active_tab_manager
        if tm is not None:
            tm.goto_tab(tab_num - 1)

    def next_tab(self):
        tm = self.active_tab_manager
        if tm is not None:
            tm.next_tab()

    def previous_tab(self):
        tm = self.active_tab_manager
        if tm is not None:
            tm.next_tab(-1)

    def args_to_special_window(self, args, cwd_from=None):
        args = list(args)
        stdin = None
        w = self.active_window

        def data_for_at(arg):
            if arg == '@selection':
                return w.text_for_selection()
            if arg == '@ansi':
                return w.buffer_as_ansi()
            if arg == '@text':
                return w.buffer_as_text()

        if args[0].startswith('@'):
            stdin = data_for_at(args[0]) or None
            if stdin is not None:
                stdin = stdin.encode('utf-8')
            del args[0]

        cmd = []
        for arg in args:
            if arg == '@selection':
                arg = data_for_at(arg)
                if not arg:
                    continue
            cmd.append(arg)
        return SpecialWindow(cmd, stdin, cwd_from=cwd_from)

    def _new_tab(self, args, cwd_from=None):
        special_window = None
        if args:
            if isinstance(args, SpecialWindowInstance):
                special_window = args
            else:
                special_window = self.args_to_special_window(args,
                                                             cwd_from=cwd_from)
        tm = self.active_tab_manager
        if tm is not None:
            tm.new_tab(special_window=special_window, cwd_from=cwd_from)

    def new_tab(self, *args):
        self._new_tab(args)

    def new_tab_with_cwd(self, *args):
        w = self.active_window
        cwd_from = w.child.pid if w is not None else None
        self._new_tab(args, cwd_from=cwd_from)

    def _new_window(self, args, cwd_from=None):
        tab = self.active_tab
        if tab is not None:
            if args:
                tab.new_special_window(
                    self.args_to_special_window(args, cwd_from=cwd_from))
            else:
                tab.new_window(cwd_from=cwd_from)

    def new_window(self, *args):
        self._new_window(args)

    def new_window_with_cwd(self, *args):
        w = self.active_window
        if w is None:
            return self.new_window(*args)
        cwd_from = w.child.pid if w is not None else None
        self._new_window(args, cwd_from=cwd_from)

    def move_tab_forward(self):
        tm = self.active_tab_manager
        if tm is not None:
            tm.move_tab(1)

    def move_tab_backward(self):
        tm = self.active_tab_manager
        if tm is not None:
            tm.move_tab(-1)
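
The Boss class above registers every live window in self.window_id_map, a WeakValueDictionary, so a window's entry disappears as soon as the last strong reference to it is dropped and no explicit cleanup is needed. A minimal standalone sketch of that pattern (the Window stub here is hypothetical, not kitty's class):

from weakref import WeakValueDictionary


class Window:
    def __init__(self, wid):
        self.id = wid


window_id_map = WeakValueDictionary()
w = Window(1)
window_id_map[w.id] = w
assert window_id_map.get(1) is w
del w  # drop the only strong reference
# On CPython the entry vanishes immediately via reference counting; other
# implementations may only reclaim it after a garbage collection pass.
assert window_id_map.get(1) is None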
Example No. 56
class NodeManager(BaseNodeManager):
    NODE_CLASS = Node

    def __init__(self, project):
        super().__init__()
        # XXX [!] cyclic reference
        self.project = project
        self.data = WeakValueDictionary()
        self.root = None

    def update_root(self, root_project, root_project_children):
        self.root = self.new_root_node(root_project, root_project_children)

    def __setitem__(self, projectid, node):
        assert self.check_not_exist_node(node)
        assert projectid == node.projectid
        self.data[node.projectid] = node
        assert self.check_exist_node(node)

    def __delitem__(self, node):
        assert self.check_exist_node(node)
        del self.data[node.projectid]
        assert self.check_not_exist_node(node)

    def __iter__(self):
        # how to support lock for iter? just copy dict?
        return iter(self.data.values())

    def __contains__(self, node):
        if node is None:
            return False

        newnode = self.data.get(node.projectid)
        return node is newnode # ?!

    def __len__(self):
        return len(self.data)

    def __bool__(self):
        return len(self) != 0

    @property
    def get(self):
        return self.data.get

    @property
    def clear(self):
        return self.data.clear

    # TODO: add expand node. (not an operation.)

    def check_exist_node(self, node):
        original_node = self.get(node.projectid)
        if original_node is None:
            raise WFNodeError("{!r} does not exist.".format(node))
        elif original_node is not node:
            raise WFNodeError("{!r} is an invalid node.".format(node))
        return True

    def check_not_exist_node(self, node):
        if node in self:
            raise WFNodeError("{!r} is already exists.".format(node))
        return True

    def new_void_node(self, uuid=None):
        return self.NODE_CLASS.from_void(uuid, project=self.project)

    def new_node_from_json(self, data, parent=None):
        return self.NODE_CLASS.from_json_with_project(data, parent=parent, project=self.project)

    def add(self, node, recursion=True):
        assert recursion is True

        added_nodes = 0
        def register_node(node):
            nonlocal added_nodes
            self[node.projectid] = node
            added_nodes += 1

        register_node(node)
        if recursion:
            def deep(node):
                for subnode in node:
                    register_node(subnode)
                    deep(subnode)

            deep(node)

        return added_nodes

    def remove(self, node, recursion=True):
        assert self.check_exist_node(node)
        if node.parent is not None:
            assert self.check_exist_node(node.parent)
            if node in node.parent:
                raise WFNodeError("node are still exists in parent node.")

        removed_nodes = 0
        def unregister_node(node):
            nonlocal removed_nodes
            del node.parent
            del self[node]
            removed_nodes += 1

        unregister_node(node)
        if recursion:
            def deep(node):
                if not node:
                    return

                child_nodes, node.ch = node.ch[:], None
                for child in child_nodes:
                    unregister_node(child)
                    deep(child)

            deep(node)

        return removed_nodes

    def new_root_node(self, root_project, root_project_children):
        # XXX [!] project is Project, root_project is root node. ?!
        if root_project is None:
            root_project = dict(id=DEFAULT_ROOT_NODE_ID)
        else:
            root_project.update(id=DEFAULT_ROOT_NODE_ID)
            # in shared mode, the root has a uuid, which is replaced by DEFAULT_ROOT_NODE_ID

        root_project.update(ch=root_project_children)
        root = self.new_node_from_json(root_project)
        self.add(root, recursion=True)
        return root

    @property
    def pretty_print(self):
        return self.root.pretty_print
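
NodeManager's membership test compares by identity (node is newnode), not by projectid equality, which is what lets check_exist_node reject a stale copy that carries a valid id. A minimal sketch of that distinction (the Node stub here is hypothetical):

from weakref import WeakValueDictionary


class Node:
    def __init__(self, projectid):
        self.projectid = projectid


data = WeakValueDictionary()
a = Node('p1')
data['p1'] = a
b = Node('p1')  # same projectid, different instance

print(data.get('p1') is a)  # True: the registered instance
print(data.get('p1') is b)  # False: identity, not id equality, decides membership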
Example No. 57
class EntityCache(object):
    """
    Cache for entities.

    Supports add and remove operations as well as lookup by ID and
    by slug.
    """
    def __init__(self, entities=None, allow_none_id=True):
        """
        :param entities: Optional initial list of entities to cache.
        :param bool allow_none_id: Flag specifying if calling :meth:`add`
            with an entity that does not have an ID is allowed.
        """
        # Flag indicating if None IDs are allowed in this cache.
        self.__allow_none_id = allow_none_id
        # List of cached entities. This is the only place we are holding a
        # real reference to the entity.
        if entities is None:
            entities = []
        self.__entities = entities
        # Dictionary mapping entity IDs to entities for fast lookup by ID.
        self.__id_map = WeakValueDictionary()
        # Dictionary mapping entity slugs to (weak) lists of entities for
        # fast lookup by slug.

    def get_by_id(self, entity_id):
        """
        Performs a lookup of an entity by its ID.

        :param int entity_id: entity ID.
        :return: entity found or ``None``.
        """
        return self.__id_map.get(entity_id)

    def has_id(self, entity_id):
        """
        Checks if this entity cache holds an entity with the given ID.

        :return: Boolean result of the check.
        """
        return entity_id in self.__id_map

    def get_by_slug(self, entity_slug):
        """
        Performs a lookup of an entity by its slug.

        :param str entity_slug: entity slug.
        :return: entity found or ``None``.
        """
        return self.__slug_map.get(entity_slug)

    def has_slug(self, entity_slug):
        """
        Checks if this entity cache holds an entity with the given slug.

        :return: Boolean result of the check.
        """
        return entity_slug in self.__slug_map

    def add(self, entity):
        """
        Adds the given entity to this cache.

        :param entity: Entity to add.
        :type entity: Object implementing :class:`everest.interfaces.IEntity`.
        :raises ValueError: If the ID of the entity to add is ``None``
          (unless the `allow_none_id` constructor argument was set).
        """
        do_append = self.__check_new(entity)
        if do_append:
            self.__entities.append(entity)

    def remove(self, entity):
        """
        Removes the given entity from this cache.

        :param entity: Entity to remove.
        :type entity: Object implementing :class:`everest.interfaces.IEntity`.
        :raises KeyError: If the given entity is not in this cache.
        :raises ValueError: If the ID of the given entity is `None`.
        """
        self.__id_map.pop(entity.id, None)
        self.__slug_map.pop(entity.slug, None)
        self.__entities.remove(entity)

    def update(self, source_data, target_entity):
        """
        Updates the state of the target entity with the given source data.

        :param target_entity: Entity to update.
        :type target_entity: Object implementing
          :class:`everest.interfaces.IEntity`.
        """
        EntityState.set_state_data(target_entity, source_data)

    def get_all(self):
        """
        Returns the list of all entities in this cache in the order they
        were added.
        """
        return self.__entities

    def retrieve(self, filter_expression=None,
                 order_expression=None, slice_key=None):
        """
        Retrieve entities from this cache, possibly after filtering, ordering
        and slicing.
        """
        ents = iter(self.__entities)
        if filter_expression is not None:
            ents = filter_expression(ents)
        if order_expression is not None:
            # Ordering always involves a copy and conversion to a list, so
            # we have to wrap in an iterator.
            ents = iter(order_expression(ents))
        if slice_key is not None:
            ents = islice(ents, slice_key.start, slice_key.stop)
        return ents

    def rebuild(self, entities):
        """
        Rebuilds the ID and slug maps of this cache.

        This can be necessary when entities obtain their IDs only after
        they have been flushed to the backend.
        """
        for ent in entities:
            self.__check_new(ent)

    def __contains__(self, entity):
        if entity.id is not None:
            is_contained = entity.id in self.__id_map
        else:
            is_contained = entity in self.__entities
        return is_contained

    def __check_new(self, entity):
        # For certain use cases (e.g., staging), we do not require the
        # entity being added to have an ID yet.
        do_append = True
        if entity.id is not None:
            if entity.id in self.__id_map:
                if self.__id_map[entity.id] is not entity:
                    raise ValueError('Duplicate entity ID "%s". %s'
                                     % (entity.id, entity))
                else:
                    do_append = False
            else:
                self.__id_map[entity.id] = entity
        elif not self.__allow_none_id:
            raise ValueError('Entity ID must not be None.')
        # The slug can be a lazy attribute depending on the value of other
        # (possibly not yet initialized) attributes, which is why we can not
        # always assume it is available at this point.
        if do_append and hasattr(entity, 'slug') and entity.slug is not None:
            ents = self.__slug_map.get(entity.slug)
            if ents is not None:
                ents.append(entity)
            else:
                self.__slug_map[entity.slug] = WeakList([entity])
        return do_append
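
EntityCache splits ownership: the plain list __entities holds the only strong references, while the id map is weak, so removing an entity from the list is enough to make it drop out of the lookup map as well. A minimal sketch of that ownership split (the Entity stub here is hypothetical):

from weakref import WeakValueDictionary


class Entity:
    def __init__(self, entity_id):
        self.id = entity_id


entities = []  # the only strong references, as in EntityCache.__entities
id_map = WeakValueDictionary()

e = Entity(42)
entities.append(e)
id_map[e.id] = e
del e

assert id_map.get(42) is not None  # kept alive by the entities list
entities.clear()                   # drop the last strong reference...
assert id_map.get(42) is None      # ...and the weak id map clears itself (CPython)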
Example No. 58
class Boss:
    def __init__(self, os_window_id, opts, args, cached_values,
                 new_os_window_trigger):
        set_layout_options(opts)
        self.clipboard_buffers = {}
        self.update_check_process = None
        self.window_id_map = WeakValueDictionary()
        self.startup_colors = {
            k: opts[k]
            for k in opts if isinstance(opts[k], Color)
        }
        self.startup_cursor_text_color = opts.cursor_text_color
        self.pending_sequences = None
        self.cached_values = cached_values
        self.os_window_map = {}
        self.os_window_death_actions = {}
        self.cursor_blinking = True
        self.shutting_down = False
        talk_fd = getattr(single_instance, 'socket', None)
        talk_fd = -1 if talk_fd is None else talk_fd.fileno()
        listen_fd = -1
        if opts.allow_remote_control and args.listen_on:
            listen_fd = listen_on(args.listen_on)
        self.child_monitor = ChildMonitor(
            self.on_child_death,
            DumpCommands(args) if args.dump_commands or args.dump_bytes else None,
            talk_fd, listen_fd)
        set_boss(self)
        self.opts, self.args = opts, args
        startup_sessions = create_sessions(
            opts, args, default_session=opts.startup_session)
        self.keymap = self.opts.keymap.copy()
        if new_os_window_trigger is not None:
            self.keymap.pop(new_os_window_trigger, None)
        for startup_session in startup_sessions:
            self.add_os_window(startup_session, os_window_id=os_window_id)
            os_window_id = None
            if args.start_as != 'normal':
                if args.start_as == 'fullscreen':
                    self.toggle_fullscreen()
                else:
                    change_os_window_state(args.start_as)
        if is_macos:
            from .fast_data_types import cocoa_set_notification_activated_callback
            cocoa_set_notification_activated_callback(
                self.notification_activated)

    def add_os_window(self,
                      startup_session,
                      os_window_id=None,
                      wclass=None,
                      wname=None,
                      opts_for_size=None,
                      startup_id=None):
        if os_window_id is None:
            opts_for_size = opts_for_size or startup_session.os_window_size or self.opts
            cls = wclass or self.args.cls or appname
            with startup_notification_handler(
                    do_notify=startup_id is not None,
                    startup_id=startup_id) as pre_show_callback:
                os_window_id = create_os_window(
                    initial_window_size_func(opts_for_size,
                                             self.cached_values),
                    pre_show_callback, appname, wname or self.args.name or cls,
                    cls)
        tm = TabManager(os_window_id, self.opts, self.args, startup_session)
        self.os_window_map[os_window_id] = tm
        return os_window_id

    def list_os_windows(self):
        with cached_process_data():
            active_tab, active_window = self.active_tab, self.active_window
            active_tab_manager = self.active_tab_manager
            for os_window_id, tm in self.os_window_map.items():
                yield {
                    'id': os_window_id,
                    'is_focused': tm is active_tab_manager,
                    'tabs': list(tm.list_tabs(active_tab, active_window)),
                }

    @property
    def all_tab_managers(self):
        yield from self.os_window_map.values()

    @property
    def all_tabs(self):
        for tm in self.all_tab_managers:
            yield from tm

    @property
    def all_windows(self):
        for tab in self.all_tabs:
            yield from tab

    def match_windows(self, match):
        try:
            field, exp = match.split(':', 1)
        except ValueError:
            return
        if field == 'num':
            tab = self.active_tab
            if tab is not None:
                try:
                    w = tab.get_nth_window(int(exp))
                except Exception:
                    return
                if w is not None:
                    yield w
            return
        if field == 'env':
            kp, vp = exp.partition('=')[::2]
            if vp:
                pat = tuple(map(re.compile, (kp, vp)))
            else:
                pat = re.compile(kp), None
        else:
            pat = re.compile(exp)
        for window in self.all_windows:
            if window.matches(field, pat):
                yield window

    def tab_for_window(self, window):
        for tab in self.all_tabs:
            for w in tab:
                if w.id == window.id:
                    return tab

    def match_tabs(self, match):
        try:
            field, exp = match.split(':', 1)
        except ValueError:
            return
        pat = re.compile(exp)
        found = False
        if field in ('title', 'id'):
            for tab in self.all_tabs:
                if tab.matches(field, pat):
                    yield tab
                    found = True
        if not found:
            tabs = {self.tab_for_window(w) for w in self.match_windows(match)}
            for tab in tabs:
                if tab:
                    yield tab

    def set_active_window(self, window):
        for os_window_id, tm in self.os_window_map.items():
            for tab in tm:
                for w in tab:
                    if w.id == window.id:
                        if tab is not self.active_tab:
                            tm.set_active_tab(tab)
                        tab.set_active_window(w)
                        return os_window_id

    def _new_os_window(self, args, cwd_from=None):
        if isinstance(args, SpecialWindowInstance):
            sw = args
        else:
            sw = self.args_to_special_window(args, cwd_from) if args else None
        startup_session = next(
            create_sessions(self.opts, special_window=sw, cwd_from=cwd_from))
        return self.add_os_window(startup_session)

    def new_os_window(self, *args):
        self._new_os_window(args)

    @property
    def active_window_for_cwd(self):
        w = self.active_window
        if w is not None and w.overlay_for is not None and w.overlay_for in self.window_id_map:
            w = self.window_id_map[w.overlay_for]
        return w

    def new_os_window_with_cwd(self, *args):
        w = self.active_window_for_cwd
        cwd_from = w.child.pid_for_cwd if w is not None else None
        self._new_os_window(args, cwd_from)

    def new_os_window_with_wd(self, wd):
        special_window = SpecialWindow(None, cwd=wd)
        self._new_os_window(special_window)

    def add_child(self, window):
        self.child_monitor.add_child(window.id, window.child.pid,
                                     window.child.child_fd, window.screen)
        self.window_id_map[window.id] = window

    def _handle_remote_command(self, cmd, window=None):
        response = None
        if self.opts.allow_remote_control or getattr(
                window, 'allow_remote_control', False):
            try:
                response = handle_cmd(self, window, cmd)
            except Exception as err:
                import traceback
                response = {'ok': False, 'error': str(err)}
                if not getattr(err, 'hide_traceback', False):
                    response['tb'] = traceback.format_exc()
        else:
            response = {
                'ok': False,
                'error': 'Remote control is disabled. '
                         'Add allow_remote_control yes to your kitty.conf'
            }
        return response

    def peer_message_received(self, msg):
        msg = msg.decode('utf-8')
        cmd_prefix = '\x1bP@kitty-cmd'
        if msg.startswith(cmd_prefix):
            cmd = msg[len(cmd_prefix):-2]
            response = self._handle_remote_command(cmd)
            if response is not None:
                response = (cmd_prefix + json.dumps(response) +
                            '\x1b\\').encode('utf-8')
            return response
        else:
            msg = json.loads(msg)
            if isinstance(msg, dict) and msg.get('cmd') == 'new_instance':
                startup_id = msg.get('startup_id')
                args, rest = parse_args(msg['args'][1:])
                args.args = rest
                opts = create_opts(args)
                if not os.path.isabs(args.directory):
                    args.directory = os.path.join(msg['cwd'], args.directory)
                for session in create_sessions(opts, args, respect_cwd=True):
                    os_window_id = self.add_os_window(session,
                                                      wclass=args.cls,
                                                      wname=args.name,
                                                      opts_for_size=opts,
                                                      startup_id=startup_id)
                    if msg.get('notify_on_os_window_death'):
                        self.os_window_death_actions[os_window_id] = partial(
                            self.notify_on_os_window_death,
                            msg['notify_on_os_window_death'])
            else:
                log_error('Unknown message received from peer, ignoring')

    def handle_remote_cmd(self, cmd, window=None):
        response = self._handle_remote_command(cmd, window)
        if response is not None:
            if window is not None:
                window.send_cmd_response(response)

    def on_child_death(self, window_id):
        window = self.window_id_map.pop(window_id, None)
        if window is None:
            return
        if window.action_on_close:
            try:
                window.action_on_close(window)
            except Exception:
                import traceback
                traceback.print_exc()
        os_window_id = window.os_window_id
        window.destroy()
        tm = self.os_window_map.get(os_window_id)
        if tm is None:
            return
        for tab in tm:
            if window in tab:
                break
        else:
            return
        tab.remove_window(window)
        if len(tab) == 0:
            tm.remove(tab)
            tab.destroy()
            if len(tm) == 0:
                if not self.shutting_down:
                    mark_os_window_for_close(os_window_id)

    def close_window(self, window=None):
        if window is None:
            window = self.active_window
        self.child_monitor.mark_for_close(window.id)

    def close_tab(self, tab=None):
        if tab is None:
            tab = self.active_tab
        for window in tab:
            self.close_window(window)

    def toggle_fullscreen(self):
        toggle_fullscreen()

    def toggle_maximized(self):
        toggle_maximized()

    def start(self):
        if not getattr(self, 'io_thread_started', False):
            self.child_monitor.start()
            self.io_thread_started = True
        if self.opts.update_check_interval > 0 and not hasattr(
                self, 'update_check_started'):
            from .update_check import run_update_check
            run_update_check(self.opts.update_check_interval * 60 * 60)
            self.update_check_started = True

    def activate_tab_at(self, os_window_id, x):
        tm = self.os_window_map.get(os_window_id)
        if tm is not None:
            tm.activate_tab_at(x)

    def on_window_resize(self, os_window_id, w, h, dpi_changed):
        if dpi_changed:
            self.on_dpi_change(os_window_id)
        else:
            tm = self.os_window_map.get(os_window_id)
            if tm is not None:
                tm.resize()

    def clear_terminal(self, action, only_active):
        if only_active:
            windows = []
            w = self.active_window
            if w is not None:
                windows.append(w)
        else:
            windows = self.all_windows
        reset = action == 'reset'
        how = 3 if action == 'scrollback' else 2
        for w in windows:
            if action == 'scroll':
                w.screen.scroll_until_cursor()
                continue
            w.screen.cursor.x = w.screen.cursor.y = 0
            if reset:
                w.screen.reset()
            else:
                w.screen.erase_in_display(how, False)

    def increase_font_size(self):  # legacy
        cfs = global_font_size()
        self.set_font_size(min(self.opts.font_size * 5, cfs + 2.0))

    def decrease_font_size(self):  # legacy
        cfs = global_font_size()
        self.set_font_size(max(MINIMUM_FONT_SIZE, cfs - 2.0))

    def restore_font_size(self):  # legacy
        self.set_font_size(self.opts.font_size)

    def set_font_size(self, new_size):  # legacy
        self.change_font_size(True, None, new_size)

    def change_font_size(self, all_windows, increment_operation, amt):
        def calc_new_size(old_size):
            new_size = old_size
            if amt == 0:
                new_size = self.opts.font_size
            else:
                if increment_operation:
                    new_size += (1 if increment_operation == '+' else -1) * amt
                else:
                    new_size = amt
                new_size = max(MINIMUM_FONT_SIZE,
                               min(new_size, self.opts.font_size * 5))
            return new_size

        if all_windows:
            current_global_size = global_font_size()
            new_size = calc_new_size(current_global_size)
            if new_size != current_global_size:
                global_font_size(new_size)
            os_windows = tuple(self.os_window_map.keys())
        else:
            os_windows = []
            w = self.active_window
            if w is not None:
                os_windows.append(w.os_window_id)
        if os_windows:
            final_windows = {}
            for wid in os_windows:
                current_size = os_window_font_size(wid)
                if current_size:
                    new_size = calc_new_size(current_size)
                    if new_size != current_size:
                        final_windows[wid] = new_size
            if final_windows:
                self._change_font_size(final_windows)

    def _change_font_size(self, sz_map):
        for os_window_id, sz in sz_map.items():
            tm = self.os_window_map.get(os_window_id)
            if tm is not None:
                os_window_font_size(os_window_id, sz)
                tm.resize()

    def on_dpi_change(self, os_window_id):
        tm = self.os_window_map.get(os_window_id)
        if tm is not None:
            sz = os_window_font_size(os_window_id)
            if sz:
                os_window_font_size(os_window_id, sz, True)
                tm.resize()

    def _set_os_window_background_opacity(self, os_window_id, opacity):
        change_background_opacity(os_window_id, max(0.1, min(opacity, 1.0)))

    def set_background_opacity(self, opacity):
        window = self.active_window
        if window is None or not opacity:
            return
        if not self.opts.dynamic_background_opacity:
            return self.show_error(
                _('Cannot change background opacity'),
                _('You must set the dynamic_background_opacity option in '
                  'kitty.conf to be able to change background opacity'))
        os_window_id = window.os_window_id
        if opacity[0] in '+-':
            old_opacity = background_opacity_of(os_window_id)
            if old_opacity is None:
                return
            opacity = old_opacity + float(opacity)
        elif opacity == 'default':
            opacity = self.opts.background_opacity
        else:
            opacity = float(opacity)
        self._set_os_window_background_opacity(os_window_id, opacity)

    @property
    def active_tab_manager(self):
        os_window_id = current_os_window()
        return self.os_window_map.get(os_window_id)

    @property
    def active_tab(self):
        tm = self.active_tab_manager
        if tm is not None:
            return tm.active_tab

    @property
    def active_window(self):
        t = self.active_tab
        if t is not None:
            return t.active_window

    def dispatch_special_key(self, key, scancode, action, mods):
        # Handle shortcuts; return True if the key was consumed
        key_action = get_shortcut(self.keymap, mods, key, scancode)
        if key_action is None:
            sequences = get_shortcut(self.opts.sequence_map, mods, key,
                                     scancode)
            if sequences:
                self.pending_sequences = sequences
                set_in_sequence_mode(True)
                return True
        else:
            self.current_key_press_info = key, scancode, action, mods
            return self.dispatch_action(key_action)

    def process_sequence(self, key, scancode, action, mods):
        if not self.pending_sequences:
            set_in_sequence_mode(False)
            return

        remaining = {}
        matched_action = None
        for seq, key_action in self.pending_sequences.items():
            if shortcut_matches(seq[0], mods, key, scancode):
                seq = seq[1:]
                if seq:
                    remaining[seq] = key_action
                else:
                    matched_action = key_action

        if remaining:
            self.pending_sequences = remaining
        else:
            self.pending_sequences = None
            set_in_sequence_mode(False)
            if matched_action is not None:
                self.dispatch_action(matched_action)

    def start_resizing_window(self):
        w = self.active_window
        if w is None:
            return
        overlay_window = self._run_kitten(
            'resize_window',
            args=[
                '--horizontal-increment={}'.format(
                    self.opts.window_resize_step_cells),
                '--vertical-increment={}'.format(
                    self.opts.window_resize_step_lines)
            ])
        if overlay_window is not None:
            overlay_window.allow_remote_control = True

    def resize_layout_window(self,
                             window,
                             increment,
                             is_horizontal,
                             reset=False):
        tab = window.tabref()
        if tab is None or not increment:
            return False
        if reset:
            return tab.reset_window_sizes()
        return tab.resize_window_by(window.id, increment, is_horizontal)

    def default_bg_changed_for(self, window_id):
        w = self.window_id_map.get(window_id)
        if w is not None:
            tm = self.os_window_map.get(w.os_window_id)
            if tm is not None:
                tm.update_tab_bar_data()
                tm.mark_tab_bar_dirty()
                t = tm.tab_for_id(w.tab_id)
                if t is not None:
                    t.relayout_borders()

    def dispatch_action(self, key_action):
        if key_action is not None:
            f = getattr(self, key_action.func, None)
            if f is not None:
                if self.args.debug_keyboard:
                    print('Keypress matched action:', func_name(f))
                passthrough = f(*key_action.args)
                if passthrough is not True:
                    return True
        tab = self.active_tab
        if tab is None:
            return False
        window = self.active_window
        if window is None:
            return False
        if key_action is not None:
            f = getattr(tab, key_action.func,
                        getattr(window, key_action.func, None))
            if f is not None:
                passthrough = f(*key_action.args)
                if self.args.debug_keyboard:
                    print('Keypress matched action:', func_name(f))
                if passthrough is not True:
                    return True
        return False

    def combine(self, *actions):
        for key_action in actions:
            self.dispatch_action(key_action)

    def on_focus(self, os_window_id, focused):
        tm = self.os_window_map.get(os_window_id)
        if tm is not None:
            w = tm.active_window
            if w is not None:
                w.focus_changed(focused)
            tm.mark_tab_bar_dirty()

    def update_tab_bar_data(self, os_window_id):
        tm = self.os_window_map.get(os_window_id)
        if tm is not None:
            tm.update_tab_bar_data()

    def on_drop(self, os_window_id, paths):
        tm = self.os_window_map.get(os_window_id)
        if tm is not None:
            w = tm.active_window
            if w is not None:
                w.paste('\n'.join(paths))

    def on_os_window_closed(self, os_window_id, viewport_width,
                            viewport_height):
        self.cached_values['window-size'] = viewport_width, viewport_height
        tm = self.os_window_map.pop(os_window_id, None)
        if tm is not None:
            tm.destroy()
        for window_id in tuple(
                w.id for w in self.window_id_map.values()
                if getattr(w, 'os_window_id', None) == os_window_id):
            self.window_id_map.pop(window_id, None)
        action = self.os_window_death_actions.pop(os_window_id, None)
        if action is not None:
            action()

    def notify_on_os_window_death(self, address):
        import socket
        s = socket.socket(family=socket.AF_UNIX)
        with suppress(Exception):
            s.connect(address)
            s.sendall(b'c')
            with suppress(EnvironmentError):
                s.shutdown(socket.SHUT_RDWR)
            s.close()

    def display_scrollback(self, window, data, cmd):
        tab = self.active_tab
        if tab is not None and window.overlay_for is None:
            tab.new_special_window(
                SpecialWindow(cmd, data, _('History'), overlay_for=window.id))

    def edit_config_file(self, *a):
        confpath = prepare_config_file_for_editing()
        # On macOS vim fails to handle SIGWINCH if it occurs early, so add a
        # small delay.
        cmd = [
            kitty_exe(), '+runpy',
            'import os, sys, time; time.sleep(0.05); os.execvp(sys.argv[1], sys.argv[1:])'
        ] + get_editor() + [confpath]
        self.new_os_window(*cmd)

    def get_output(self, source_window, num_lines=1):
        output = ''
        s = source_window.screen
        if num_lines is None:
            num_lines = s.lines
        for i in range(min(num_lines, s.lines)):
            output += str(s.linebuf.line(i))
        return output

    def _run_kitten(self, kitten, args=(), input_data=None, window=None):
        orig_args, args = list(args), list(args)
        from kittens.runner import create_kitten_handler
        end_kitten = create_kitten_handler(kitten, orig_args)
        if window is None:
            w = self.active_window
            tab = self.active_tab
        else:
            w = window
            tab = w.tabref()
        if end_kitten.no_ui:
            end_kitten(None, getattr(w, 'id', None), self)
            return

        if w is not None and tab is not None and w.overlay_for is None:
            args[0:0] = [config_dir, kitten]
            if input_data is None:
                type_of_input = end_kitten.type_of_input
                if type_of_input in ('text', 'history', 'ansi', 'ansi-history',
                                     'screen', 'screen-history', 'screen-ansi',
                                     'screen-ansi-history'):
                    data = w.as_text(as_ansi='ansi' in type_of_input,
                                     add_history='history' in type_of_input,
                                     add_wrap_markers='screen'
                                     in type_of_input).encode('utf-8')
                elif type_of_input is None:
                    data = None
                else:
                    raise ValueError(
                        'Unknown type_of_input: {}'.format(type_of_input))
            else:
                data = input_data
            if isinstance(data, str):
                data = data.encode('utf-8')
            copts = {
                k: self.opts[k]
                for k in ('select_by_word_characters', 'open_url_with')
            }
            overlay_window = tab.new_special_window(
                SpecialWindow([
                    kitty_exe(), '+runpy',
                    'from kittens.runner import main; main()'
                ] + args,
                              stdin=data,
                              env={
                                  'KITTY_COMMON_OPTS': json.dumps(copts),
                                  'KITTY_CHILD_PID': w.child.pid,
                                  'PYTHONWARNINGS': 'ignore',
                                  'OVERLAID_WINDOW_LINES': str(w.screen.lines),
                                  'OVERLAID_WINDOW_COLS': str(w.screen.columns),
                              },
                              cwd=w.cwd_of_child,
                              overlay_for=w.id))
            overlay_window.action_on_close = partial(self.on_kitten_finish,
                                                     w.id, end_kitten)
            return overlay_window

    def kitten(self, kitten, *args):
        import shlex
        cmdline = args[0] if args else ''
        args = shlex.split(cmdline) if cmdline else []
        self._run_kitten(kitten, args)

    def on_kitten_finish(self, target_window_id, end_kitten, source_window):
        output = self.get_output(source_window, num_lines=None)
        from kittens.runner import deserialize
        data = deserialize(output)
        if data is not None:
            end_kitten(data, target_window_id, self)

    def input_unicode_character(self):
        self._run_kitten('unicode_input')

    def set_tab_title(self):
        tab = self.active_tab
        if tab:
            args = [
                '--name=tab-title', '--message',
                _('Enter the new title for this tab below.'),
                'do_set_tab_title',
                str(tab.id)
            ]
            self._run_kitten('ask', args)

    def show_error(self, title, msg):
        self._run_kitten('show_error', args=['--title', title], input_data=msg)

    def do_set_tab_title(self, title, tab_id):
        tm = self.active_tab_manager
        if tm is not None and title:
            tab_id = int(tab_id)
            for tab in tm.tabs:
                if tab.id == tab_id:
                    tab.set_title(title)
                    break

    def kitty_shell(self, window_type):
        cmd = ['@', kitty_exe(), '@']
        if window_type == 'tab':
            self._new_tab(cmd)
        elif window_type == 'os_window':
            self._new_os_window(cmd)
        elif window_type == 'overlay':
            w = self.active_window
            tab = self.active_tab
            if w is not None and tab is not None and w.overlay_for is None:
                tab.new_special_window(SpecialWindow(cmd, overlay_for=w.id))
        else:
            self._new_window(cmd)

    def switch_focus_to(self, window_idx):
        tab = self.active_tab
        tab.set_active_window_idx(window_idx)

    def open_url(self, url, program=None, cwd=None):
        if url:
            if isinstance(program, str):
                program = to_cmdline(program)
            open_url(url, program or self.opts.open_url_with, cwd=cwd)

    def open_url_lines(self, lines, program=None):
        self.open_url(''.join(lines), program)

    def destroy(self):
        self.shutting_down = True
        self.child_monitor.shutdown_monitor()
        self.set_update_check_process()
        self.update_check_process = None
        del self.child_monitor
        for tm in self.os_window_map.values():
            tm.destroy()
        self.os_window_map = {}
        destroy_global_data()

    def paste_to_active_window(self, text):
        if text:
            w = self.active_window
            if w is not None:
                w.paste(text)

    def paste_from_clipboard(self):
        text = get_clipboard_string()
        self.paste_to_active_window(text)

    def paste_from_selection(self):
        if supports_primary_selection:
            text = get_primary_selection()
        else:
            text = get_clipboard_string()
        self.paste_to_active_window(text)

    def set_primary_selection(self):
        w = self.active_window
        if w is not None and not w.destroyed:
            text = w.text_for_selection()
            if text:
                set_primary_selection(text)
                if self.opts.copy_on_select:
                    self.copy_to_buffer(self.opts.copy_on_select)

    def copy_to_buffer(self, buffer_name):
        w = self.active_window
        if w is not None and not w.destroyed:
            text = w.text_for_selection()
            if text:
                if buffer_name == 'clipboard':
                    set_clipboard_string(text)
                elif buffer_name == 'primary':
                    set_primary_selection(text)
                else:
                    self.clipboard_buffers[buffer_name] = text

    def paste_from_buffer(self, buffer_name):
        if buffer_name == 'clipboard':
            text = get_clipboard_string()
        elif buffer_name == 'primary':
            text = get_primary_selection()
        else:
            text = self.clipboard_buffers.get(buffer_name)
        if text:
            self.paste_to_active_window(text)

    def goto_tab(self, tab_num):
        tm = self.active_tab_manager
        if tm is not None:
            tm.goto_tab(tab_num - 1)

    def set_active_tab(self, tab):
        tm = self.active_tab_manager
        if tm is not None:
            return tm.set_active_tab(tab)
        return False

    def next_tab(self):
        tm = self.active_tab_manager
        if tm is not None:
            tm.next_tab()

    def previous_tab(self):
        tm = self.active_tab_manager
        if tm is not None:
            tm.next_tab(-1)

    prev_tab = previous_tab

    def process_stdin_source(self, window=None, stdin=None):
        w = window or self.active_window
        env = None
        if stdin:
            add_wrap_markers = stdin.endswith('_wrap')
            if add_wrap_markers:
                stdin = stdin[:-len('_wrap')]
            stdin = data_for_at(w, stdin, add_wrap_markers=add_wrap_markers)
            if stdin is not None:
                pipe_data = w.pipe_data(
                    stdin, has_wrap_markers=add_wrap_markers) if w else {}
                if pipe_data:
                    env = {
                        'KITTY_PIPE_DATA':
                        '{scrolled_by}:{cursor_x},{cursor_y}:{lines},{columns}'
                        .format(**pipe_data)
                    }
                stdin = stdin.encode('utf-8')
        return env, stdin

    def special_window_for_cmd(self,
                               cmd,
                               window=None,
                               stdin=None,
                               cwd_from=None,
                               as_overlay=False):
        w = window or self.active_window
        env, stdin = self.process_stdin_source(w, stdin)
        cmdline = []
        for arg in cmd:
            if arg == '@selection':
                arg = data_for_at(w, arg)
                if not arg:
                    continue
            cmdline.append(arg)
        overlay_for = w.id if as_overlay and w.overlay_for is None else None
        return SpecialWindow(cmdline,
                             stdin,
                             cwd_from=cwd_from,
                             overlay_for=overlay_for,
                             env=env)

    def pipe(self, source, dest, exe, *args):
        cmd = [exe] + list(args)
        window = self.active_window
        cwd_from = window.child.pid_for_cwd if window else None

        def create_window():
            return self.special_window_for_cmd(cmd,
                                               stdin=source,
                                               as_overlay=dest == 'overlay',
                                               cwd_from=cwd_from)

        if dest == 'overlay' or dest == 'window':
            tab = self.active_tab
            if tab is not None:
                return tab.new_special_window(create_window())
        elif dest == 'tab':
            tm = self.active_tab_manager
            if tm is not None:
                tm.new_tab(special_window=create_window(), cwd_from=cwd_from)
        elif dest == 'os_window':
            self._new_os_window(create_window(), cwd_from=cwd_from)
        elif dest in ('clipboard', 'primary'):
            env, stdin = self.process_stdin_source(stdin=source, window=window)
            if stdin:
                func = set_clipboard_string if dest == 'clipboard' else set_primary_selection
                func(stdin)
        else:
            import subprocess
            env, stdin = self.process_stdin_source(stdin=source, window=window)
            cwd = None
            if cwd_from:
                with suppress(Exception):
                    cwd = cwd_of_process(cwd_from)
            if stdin:
                r, w = safe_pipe(False)
                try:
                    subprocess.Popen(cmd, env=env, stdin=r, cwd=cwd)
                except Exception:
                    os.close(w)
                else:
                    thread_write(w, stdin)
                finally:
                    os.close(r)
            else:
                subprocess.Popen(cmd, env=env, cwd=cwd)

    def args_to_special_window(self, args, cwd_from=None):
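        # If the first argument is an '@' data specifier, it is consumed and
        # its expansion becomes stdin for the new window; any '@selection'
        # argument in the remaining command line is expanded in place.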
        args = list(args)
        stdin = None
        w = self.active_window

        if args[0].startswith('@') and args[0] != '@':
            stdin = data_for_at(w, args[0]) or None
            if stdin is not None:
                stdin = stdin.encode('utf-8')
            del args[0]

        cmd = []
        for arg in args:
            if arg == '@selection':
                arg = data_for_at(w, arg)
                if not arg:
                    continue
            cmd.append(arg)
        return SpecialWindow(cmd, stdin, cwd_from=cwd_from)

    def _new_tab(self, args, cwd_from=None, as_neighbor=False):
        special_window = None
        if args:
            if isinstance(args, SpecialWindowInstance):
                special_window = args
            else:
                special_window = self.args_to_special_window(args,
                                                             cwd_from=cwd_from)
        tm = self.active_tab_manager
        if tm is not None:
            return tm.new_tab(special_window=special_window,
                              cwd_from=cwd_from,
                              as_neighbor=as_neighbor)

    def _create_tab(self, args, cwd_from=None):
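        # A leading '!'-prefixed argument carries comma-separated flags;
        # currently only 'neighbor' (open the tab next to the active one).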
        as_neighbor = False
        if args and args[0].startswith('!'):
            as_neighbor = 'neighbor' in args[0][1:].split(',')
            args = args[1:]
        self._new_tab(args, as_neighbor=as_neighbor, cwd_from=cwd_from)

    def new_tab(self, *args):
        self._create_tab(args)

    def new_tab_with_cwd(self, *args):
        w = self.active_window_for_cwd
        cwd_from = w.child.pid_for_cwd if w is not None else None
        self._create_tab(args, cwd_from=cwd_from)

    def new_tab_with_wd(self, wd):
        special_window = SpecialWindow(None, cwd=wd)
        self._new_tab(special_window)

    def _new_window(self, args, cwd_from=None):
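        # A leading '!'-prefixed argument names the placement of the new
        # window within the tab layout (e.g. '!neighbor'); it is lowercased
        # and passed through as `location`.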
        tab = self.active_tab
        if tab is not None:
            location = None
            if args and args[0].startswith('!'):
                location = args[0][1:].lower()
                args = args[1:]
            if args:
                return tab.new_special_window(self.args_to_special_window(
                    args, cwd_from=cwd_from),
                                              location=location)
            else:
                return tab.new_window(cwd_from=cwd_from, location=location)

    def new_window(self, *args):
        self._new_window(args)

    def new_window_with_cwd(self, *args):
        w = self.active_window_for_cwd
        if w is None:
            return self.new_window(*args)
        self._new_window(args, cwd_from=w.child.pid_for_cwd)

    def move_tab_forward(self):
        tm = self.active_tab_manager
        if tm is not None:
            tm.move_tab(1)

    def move_tab_backward(self):
        tm = self.active_tab_manager
        if tm is not None:
            tm.move_tab(-1)

    def disable_ligatures_in(self, where, strategy):
        if isinstance(where, str):
            windows = ()
            if where == 'active':
                if self.active_window is not None:
                    windows = (self.active_window, )
            elif where == 'all':
                windows = self.all_windows
            elif where == 'tab':
                if self.active_tab is not None:
                    windows = tuple(self.active_tab)
        else:
            windows = where
        for window in windows:
            window.screen.disable_ligatures = strategy
            window.refresh()

    def patch_colors(self, spec, cursor_text_color, configured=False):
        if configured:
            for k, v in spec.items():
                if hasattr(self.opts, k):
                    setattr(self.opts, k, color_from_int(v))
            if cursor_text_color is not False:
                if isinstance(cursor_text_color, int):
                    cursor_text_color = color_from_int(cursor_text_color)
                self.opts.cursor_text_color = cursor_text_color
        for tm in self.all_tab_managers:
            tm.tab_bar.patch_colors(spec)
        patch_global_colors(spec, configured)

    def safe_delete_temp_file(self, path):
        if is_path_in_temp_dir(path):
            with suppress(FileNotFoundError):
                os.remove(path)

    def set_update_check_process(self, process=None):
        if self.update_check_process is not None:
            with suppress(Exception):
                if self.update_check_process.poll() is None:
                    self.update_check_process.kill()
        self.update_check_process = process

    def on_monitored_pid_death(self, pid, exit_status):
        update_check_process = getattr(self, 'update_check_process', None)
        if update_check_process is not None and pid == update_check_process.pid:
            self.update_check_process = None
            from .update_check import process_current_release
            try:
                raw = update_check_process.stdout.read().decode('utf-8')
            except Exception as e:
                log_error(
                    'Failed to read data from update check process, with error: {}'
                    .format(e))
            else:
                try:
                    process_current_release(raw)
                except Exception as e:
                    log_error(
                        'Failed to process update check data {!r}, with error: {}'
                        .format(raw, e))

    def notification_activated(self, identifier):
        if identifier == 'new-version':
            from .update_check import notification_activated
            notification_activated()

    def dbus_notification_callback(self, activated, *args):
        from .notify import dbus_notification_created, dbus_notification_activated
        if activated:
            dbus_notification_activated(*args)
        else:
            dbus_notification_created(*args)

    def show_bad_config_lines(self, bad_lines):
        def format_bad_line(bad_line):
            return '{}:{} in line: {}\n'.format(bad_line.number,
                                                bad_line.exception,
                                                bad_line.line)

        msg = '\n'.join(map(format_bad_line, bad_lines)).rstrip()
        self.show_error(_('Errors in kitty.conf'), msg)
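
The `pipe` method above is the entry point for the pipe action: `source` is resolved by `data_for_at` and `dest` selects the target. A hedged sketch of how it might be driven (the `boss` name is an assumption standing for a live instance of the class above, and the '@' specifiers are assumed to be ones `data_for_at` understands):

# Assumed: `boss` is the running instance of the class above.
# View the screen, ANSI escapes included, in a `less` overlay window:
boss.pipe('@ansi', 'overlay', 'less', '+G', '-R')
# Copy the current selection to the clipboard (the exe argument is
# unused on this code path):
boss.pipe('@selection', 'clipboard', '')
# Any unrecognized dest spawns a detached process with the data on stdin:
boss.pipe('@text', 'none', 'wc', '-l')
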
Example #59
0
class ConnListener(object):
    '''Manager for listening socket and connections still in the matchmaking
    process.'''

    def __init__(self):
        # Get a list of potential bind addresses
        addrs = socket.getaddrinfo(None, PORT, 0, socket.SOCK_STREAM, 0,
                                   socket.AI_PASSIVE)
        # Try to bind to each address
        socks = []
        for family, type, proto, _canonname, addr in addrs:
            try:
                sock = socket.socket(family, type, proto)
                sock.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
                if family == socket.AF_INET6:
                    # Ensure an IPv6 listener doesn't also bind to IPv4,
                    # since depending on the order of getaddrinfo return
                    # values this could cause the IPv6 bind to fail
                    sock.setsockopt(socket.IPPROTO_IPV6, socket.IPV6_V6ONLY, 1)
                sock.bind(addr)
                sock.listen(BACKLOG)
                sock.setblocking(0)
                socks.append(sock)
            except socket.error:
                pass
        if not socks:
            # None of the addresses worked
            raise ListenError("Couldn't bind listening socket")

        self._poll = _PendingConnPollSet()
        for sock in socks:
            self._poll.register(_ListeningSocket(sock), select.POLLIN)
        self._nonce_to_pending = WeakValueDictionary()

    def _accept(self, lsock):
        '''Accept waiting connections and add them to the pollset.'''
        try:
            while True:
                sock, addr = lsock.accept()
                sock.setsockopt(socket.SOL_SOCKET, socket.SO_KEEPALIVE, 1)
                host = addr[0]
                if connection_ok('diamondd', host):
                    pconn = _PendingConn(sock, host)
                    _log.debug('New connection from %s', pconn.peer)
                    self._poll.register(pconn, select.POLLIN)
                else:
                    sock.close()
                    _log.info('Rejected connection from %s', host)
        except socket.error:
            pass

    def _traffic(self, pconn):
        '''Handle poll readiness events on the specified pconn.'''
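        # Matchmaking: a control connection registers itself in
        # _nonce_to_pending under its nonce; a later data connection that
        # presents the same nonce is paired with it, and both sockets are
        # returned to accept().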
        try:
            # Continue trying to read the nonce
            ret = pconn.read_nonce()
            if ret is not None:
                # Have the nonce.
                if ret == CONTROL:
                    _log.debug('Control connection from %s, nonce %s',
                               pconn.peer, pconn.nonce_str)
                    pconn.send_nonce()
                    self._nonce_to_pending[pconn.nonce] = pconn
                else:
                    control = self._nonce_to_pending.get(pconn.nonce, None)
                    if control is not None:
                        # We have a match!  Clean up pending state and
                        # return the connection handles.
                        _log.debug('Data connection from %s, accepted nonce %s',
                                   pconn.peer, pconn.nonce_str)
                        pconn.send_nonce()
                        self._poll.unregister(control)
                        self._poll.unregister(pconn)
                        return (control.sock, pconn.sock)
                    else:
                        # No control connection for this data connection.
                        # Close it.
                        _log.warning('Data connection from %s, unknown nonce %s',
                                     pconn.peer, pconn.nonce_str)
                        self._poll.unregister(pconn)
        except _ConnectionClosed:
            # Connection died, clean it up.  _nonce_to_pending holds a weak
            # reference to the pconn, so this should be sufficient to GC
            # the pconn and close the fd.
            _log.warning('Connection to %s died during setup', pconn.peer)
            self._poll.unregister(pconn)
        return None

    def accept(self):
        '''Returns a new (control, data) connection pair.'''
        while True:
            for pconn, _flags in self._poll.poll():
                if hasattr(pconn, 'accept'):
                    # Listening socket
                    self._accept(pconn)
                else:
                    # Traffic on a pending connection; attempt to pair it
                    ret = self._traffic(pconn)
                    if ret is not None:
                        return ret
                # pconn may now be a dead connection; allow it to be GC'd
                pconn = None

    def shutdown(self):
        '''Close listening socket and all pending connections.'''
        self._poll.close()
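
For context, a minimal driver for `ConnListener` might look like the sketch below; `spawn_worker` is a hypothetical handler for a matched pair, and `accept()` blocks until a control and a data connection have presented the same nonce.

# Hypothetical server loop; error handling elided.
listener = ConnListener()
try:
    while True:
        control_sock, data_sock = listener.accept()
        spawn_worker(control_sock, data_sock)  # hypothetical handler
finally:
    listener.shutdown()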