Example no. 1
class SymbolList:
    def __init__(self):
        self.terminals = dict()
        self.terminals_lock = RLock()
        self.nonterminals = set()

    def make_nonterminal(self) -> Nonterminal:
        out = Nonterminal()
        self.nonterminals.add(out)
        return out

    def get_or_make_terminal(self, character: str) -> Terminal:
        if len(character) != 1:
            raise ValueError(
                "Terminals must have a string representation of length 1")

        # Hold the lock across the lookup and the creation so a concurrent
        # caller cannot register the same character in between.
        with self.terminals_lock:
            out = self.terminals.get(character)
            if out is not None:
                return out
            return self.make_terminal(character)

    def make_terminal(self, character: str) -> Terminal:
        """
        Constructs a new Terminal for the specified character and registers it internally, indexed by that
        character. Raises ValueError if the supplied string does not have length exactly one (that is, it must be
        a single character, not an arbitrary string). Returns None if a Terminal is already registered for the
        character.

        :param character: The character to represent the new Terminal symbol.
        :return: the newly created Terminal, or None if a Terminal already exists for this character.
        """
        if len(character) != 1:
            raise ValueError(
                "Terminals must have a string representation of length 1")

        with self.terminals_lock:
            if character in self.terminals:
                t = None
            else:
                t = Terminal(character)
                self.terminals[character] = t
            return t

    def __getitem__(self, item: str) -> Terminal:
        return self.terminals[item]

    def __contains__(self, item: str) -> bool:
        return item in self.terminals
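
A minimal usage sketch for the SymbolList above, assuming Terminal and Nonterminal are simple hashable classes from the same grammar codebase and that RLock comes from threading:

symbols = SymbolList()
expr = symbols.make_nonterminal()          # a fresh Nonterminal, tracked in symbols.nonterminals
plus = symbols.get_or_make_terminal('+')   # creates and registers Terminal('+')
again = symbols.get_or_make_terminal('+')  # returns the already-registered instance
assert plus is again
assert '+' in symbols and symbols['+'] is plus
assert symbols.make_terminal('+') is None  # '+' is already registered, so creation "fails"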
Example no. 2
def _lru_cache_wrapper(user_function, maxsize, typed, _CacheInfo):
    sentinel = object()
    make_key = _make_key
    PREV, NEXT, KEY, RESULT = 0, 1, 2, 3

    cache = {}
    hits = misses = 0
    full = False
    cache_get = cache.get
    cache_len = cache.__len__
    lock = RLock()
    root = []
    root[:] = [root, root, None, None]

    if maxsize == 0:

        def wrapper(*args, **kwds):
            nonlocal misses
            misses += 1
            result = user_function(*args, **kwds)
            return result

    elif maxsize is None:

        def wrapper(*args, **kwds):
            nonlocal hits, misses
            key = make_key(args, kwds, typed)
            result = cache_get(key, sentinel)
            if result is not sentinel:
                hits += 1
                return result
            misses += 1
            result = user_function(*args, **kwds)
            cache[key] = result
            return result
        
    else:

        def wrapper(*args, **kwds):
            nonlocal root, hits, misses, full
            key = make_key(args, kwds, typed)
            with lock:
                link = cache_get(key)
                if link is not None:
                    link_prev, link_next, _key, result = link
                    link_prev[NEXT] = link_next
                    link_next[PREV] = link_prev
                    last = root[PREV]
                    last[NEXT] = root[PREV] = link
                    link[PREV] = last
                    link[NEXT] = root
                    hits += 1
                    return result
                misses += 1
            result = user_function(*args, **kwds)
            with lock:
                if key in cache:
                    pass

                elif full:
Example no. 3
        def lru_cache_wrapper(*args, **kwds):
            nonlocal root, hits, misses, full
            key = make_key(args, kwds)
            with lock:  # thread-safety lock (a single shared RLock, assumed from the enclosing scope)
                link = cache.get(key)

                # Cache hit: move the hit node and return the stored result directly
                if link is not None:
                    # Unlink the hit node from the list
                    link_prev, link_next, _key, result = link
                    link_prev[NEXT] = link_next
                    link_next[PREV] = link_prev

                    # Move the hit node to the tail of the queue
                    last = root[PREV]
                    last[NEXT] = root[PREV] = link
                    link[PREV] = last
                    link[NEXT] = root
                    hits += 1
                    return result

            # Cache miss: call the function for the result, create a node, apply the eviction policy
            result = user_function(*args, **kwds)
            with lock:
                if key in cache:
                    pass
                elif full:
                    # Buffer is full: evict the node at the head of the queue and drop its cache entry
                    oldroot = root
                    oldroot[KEY] = key
                    oldroot[RESULT] = result
                    root = oldroot[NEXT]
                    oldkey = root[KEY]
                    root[KEY] = root[RESULT] = None
                    del cache[oldkey]
                    cache[key] = oldroot
                else:
                    # Buffer not full: create a new node and insert the data
                    last = root[PREV]
                    link = [last, root, key, result]
                    last[NEXT] = root[PREV] = cache[key] = link
                    full = (cache_len() >= maxsize)
                misses += 1
            return result
Example no. 4
 def __init__(self,parent):
     '''
     Constructor
     '''
     self.logger = logging.getLogger(__name__)
     self.depm = parent
     self.context = parent.context
     self.endpoint = None
     self.monitors = {}
     self.lock = RLock()
Example no. 5
def connection_cache(func: callable):
    """Connection cache for SSH sessions. This is to prevent opening a
     new, expensive connection on every command run."""
    cache = dict()
    lock = RLock()

    @wraps(func)
    def func_wrapper(host: str, username: str, *args, **kwargs):
        key = "{h}-{u}".format(h=host, u=username)
        if key in cache:
            # connection exists, check if it is still valid before
            # returning it.
            conn = cache[key]
            if conn and conn.is_active() and conn.is_authenticated():
                return conn
            else:
                # try to close a bad connection and remove it from
                # the cache.
                if conn:
                    try_close(conn)
                del cache[key]

        # key is not in the cache, so try to recreate it
        # it may have been removed just above.
        if key not in cache:
            conn = func(host, username, *args, **kwargs)
            if conn is not None:
                cache[key] = conn
            return conn

        # not sure how to reach this point, but just in case.
        return None

    def get_cache() -> dict:
        return cache

    def purge(key: str = None):
        with lock:
            if key is None:
                conns = [(k, v) for k, v in cache.items()]
            elif key in cache:
                conns = ((key, cache[key]), )
            else:
                conns = list()

            for k, v in conns:
                try_close(v)
                del cache[k]

    func_wrapper.get_cache = get_cache
    func_wrapper.purge = purge
    return func_wrapper
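
A hedged usage sketch for the connection_cache decorator above. SSHConnection, is_active and is_authenticated are hypothetical stand-ins for whatever SSH library the decorated function actually uses (the decorator only assumes the cached object answers is_active()/is_authenticated() and can be passed to try_close):

@connection_cache
def open_ssh_session(host: str, username: str, *args, **kwargs):
    # Hypothetical factory: build, authenticate and return an SSH connection.
    return SSHConnection(host, username, *args, **kwargs)

conn_a = open_ssh_session("db01.example.com", "deploy")
conn_b = open_ssh_session("db01.example.com", "deploy")  # reused from the cache while still active
open_ssh_session.purge("db01.example.com-deploy")        # close and evict one cached session
open_ssh_session.purge()                                 # close and evict everything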
Example no. 6
class Throttler():

    SLEEP_TIME = 0.05

    def __init__(self, max_events: int, per_every_seconds=60):
        self.max_events_per_minute = max_events
        self.per_every_seconds = per_every_seconds
        self.events = []
        self.lock = RLock()

    def _time_periods_elapsed(self):
        curr_time = int(time.time())
        oldest_event = heapq.nsmallest(1, self.events)[0]
        return (curr_time - oldest_event) / self.per_every_seconds

    def current_rate(self):
        if len(self.events) == 0:
            return 0

        while self._time_periods_elapsed() >= 1:
            # keep only a rotation window of 1 time period
            heapq.heappop(self.events)
            if len(self.events) == 0:
                break

        # then the current rate is simply the number of events in the queue
        return len(self.events)

    def throttle(self):
        with self.lock:
            rate = self.current_rate()
            while rate >= self.max_events_per_minute:
                time.sleep(self.SLEEP_TIME)
                rate = self.current_rate()
            heapq.heappush(self.events, int(time.time()))
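
A short usage sketch for the Throttler above, assuming the module-level imports the class relies on (time, heapq and threading.RLock): callers invoke throttle() before each unit of work, and the call blocks in SLEEP_TIME increments once max_events timestamps fall inside the rolling per_every_seconds window.

throttler = Throttler(max_events=5, per_every_seconds=60)

for i in range(8):
    throttler.throttle()  # blocks once 5 events are already inside the 60 s window
    print("event", i, "allowed at", int(time.time()))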
Example no. 7
 def __init__(self, addresses, timeout):
     if len(addresses) == 0:
         raise RuntimeError('Input empty addresses')
     self._timeout = timeout
     self._connection = None
     self._retry_count = 3
     self._addresses = addresses
     for address in addresses:
         try:
             socket.gethostbyname(address[0])
         except Exception:
             raise InValidHostname(str(address[0]))
     self._leader = self._addresses[0]
     self._lock = RLock()
Example no. 8
    def __init__(self, meta_addrs, timeout=2000, load_period=10, decode_type='utf-8'):
        self._decode_type = decode_type
        self._load_period = load_period
        self._lock = RLock()
        self._space_caches = {}
        self._space_id_names = {}
        self._storage_addrs = []
        self._storage_leader = {}
        self._close = False
        self._meta_client = MetaClient(meta_addrs, timeout)
        self._meta_client.open()

        # load meta data
        self._load_all()
Example no. 9
    def __init__(self, max_size=DEFAULT_MAX_SIZE, values=None, on_miss=None):
        if max_size <= 0:
            raise ValueError('expected max_size > 0, not %r' % max_size)
        self.hit_count = self.miss_count = self.soft_miss_count = 0
        self.max_size = max_size
        root = []
        root[:] = [root, root, None, None]
        self.link_map = {}
        self.root = root
        self.lock = RLock()

        if on_miss is not None and not callable(on_miss):
            raise TypeError('expected on_miss to be a callable'
                            ' (or None), not %r' % on_miss)
        self.on_miss = on_miss

        if values:
            self.update(values)
Example no. 10
def _lru_cache_wrapper(user_function, maxsize, typed, _CacheInfo):
    sentinel = object()          # unique object used to signal cache misses
    make_key = _make_key         # build a key from the function arguments
    PREV, NEXT, KEY, RESULT = 0, 1, 2, 3   # names for the link fields

    cache = {}
    hits = misses = 0
    full = False
    cache_get = cache.get    # bound method to lookup a key or return None
    cache_len = cache.__len__  # get cache size without calling len()
    lock = RLock()           # because linkedlist updates aren't threadsafe
    root = []                # root of the circular doubly linked list
    root[:] = [root, root, None, None]     # initialize by pointing to self

    if maxsize == 0:

        def wrapper(*args, **kwds):
            nonlocal misses
            misses += 1
            result = user_function(*args, **kwds)
            return result

    elif maxsize is None:

        def wrapper(*args, **kwds):
            nonlocal hits, misses
            key = make_key(args, kwds, typed)
            result = cache_get(key, sentinel)
            if result is not sentinel:
                hits += 1
                return result
            misses += 1
            result = user_function(*args, **kwds)
            cache[key] = result
            return result

    else:

        def wrapper(*args, **kwds):
            nonlocal root, hits, misses, full
            key = make_key(args, kwds, typed)
            with lock:
                link = cache_get(key)
                if link is not None:
                    link_prev, link_next, _key, result = link
                    link_prev[NEXT] = link_next
                    link_next[PREV] = link_prev
                    last = root[PREV]
                    last[NEXT] = root[PREV] = link
                    link[PREV] = last
                    link[NEXT] = root
                    hits += 1
                    return result
                misses += 1
            result = user_function(*args, **kwds)
            with lock:
                if key in cache:
                    pass
                elif full:
                    oldroot = root
                    oldroot[KEY] = key
                    oldroot[RESULT] = result
                    root = oldroot[NEXT]
                    oldkey = root[KEY]
                    oldresult = root[RESULT]
                    root[KEY] = root[RESULT] = None
                    del cache[oldkey]
                    cache[key] = oldroot
                else:
                    last = root[PREV]
                    link = [last, root, key, result]
                    last[NEXT] = root[PREV] = cache[key] = link
                    full = (cache_len() >= maxsize)
            return result

    def cache_info():
        with lock:
            return _CacheInfo(hits, misses, maxsize, cache_len())

    def cache_clear():
        nonlocal hits, misses, full
        with lock:
            cache.clear()
            root[:] = [root, root, None, None]
            hits = misses = 0
            full = False

    wrapper.cache_info = cache_info
    wrapper.cache_clear = cache_clear
    return wrapper
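
The factory above mirrors the internals of functools.lru_cache from the standard library. A minimal sketch of wiring it up as a decorator, assuming it lives in a module that also imports RLock and CPython's private key builder _make_key (both referenced by the code above); the lru_cache wrapper shown here is a simplified illustration, not the full functools implementation:

from collections import namedtuple
from functools import update_wrapper, _make_key  # needed so make_key = _make_key resolves above
from threading import RLock

_CacheInfo = namedtuple("CacheInfo", ["hits", "misses", "maxsize", "currsize"])

def lru_cache(maxsize=128, typed=False):
    def decorating_function(user_function):
        wrapper = _lru_cache_wrapper(user_function, maxsize, typed, _CacheInfo)
        return update_wrapper(wrapper, user_function)
    return decorating_function

@lru_cache(maxsize=2)
def square(x):
    return x * x

square(2)
square(3)
square(2)
print(square.cache_info())  # CacheInfo(hits=1, misses=2, maxsize=2, currsize=2)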
Example no. 11
 def __init__(self, func: 'Callable[[Any], _T]') -> None:
     self.func = func  # type: Callable[[Any], _T]
     self.attrname = None  # type: Optional[str]
     self.__doc__ = func.__doc__
     self.lock = RLock()
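
This __init__ has the shape of functools.cached_property (in the 3.8-3.11 implementation the RLock serializes the first computation per instance). A usage sketch with the standard-library decorator, which is what a descriptor built around this __init__ is presumably for:

from functools import cached_property

class Dataset:
    def __init__(self, values):
        self.values = values

    @cached_property
    def total(self):
        # Computed on first access, then stored on the instance and reused.
        print("computing total once")
        return sum(self.values)

d = Dataset([1, 2, 3])
d.total  # prints "computing total once", returns 6
d.total  # served from the instance __dict__, no recomputation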
Example no. 12
 def __init__(self, pool_size=100):
     self.pool = []
     self.run_pool = []
     self.pool_size = pool_size
     self.queue = Queue()
     self.lock = RLock()
Example no. 13
 def __init__(self, max_events: int, per_every_seconds=60):
     self.max_events_per_minute = max_events
     self.per_every_seconds = per_every_seconds
     self.events = []
     self.lock = RLock()
Example no. 14
    def decorating_function(user_function):

        lock = RLock()  # because linkedlist updates aren't threadsafe

        try:
            with open(filename, 'rb') as f:
                cache = pickle.load(f)
        except Exception:
            # Start with an empty cache if the file is missing or unreadable.
            cache = {}

        def cache_save():
            with lock:
                with open(filename, 'wb') as f:
                    pickle.dump(cache, f)

        atexit.register(cache_save)

        hits = misses = 0
        full = False
        cache_get = cache.get  # bound method to lookup a key or return None
        root = []  # root of the circular doubly linked list
        root[:] = [root, root, None, None]  # initialize by pointing to self

        if maxsize == 0:

            def wrapper(*args, **kwds):
                # No caching -- just a statistics update after a successful call
                nonlocal misses
                result = user_function(*args, **kwds)
                misses += 1
                return result

        elif maxsize is None:

            def wrapper(*args, **kwds):
                # Simple caching without ordering or size limit
                nonlocal hits, misses
                key = make_key(args, kwds, typed)
                result = cache_get(key, sentinel)
                if result is not sentinel:
                    hits += 1
                    return result
                result = user_function(*args, **kwds)
                cache[key] = result
                misses += 1
                if save_every and not misses % save_every:
                    cache_save()
                return result

        else:

            def wrapper(*args, **kwds):
                # Size limited caching that tracks accesses by recency
                nonlocal root, hits, misses, full
                key = make_key(args, kwds, typed)
                with lock:
                    link = cache_get(key)
                    if link is not None:
                        # Move the link to the front of the circular queue
                        link_prev, link_next, _key, result = link
                        link_prev[NEXT] = link_next
                        link_next[PREV] = link_prev
                        last = root[PREV]
                        last[NEXT] = root[PREV] = link
                        link[PREV] = last
                        link[NEXT] = root
                        hits += 1
                        return result
                result = user_function(*args, **kwds)
                with lock:
                    if key in cache:
                        # Getting here means that this same key was added to the
                        # cache while the lock was released.  Since the link
                        # update is already done, we need only return the
                        # computed result and update the count of misses.
                        pass
                    elif full:
                        # Use the old root to store the new key and result.
                        oldroot = root
                        oldroot[KEY] = key
                        oldroot[RESULT] = result
                        # Empty the oldest link and make it the new root.
                        # Keep a reference to the old key and old result to
                        # prevent their ref counts from going to zero during the
                        # update. That will prevent potentially arbitrary object
                        # clean-up code (i.e. __del__) from running while we're
                        # still adjusting the links.
                        root = oldroot[NEXT]
                        oldkey = root[KEY]
                        _ = root[RESULT]  # noqa: F841
                        root[KEY] = root[RESULT] = None
                        # Now update the cache dictionary.
                        del cache[oldkey]
                        # Save the potentially reentrant cache[key] assignment
                        # for last, after the root and links have been put in
                        # a consistent state.
                        cache[key] = oldroot
                    else:
                        # Put result in a new link at the front of the queue.
                        last = root[PREV]
                        link = [last, root, key, result]
                        last[NEXT] = root[PREV] = cache[key] = link
                        full = (len(cache) >= maxsize)
                    misses += 1
                    if save_every and not misses % save_every:
                        cache_save()
                return result

        def cache_info():
            """Report cache statistics"""
            with lock:
                return _CacheInfo(hits, misses, maxsize, len(cache))

        def cache_clear():
            """Clear the cache and cache statistics"""
            nonlocal hits, misses, full
            with lock:
                cache.clear()
                root[:] = [root, root, None, None]
                hits = misses = 0
                full = False

        wrapper.cache_info = cache_info
        wrapper.cache_clear = cache_clear
        wrapper.cache_save = cache_save
        wrapper.cache_filename = filename
        return update_wrapper(wrapper, user_function)
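
decorating_function above closes over filename, maxsize, typed, save_every, make_key, sentinel and _CacheInfo from an enclosing factory that is not shown. As a much-simplified standalone sketch of the same persistence idea (pickle + atexit + RLock, but with a plain unbounded dict instead of the linked-list LRU), under the illustrative name disk_memoize:

import atexit
import pickle
from functools import wraps
from threading import RLock

def disk_memoize(filename):
    def decorating_function(user_function):
        lock = RLock()
        try:
            with open(filename, 'rb') as f:
                cache = pickle.load(f)
        except Exception:
            cache = {}

        def cache_save():
            # Persist the cache to disk; registered to run at interpreter exit.
            with lock:
                with open(filename, 'wb') as f:
                    pickle.dump(cache, f)

        atexit.register(cache_save)

        @wraps(user_function)
        def wrapper(*args):
            with lock:
                if args in cache:
                    return cache[args]
            result = user_function(*args)
            with lock:
                cache[args] = result
            return result

        wrapper.cache_save = cache_save
        return wrapper
    return decorating_function

@disk_memoize("fib_cache.pkl")
def fib(n):
    return n if n < 2 else fib(n - 1) + fib(n - 2)

print(fib(30))  # recomputed on the first run, loaded from fib_cache.pkl on later runs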
Example no. 15
 def __init__(self):
     self.terminals = dict()
     self.terminals_lock = RLock()
     self.nonterminals = set()
Example no. 16
def _lru_cache_wrapper(user_function, maxsize, typed, _CacheInfo):
    sentinel = object()
    make_key = _make_key
    PREV, NEXT, KEY, RESULT = 0, 1, 2, 3
    cache = {}
    hits = misses = 0
    full = False
    cache_get = cache.get
    cache_len = cache.__len__
    lock = RLock()
    root = []
    root[:] = [root, root, None, None]
    if maxsize == 0:

        def wrapper(*args, **kwds):
            nonlocal misses
            result = user_function(*args, **kwds)
            misses += 1
            return result
    elif maxsize is None:

        def wrapper(*args, **kwds):
            nonlocal hits, misses
            key = make_key(args, kwds, typed)
            result = cache_get(key, sentinel)
            if result is not sentinel:
                hits += 1
                return result
            result = user_function(*args, **kwds)
            cache[key] = result
            misses += 1
            return result
    else:

        def wrapper(*args, **kwds):
            nonlocal root, hits, misses, full
            key = make_key(args, kwds, typed)
            with lock:
                link = cache_get(key)
                if link is not None:
                    link_prev, link_next, _key, result = link
                    link_prev[NEXT] = link_next
                    link_next[PREV] = link_prev
                    last = root[PREV]
                    last[NEXT] = root[PREV] = link
                    link[PREV] = last
                    link[NEXT] = root
                    hits += 1
                    return result
            result = user_function(*args, **kwds)
            with lock:
                if key in cache:
                    pass
                elif full:
                    oldroot = root
                    oldroot[KEY] = key
                    oldroot[RESULT] = result
                    root = oldroot[NEXT]
                    oldkey = root[KEY]
                    oldresult = root[RESULT]
                    root[KEY] = root[RESULT] = None
                    del cache[oldkey]
                    cache[key] = oldroot
                else:
                    last = root[PREV]
                    link = [last, root, key, result]
                    last[NEXT] = root[PREV] = cache[key] = link
                    full = cache_len() >= maxsize
                misses += 1
            return result

    def cache_info():
        """Report cache statistics"""
        with lock:
            return _CacheInfo(hits, misses, maxsize, cache_len())

    def cache_clear():
        """Clear the cache and cache statistics"""
        nonlocal hits, misses, full
        with lock:
            cache.clear()
            root[:] = [root, root, None, None]
            hits = misses = 0
            full = False

    wrapper.cache_info = cache_info
    wrapper.cache_clear = cache_clear
    return wrapper
def _lru_cache_wrapper2(user_function, maxsize, typed, _CacheInfo):
    # Constants shared by all lru cache instances:
    make_key = _make_key         # build a key from the function arguments
    PREV, NEXT, KEY, RESULT = 0, 1, 2, 3   # names for the link fields

    cache = {}
    hits = misses = 0
    full = False

    cache_len = cache.__len__  # get cache size without calling len()
    lock = RLock()           # because linkedlist updates aren't threadsafe
    root = []                # root of the circular doubly linked list
    root[:] = [root, root, None, None]     # initialize by pointing to self

    
    if maxsize == 0:

        def wrapper(*args, **kwds):
            # No caching -- just a statistics update
            nonlocal misses
            misses += 1
            result = user_function(*args, **kwds)
            return result

    elif maxsize is None:

        def wrapper(*args, **kwds):
            # Simple caching without ordering or size limit.
            # This variant keys on the first positional argument only,
            # so it cannot cache a result that is None.
            nonlocal hits, misses
            key = args[0]
            result = cache.get(key)
            if result is not None:
                hits += 1
                return result
            misses += 1
            result = user_function(*args, **kwds)
            cache[key] = result
            return result

    else:

        def wrapper(*args, **kwds):
            # Size limited caching that tracks accesses by recency.
            # This variant keys on the first positional argument only.
            nonlocal root, hits, misses, full
            key = args[0]
            with lock:
                link = cache.get(key)
                if link is not None:
                    # Move the link to the front of the circular queue
                    link_prev, link_next, _key, result = link
                    link_prev[NEXT] = link_next
                    link_next[PREV] = link_prev
                    last = root[PREV]
                    last[NEXT] = root[PREV] = link
                    link[PREV] = last
                    link[NEXT] = root
                    hits += 1
                    return result
                misses += 1
            result = user_function(*args, **kwds)
            with lock:
                if key in cache:
                    # Getting here means that this same key was added to the
                    # cache while the lock was released.  Since the link
                    # update is already done, we need only return the
                    # computed result and update the count of misses.
                    pass
                elif full:
                    # Use the old root to store the new key and result.
                    oldroot = root
                    oldroot[KEY] = key
                    oldroot[RESULT] = result
                    # Empty the oldest link and make it the new root.
                    # Keep a reference to the old key and old result to
                    # prevent their ref counts from going to zero during the
                    # update. That will prevent potentially arbitrary object
                    # clean-up code (i.e. __del__) from running while we're
                    # still adjusting the links.
                    root = oldroot[NEXT]
                    oldkey = root[KEY]
                    oldresult = root[RESULT]
                    root[KEY] = root[RESULT] = None
                    # Now update the cache dictionary.
                    del cache[oldkey]
                    # Save the potentially reentrant cache[key] assignment
                    # for last, after the root and links have been put in
                    # a consistent state.
                    cache[key] = oldroot
                else:
                    # Put result in a new link at the front of the queue.
                    last = root[PREV]
                    link = [last, root, key, result]
                    last[NEXT] = root[PREV] = cache[key] = link
                    # Use the cache_len bound method instead of the len() function
                    # which could potentially be wrapped in an lru_cache itself.
                    full = (cache_len() >= maxsize)
            return result

    def cache_info():
        """Report cache statistics"""

        with lock:

            return _CacheInfo(hits, misses, maxsize, cache_len())

    def cache_clear():
        """Clear the cache and cache statistics"""

        nonlocal hits, misses, full
        with lock:
            cache.clear()
            root[:] = [root, root, None, None]
            hits = misses = 0
            full = False
    def get_cache_dictionary():
        return cache

    def get_cache_1():
        return root

    def get_cache_2():
        return hits

    def get_cache_3():
        return full

    def checkCache(userKey):
        # Return the cached entry for userKey, or None if it is absent.
        return cache.get(userKey)
    def set_cache_dictionary(cache_1, root_1, hits_1, full_1):
        nonlocal cache, root, hits, misses, full, cache_len
        with lock:
            # Copy both structures in a single deepcopy call so the link lists
            # stored in the cache and the ones reachable from root remain the
            # same (shared) objects after copying.
            cache, root = copy.deepcopy((cache_1, root_1))
            hits = misses = 0
            full = full_1
            cache_len = cache.__len__


    wrapper.cache_info = cache_info
    wrapper.cache_clear = cache_clear
    wrapper.get_cache_dictionary = get_cache_dictionary
    wrapper.set_cache_dictionary = set_cache_dictionary
    wrapper.get_cache_1 = get_cache_1
    wrapper.get_cache_2 = get_cache_2
    wrapper.get_cache_3 = get_cache_3
    wrapper.checkCache = checkCache
    return wrapper
Example no. 18
    def decorating_function(user_function):
        cache = {}
        hits = misses = 0
        full = False
        cache_get = cache.get
        lock = RLock()
        root = []
        root[:] = [root, root, None, None]
        if maxsize == 0:

            def wrapper(*args, **kwds):
                nonlocal misses
                result = user_function(*args, **kwds)
                misses += 1
                return result

        elif maxsize is None:

            def wrapper(*args, **kwds):
                nonlocal hits, misses
                key = make_key(args, kwds, typed)
                result = cache_get(key, sentinel)
                if result is not sentinel:
                    hits += 1
                    return result
                result = user_function(*args, **kwds)
                cache[key] = result
                misses += 1
                return result

        else:

            def wrapper(*args, **kwds):
                nonlocal hits, root, full, misses
                key = make_key(args, kwds, typed)
                with lock:
                    link = cache_get(key)
                    if link is not None:
                        (link_prev, link_next, _key, result) = link
                        link_prev[NEXT] = link_next
                        link_next[PREV] = link_prev
                        last = root[PREV]
                        last[NEXT] = root[PREV] = link
                        link[PREV] = last
                        link[NEXT] = root
                        hits += 1
                        return result
                result = user_function(*args, **kwds)
                with lock:
                    if key in cache:
                        pass
                    elif full:
                        oldroot = root
                        oldroot[KEY] = key
                        oldroot[RESULT] = result
                        root = oldroot[NEXT]
                        oldkey = root[KEY]
                        oldresult = root[RESULT]
                        root[KEY] = root[RESULT] = None
                        del cache[oldkey]
                        cache[key] = oldroot
                    else:
                        last = root[PREV]
                        link = [last, root, key, result]
                        last[NEXT] = root[PREV] = cache[key] = link
                        full = len(cache) >= maxsize
                    misses += 1
                return result

        def cache_info():
            with lock:
                return _CacheInfo(hits, misses, maxsize, len(cache))

        def cache_clear():
            nonlocal hits, misses, full
            with lock:
                cache.clear()
                root[:] = [root, root, None, None]
                hits = misses = 0
                full = False

        wrapper.cache_info = cache_info
        wrapper.cache_clear = cache_clear
        return update_wrapper(wrapper, user_function)
Example no. 19
 def __init__(self, func):
     self.func = func
     self.attrname = None
     self.__doc__ = func.__doc__
     self.lock = RLock()
Example no. 20
                                has_default_value=False,
                                json_name=json_name)
        fields.append(field)

    desc_name = '.'.join(full_message_name)
    return Descriptor(desc_proto.name,
                      desc_name,
                      None,
                      None,
                      fields,
                      list(nested_types.values()),
                      list(enum_types.values()), [],
                      options=_OptionsOrNone(desc_proto))


_lock = RLock()
_jclass_pclass_map = {}


class PBClass(object):
    def __init__(self, java_class, pool=_default_descriptor_pool):
        self._descriptor_pool = pool
        self._java_class = java_class
        jdesc_bytes = java_class.getMethod("getDescriptor", None).invoke(
            None, None).toProto().toByteArray()
        if PY2:
            jdesc_bytes = str(jdesc_bytes)
        pdesc_proto = DescriptorProto()
        pdesc_proto.MergeFromString(jdesc_bytes)
        try:
            pdesc_proto_desc = self._descriptor_pool.FindFileContainingSymbol(
Example no. 21
def _lru_cache_wrapper(user_function, maxsize, typed, _CacheInfo):
    # Constants shared by all lru cache instances:
    sentinel = object()  # unique object used to signal cache misses
    make_key = _make_key  # build a key from the function arguments
    PREV, NEXT, KEY, RESULT = 0, 1, 2, 3  # names for the link fields

    cache = {}
    hits = misses = 0
    full = False
    cache_get = cache.get  # bound method to lookup a key or return None
    cache_len = cache.__len__  # get cache size without calling len()
    lock = RLock()  # because linkedlist updates aren't threadsafe
    root = []  # root of the circular doubly linked list
    root[:] = [root, root, None, None]  # initialize by pointing to self

    if maxsize == 0:

        def wrapper(*args, **kwds):
            # No caching -- just a statistics update
            nonlocal misses
            misses += 1
            result = user_function(*args, **kwds)
            return result

    elif maxsize is None:

        def wrapper(*args, **kwds):
            # Simple caching without ordering or size limit
            nonlocal hits, misses
            key = make_key(args, kwds, typed)
            result = cache_get(key, sentinel)
            if result is not sentinel:
                hits += 1
                return result
            misses += 1
            result = user_function(*args, **kwds)
            cache[key] = result
            return result

    else:

        def wrapper(*args, **kwds):
            # Size limited caching that tracks accesses by recency
            nonlocal root, hits, misses, full
            key = make_key(args, kwds, typed)
            with lock:
                link = cache_get(key)
                if link is not None:
                    # Move the link to the front of the circular queue
                    link_prev, link_next, _key, result = link
                    link_prev[NEXT] = link_next
                    link_next[PREV] = link_prev
                    last = root[PREV]
                    last[NEXT] = root[PREV] = link
                    link[PREV] = last
                    link[NEXT] = root
                    hits += 1
                    return result
                misses += 1
            result = user_function(*args, **kwds)
            with lock:
                if key in cache:
                    # Getting here means that this same key was added to the
                    # cache while the lock was released.  Since the link
                    # update is already done, we need only return the
                    # computed result and update the count of misses.
                    pass
                elif full:
                    # Use the old root to store the new key and result.
                    oldroot = root
                    oldroot[KEY] = key
                    oldroot[RESULT] = result
                    # Empty the oldest link and make it the new root.
                    # Keep a reference to the old key and old result to
                    # prevent their ref counts from going to zero during the
                    # update. That will prevent potentially arbitrary object
                    # clean-up code (i.e. __del__) from running while we're
                    # still adjusting the links.
                    root = oldroot[NEXT]
                    oldkey = root[KEY]
                    oldresult = root[RESULT]
                    root[KEY] = root[RESULT] = None
                    # Now update the cache dictionary.
                    del cache[oldkey]
                    # Save the potentially reentrant cache[key] assignment
                    # for last, after the root and links have been put in
                    # a consistent state.
                    cache[key] = oldroot
                else:
                    # Put result in a new link at the front of the queue.
                    last = root[PREV]
                    link = [last, root, key, result]
                    last[NEXT] = root[PREV] = cache[key] = link
                    # Use the cache_len bound method instead of the len() function
                    # which could potentially be wrapped in an lru_cache itself.
                    full = (cache_len() >= maxsize)
            return result

    def cache_info():
        """Report cache statistics"""
        with lock:
            return _CacheInfo(hits, misses, maxsize, cache_len())

    def cache_clear():
        """Clear the cache and cache statistics"""
        nonlocal hits, misses, full
        with lock:
            cache.clear()
            root[:] = [root, root, None, None]
            hits = misses = 0
            full = False

    wrapper.cache_info = cache_info
    wrapper.cache_clear = cache_clear
    return wrapper
Example no. 22
 def __init__(self, max_size=100):
     self.cache = OrderedDict()
     self.max_size = max_size
     self.lock = RLock()  # because updates aren't threadsafe
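
A minimal sketch of how get/put methods for an OrderedDict-backed LRU cache built on the __init__ above typically look (the class name LRUCache and the method names are assumptions, not part of the original snippet): move_to_end keeps the most recently used key at the tail, and popitem(last=False) evicts the least recently used one.

from collections import OrderedDict
from threading import RLock

class LRUCache:
    def __init__(self, max_size=100):
        self.cache = OrderedDict()
        self.max_size = max_size
        self.lock = RLock()  # because updates aren't threadsafe

    def get(self, key, default=None):
        with self.lock:
            if key not in self.cache:
                return default
            self.cache.move_to_end(key)         # mark as most recently used
            return self.cache[key]

    def put(self, key, value):
        with self.lock:
            self.cache[key] = value
            self.cache.move_to_end(key)
            if len(self.cache) > self.max_size:
                self.cache.popitem(last=False)  # evict the least recently used entry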