Example No. 1
    def __init__(
        self,
        host: str,
        port: int,
        privkey: bytes,
        gasprice: int = None,
        nonce_update_interval: float = 5.0,
        nonce_offset: int = 0,
    ):

        if privkey is None or len(privkey) != 32:
            raise ValueError('Invalid private key')

        endpoint = 'http://{}:{}'.format(host, port)

        self.port = port
        self.privkey = privkey
        self.sender = privatekey_to_address(privkey)
        # Needs to be initialized to None in the beginning since JSONRPCClient
        # gets constructed before the RaidenService Object.
        self.stop_event = None

        self.nonce_last_update = 0
        self.nonce_available_value = None
        self.nonce_lock = Semaphore()
        self.nonce_update_interval = nonce_update_interval
        self.nonce_offset = nonce_offset
        self.given_gas_price = gasprice

        cache = cachetools.TTLCache(
            maxsize=1,
            ttl=RPC_CACHE_TTL,
        )
        cache_wrapper = cachetools.cached(cache=cache)
        self.gaslimit = cache_wrapper(self._gaslimit)
        cache = cachetools.TTLCache(
            maxsize=1,
            ttl=RPC_CACHE_TTL,
        )
        cache_wrapper = cachetools.cached(cache=cache)
        self.gasprice = cache_wrapper(self._gasprice)

        # web3
        self.web3: Web3 = Web3(HTTPProvider(endpoint))
        # we use a PoA chain for the smoketest; this middleware is needed to handle it
        self.web3.middleware_stack.inject(geth_poa_middleware, layer=0)

        # create the connection test middleware
        connection_test = make_connection_test_middleware(self)
        self.web3.middleware_stack.inject(connection_test, layer=0)
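Examples 1, 2 and 39 follow the same idiom: each instance builds its own TTLCache in __init__ and rebinds a bound method to a cached wrapper, so expensive RPC results (gas limit, gas price) are memoised per client for RPC_CACHE_TTL seconds. A minimal self-contained sketch of that idiom, with assumed names (Client, _fetch_gas_price, a 10-second TTL):

import time
import cachetools

class Client:
    def __init__(self, ttl: float = 10.0):
        # A fresh cache per instance keeps clients independent of one another.
        cache = cachetools.TTLCache(maxsize=1, ttl=ttl)
        self.gasprice = cachetools.cached(cache=cache)(self._fetch_gas_price)

    def _fetch_gas_price(self):
        # Stand-in for a slow RPC round-trip.
        return int(time.time())

client = Client()
assert client.gasprice() == client.gasprice()  # second call is served from the TTL cache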
Example No. 2
    def __init__(self,
                 host: str,
                 port: int,
                 privkey: bytes,
                 gasprice: int = None,
                 nonce_update_interval: float = 5.0,
                 nonce_offset: int = 0):

        if privkey is None or len(privkey) != 32:
            raise ValueError('Invalid private key')

        endpoint = 'http://{}:{}'.format(host, port)
        self.session = requests.Session()
        adapter = requests.adapters.HTTPAdapter(pool_maxsize=50)
        self.session.mount(endpoint, adapter)

        self.transport = HttpPostClientTransport(
            endpoint,
            post_method=self.session.post,
            headers={'content-type': 'application/json'},
        )

        self.port = port
        self.privkey = privkey
        self.protocol = JSONRPCProtocol()
        self.sender = privatekey_to_address(privkey)
        # Needs to be initialized to None in the beginning since JSONRPCClient
        # gets constructed before the RaidenService Object.
        self.stop_event = None

        self.nonce_last_update = 0
        self.nonce_available_value = None
        self.nonce_lock = Semaphore()
        self.nonce_update_interval = nonce_update_interval
        self.nonce_offset = nonce_offset
        self.given_gas_price = gasprice

        cache = cachetools.TTLCache(
            maxsize=1,
            ttl=RPC_CACHE_TTL,
        )
        cache_wrapper = cachetools.cached(cache=cache)
        self.gaslimit = cache_wrapper(self._gaslimit)
        cache = cachetools.TTLCache(
            maxsize=1,
            ttl=RPC_CACHE_TTL,
        )
        cache_wrapper = cachetools.cached(cache=cache)
        self.gasprice = cache_wrapper(self._gasprice)
Example No. 3
    def test_decorator_typed(self):
        cache = self.cache(3)
        key = cachetools.keys.typedkey
        wrapper = cachetools.cached(cache, key=key)(self.func)

        self.assertEqual(len(cache), 0)
        self.assertEqual(wrapper.__wrapped__, self.func)

        self.assertEqual(wrapper(0), 0)
        self.assertEqual(len(cache), 1)
        self.assertIn(cachetools.keys.typedkey(0), cache)
        self.assertNotIn(cachetools.keys.typedkey(1), cache)
        self.assertNotIn(cachetools.keys.typedkey(1.0), cache)

        self.assertEqual(wrapper(1), 1)
        self.assertEqual(len(cache), 2)
        self.assertIn(cachetools.keys.typedkey(0), cache)
        self.assertIn(cachetools.keys.typedkey(1), cache)
        self.assertNotIn(cachetools.keys.typedkey(1.0), cache)

        self.assertEqual(wrapper(1), 1)
        self.assertEqual(len(cache), 2)

        self.assertEqual(wrapper(1.0), 2)
        self.assertEqual(len(cache), 3)
        self.assertIn(cachetools.keys.typedkey(0), cache)
        self.assertIn(cachetools.keys.typedkey(1), cache)
        self.assertIn(cachetools.keys.typedkey(1.0), cache)

        self.assertEqual(wrapper(1.0), 2)
        self.assertEqual(len(cache), 3)
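Examples 3 and 5 rely on typedkey, which keys on argument types as well as values; that is why wrapper(1) and wrapper(1.0) occupy separate cache entries there, while the plain hashkey tests further below (for instance Example 11) fold them into one. A two-line illustration (in current cachetools releases both helpers live in cachetools.keys):

from cachetools.keys import hashkey, typedkey

assert hashkey(1) == hashkey(1.0)    # value-based keys: 1 and 1.0 share one entry
assert typedkey(1) != typedkey(1.0)  # type-aware keys: int and float stay separate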
Example No. 4
    def __init__(self, discovery, udpsocket, throttle_policy, config):
        # these values are initialized by the start method
        self.queueids_to_queues: typing.Dict
        self.raiden: RaidenService

        self.discovery = discovery
        self.config = config

        self.retry_interval = config['retry_interval']
        self.retries_before_backoff = config['retries_before_backoff']
        self.nat_keepalive_retries = config['nat_keepalive_retries']
        self.nat_keepalive_timeout = config['nat_keepalive_timeout']
        self.nat_invitation_timeout = config['nat_invitation_timeout']

        self.event_stop = Event()

        self.greenlets = list()
        self.addresses_events = dict()

        self.messageids_to_asyncresults = dict()

        # Maps the addresses to a dict with the latest nonce (using a dict
        # because python integers are immutable)
        self.nodeaddresses_to_nonces = dict()

        cache = cachetools.TTLCache(
            maxsize=50,
            ttl=CACHE_TTL,
        )
        cache_wrapper = cachetools.cached(cache=cache)
        self.get_host_port = cache_wrapper(discovery.get)

        self.throttle_policy = throttle_policy
        self.server = DatagramServer(udpsocket, handle=self._receive)
Example No. 5
    def test_decorator_typed(self):
        cache = self.cache(3)
        key = cachetools.keys.typedkey
        wrapper = cachetools.cached(cache, key=key)(self.func)

        self.assertEqual(len(cache), 0)
        self.assertEqual(wrapper.__wrapped__, self.func)

        self.assertEqual(wrapper(0), 0)
        self.assertEqual(len(cache), 1)
        self.assertIn(cachetools.keys.typedkey(0), cache)
        self.assertNotIn(cachetools.keys.typedkey(1), cache)
        self.assertNotIn(cachetools.keys.typedkey(1.0), cache)

        self.assertEqual(wrapper(1), 1)
        self.assertEqual(len(cache), 2)
        self.assertIn(cachetools.keys.typedkey(0), cache)
        self.assertIn(cachetools.keys.typedkey(1), cache)
        self.assertNotIn(cachetools.keys.typedkey(1.0), cache)

        self.assertEqual(wrapper(1), 1)
        self.assertEqual(len(cache), 2)

        self.assertEqual(wrapper(1.0), 2)
        self.assertEqual(len(cache), 3)
        self.assertIn(cachetools.keys.typedkey(0), cache)
        self.assertIn(cachetools.keys.typedkey(1), cache)
        self.assertIn(cachetools.keys.typedkey(1.0), cache)

        self.assertEqual(wrapper(1.0), 2)
        self.assertEqual(len(cache), 3)
Example No. 6
    def test_decorator(self):
        wrapper = cachetools.cached(None)(self.func)
        self.assertEqual(wrapper.__wrapped__, self.func)

        self.assertEqual(wrapper(0), (0,))
        self.assertEqual(wrapper(1), (1,))
        self.assertEqual(wrapper(1, foo='bar'), (1, ('foo', 'bar')))
Example No. 7
    def test_decorator(self):
        wrapper = cachetools.cached(None)(self.func)
        self.assertEqual(wrapper.__wrapped__, self.func)

        self.assertEqual(wrapper(0), (0, ))
        self.assertEqual(wrapper(1), (1, ))
        self.assertEqual(wrapper(1, foo='bar'), (1, ('foo', 'bar')))
Example No. 8
    def __init__(self,
                 *,
                 cookie_path=None,
                 cache_path=None,
                 cache_ttl=300,
                 cache_size=100,
                 logger=None):
        self.session = requests.Session()
        self.logger = logger or logging.getLogger('NetEaseApi')

        # cookies persistent
        self._cookies_path = cookie_path
        self.session.cookies = _initialize_cookies(cookie_path)

        # cache persistent
        self._cache_path = cache_path
        self._cache_ttl = cache_ttl
        self._cache_size = cache_size
        if cache_path:
            self.request_cache = TTLCacheP(cache_size, cache_ttl, cache_path)
        else:
            self.request_cache = TTLCache(cache_size, cache_ttl)

        self.request = cached(self.request_cache, cache_key)(self._request)

        # get login status
        resp = self.get_user_account()
        self.profile: UserProfile = resp['profile']
        self.account: UserAccount = resp['account']
Example No. 9
    def test_decorator(self):
        wrapper = cachetools.cached(None)(self.func)
        self.assertEqual(wrapper.__wrapped__, self.func)

        self.assertEqual(wrapper(0), (0, ))
        self.assertEqual(wrapper(1), (1, ))
        self.assertEqual(wrapper(1, foo="bar"), (1, ("foo", "bar")))
Example No. 10
    def __init__(self, db, backoff='NONE',
                 norm_map=DEFAULT_NORMALIZE_MAP,
                 strict_digit=False,
                 cache_size=0):
        if not isinstance(db, CalimaStarDB):
            raise AnalyzerError('DB is not an instance of CalimaStarDB')
        if not db.flags.analysis:
            raise AnalyzerError('DB does not support analysis')

        self._db = db

        self._backoff = backoff
        self._norm_map = norm_map
        self._strict_digit = strict_digit

        if backoff in _BACKOFF_TYPES:
            if backoff == 'NONE':
                self._backoff_condition = None
                self._backoff_action = None
            else:
                backoff_toks = backoff.split('_')
                self._backoff_condition = backoff_toks[0]
                self._backoff_action = backoff_toks[1]
        else:
            raise AnalyzerError('Invalid backoff mode {}'.format(
                repr(backoff)))

        if isinstance(cache_size, int):
            if cache_size > 0:
                cache = LFUCache(cache_size)
                self.analyze = cached(cache, lock=RLock())(self.analyze)

        else:
            raise AnalyzerError('Invalid cache size {}'.format(
                                repr(cache_size)))
Example No. 11
    def test_decorator(self):
        cache = self.cache(2)
        wrapper = cachetools.cached(cache)(self.func)

        self.assertEqual(len(cache), 0)
        self.assertEqual(wrapper.__wrapped__, self.func)

        self.assertEqual(wrapper(0), 0)
        self.assertEqual(len(cache), 1)
        self.assertIn(cachetools.hashkey(0), cache)
        self.assertNotIn(cachetools.hashkey(1), cache)
        self.assertNotIn(cachetools.hashkey(1.0), cache)

        self.assertEqual(wrapper(1), 1)
        self.assertEqual(len(cache), 2)
        self.assertIn(cachetools.hashkey(0), cache)
        self.assertIn(cachetools.hashkey(1), cache)
        self.assertIn(cachetools.hashkey(1.0), cache)

        self.assertEqual(wrapper(1), 1)
        self.assertEqual(len(cache), 2)

        self.assertEqual(wrapper(1.0), 1)
        self.assertEqual(len(cache), 2)

        self.assertEqual(wrapper(1.0), 1)
        self.assertEqual(len(cache), 2)
Example No. 12
    def __init__(self, analyzer, mle_path=None, top=1, cache_size=100000):
        if not isinstance(analyzer, Analyzer):
            raise ValueError('Invalid analyzer instance.')
        if not isinstance(top, int):
            raise ValueError('Invalid value for top.')
        if not isinstance(cache_size, int):
            raise ValueError('Invalid value for cache_size.')

        if mle_path is not None:
            with open(mle_path, 'r', encoding='utf-8') as mle_fp:
                self._mle = json.load(mle_fp)
        else:
            self._mle = None

        self._analyzer = analyzer

        if top < 1:
            top = 1
        self._top = top

        if cache_size < 0:
            cache_size = 0

        self._cache = LFUCache(cache_size)
        self._scored_analyses = cached(self._cache)(self._scored_analyses)
Example No. 13
def cacheit(f):
    """Caching decorator.

    The result of a cached function must be *immutable*.

    Examples
    ========

    >>> from diofant.core.cache import cacheit
    >>> from diofant.abc import x, y

    >>> @cacheit
    ... def f(a, b):
    ...    print(a, b)
    ...    return a + b

    >>> f(x, y)
    x y
    x + y
    >>> f(x, y)
    x + y
    """

    if USE_CACHE:
        f_cache_it_cache = {}
        CACHE.append((f, f_cache_it_cache))
        return cached(f_cache_it_cache, key=cache_key)(f)
    else:
        return f
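Because cachetools.cached accepts any mutable mapping as its cache argument, cacheit above can hand it a plain dict and keep (function, cache) pairs in a global list so every cache can be cleared later. A self-contained sketch of that idea, with assumed names (CACHE, clear_cache) and the default hashkey in place of Diofant's cache_key:

from cachetools import cached
from cachetools.keys import hashkey

CACHE = []  # (function, cache-dict) pairs, kept so all caches can be purged at once

def cacheit(func):
    func_cache = {}  # any mutable mapping works as the cache; a plain dict is unbounded
    CACHE.append((func, func_cache))
    return cached(func_cache, key=hashkey)(func)

def clear_cache():
    for _, func_cache in CACHE:
        func_cache.clear()

@cacheit
def add(a, b):
    return a + b

add(1, 2)
assert len(CACHE[-1][1]) == 1
clear_cache()
assert len(CACHE[-1][1]) == 0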
Example No. 14
    def __init__(self, discovery, udpsocket, throttle_policy, config):
        # these values are initialized by the start method
        self.queueids_to_queues: typing.Dict
        self.raiden: RaidenService

        self.discovery = discovery
        self.config = config

        self.retry_interval = config['retry_interval']
        self.retries_before_backoff = config['retries_before_backoff']
        self.nat_keepalive_retries = config['nat_keepalive_retries']
        self.nat_keepalive_timeout = config['nat_keepalive_timeout']
        self.nat_invitation_timeout = config['nat_invitation_timeout']

        self.event_stop = RaidenGreenletEvent()

        self.greenlets = list()
        self.addresses_events = dict()

        self.messageids_to_asyncresults = dict()

        # Maps the addresses to a dict with the latest nonce (using a dict
        # because python integers are immutable)
        self.nodeaddresses_to_nonces = dict()

        cache = cachetools.TTLCache(
            maxsize=50,
            ttl=CACHE_TTL,
        )
        cache_wrapper = cachetools.cached(cache=cache)
        self.get_host_port = cache_wrapper(discovery.get)

        self.throttle_policy = throttle_policy
        self.server = DatagramServer(udpsocket, handle=self._receive)
Example No. 15
    def __init__(self, transport, discovery, raiden, retry_interval,
                 retries_before_backoff, nat_keepalive_retries,
                 nat_keepalive_timeout, nat_invitation_timeout):

        self.transport = transport
        self.discovery = discovery
        self.raiden = raiden

        self.retry_interval = retry_interval
        self.retries_before_backoff = retries_before_backoff

        self.nat_keepalive_retries = nat_keepalive_retries
        self.nat_keepalive_timeout = nat_keepalive_timeout
        self.nat_invitation_timeout = nat_invitation_timeout

        self.event_stop = Event()

        self.queueid_to_queue = dict()
        self.greenlets = list()
        self.addresses_events = dict()

        # Maps the message_id to a SentMessageState
        self.messageids_to_asyncresults = dict()

        # Maps the addresses to a dict with the latest nonce (using a dict
        # because python integers are immutable)
        self.nodeaddresses_to_nonces = dict()

        cache = cachetools.TTLCache(
            maxsize=50,
            ttl=CACHE_TTL,
        )
        cache_wrapper = cachetools.cached(cache=cache)
        self.get_host_port = cache_wrapper(discovery.get)
Example No. 16
    def __init__(self, analyzer, mle_path=None, top=1, cache_size=100000):
        if not isinstance(analyzer, Analyzer):
            raise ValueError('Invalid analyzer instance.')
        if not isinstance(top, int):
            raise ValueError('Invalid value for top.')
        if not isinstance(cache_size, int):
            raise ValueError('Invalid value for cache_size.')

        if mle_path is not None:
            with open(mle_path, 'r', encoding='utf-8') as mle_fp:
                self._mle = json.load(mle_fp)

                # TODO: Remove this when MLE files are fixed
                for analysis in self._mle.values():
                    analysis['lex'] = strip_lex(analysis['lex'])
        else:
            self._mle = None

        self._analyzer = analyzer

        if top < 1:
            top = 1
        self._top = top

        if cache_size < 0:
            cache_size = 0

        self._cache = LFUCache(cache_size)
        self._scored_analyses = cached(self._cache)(self._scored_analyses)
Example No. 17
    def test_decorator(self):
        cache = self.cache(2)
        wrapper = cachetools.cached(cache)(self.func)

        self.assertEqual(len(cache), 0)
        self.assertEqual(wrapper.__wrapped__, self.func)

        self.assertEqual(wrapper(0), 0)
        self.assertEqual(len(cache), 1)
        self.assertIn(cachetools.keys.hashkey(0), cache)
        self.assertNotIn(cachetools.keys.hashkey(1), cache)
        self.assertNotIn(cachetools.keys.hashkey(1.0), cache)

        self.assertEqual(wrapper(1), 1)
        self.assertEqual(len(cache), 2)
        self.assertIn(cachetools.keys.hashkey(0), cache)
        self.assertIn(cachetools.keys.hashkey(1), cache)
        self.assertIn(cachetools.keys.hashkey(1.0), cache)

        self.assertEqual(wrapper(1), 1)
        self.assertEqual(len(cache), 2)

        self.assertEqual(wrapper(1.0), 1)
        self.assertEqual(len(cache), 2)

        self.assertEqual(wrapper(1.0), 1)
        self.assertEqual(len(cache), 2)
Example No. 18
def cache_test(specific_cache, maxNum=100):
    temp = Fib()
    temp.num = cachetools.cached(cache=specific_cache, key=strhash)(temp.num)
    temp.num(maxNum)
    if maxNum >= 3:
        assert temp.num(3) == 2
    if maxNum >= 100:
        assert temp.num(100) == 354224848179261915075
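Example 18 drives a naive recursive Fibonacci through whatever cache is injected, rebinding the instance's num attribute so the recursion itself goes through the cached wrapper. A self-contained version under assumed definitions of Fib and strhash:

import cachetools

class Fib:
    def num(self, n):
        # Deliberately naive; only fast because self.num is rebound to a cached wrapper.
        if n < 2:
            return n
        return self.num(n - 1) + self.num(n - 2)

def strhash(*args, **kwargs):
    # String keys work with any mapping-like cache.
    return str(args) + str(kwargs)

def cache_test(specific_cache, max_num=100):
    fib = Fib()
    fib.num = cachetools.cached(cache=specific_cache, key=strhash)(fib.num)
    fib.num(max_num)
    if max_num >= 3:
        assert fib.num(3) == 2
    if max_num >= 100:
        assert fib.num(100) == 354224848179261915075

cache_test(cachetools.LRUCache(maxsize=256))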
Example No. 19
    def wrapper(*args, **kwargs):
        kwargs[Task.SERVICE_KWARGS_PREFIX +
               'func_name_for_cache'] = func.__name__
        k = key(*args, **kwargs)
        t = cache.get(k)
        if t and t.running is False and t.exception is not None:
            cache.pop(k)
        return cached(cache, key, lock)(func)(*args, **kwargs)
Example No. 20
    def test_zero_size_cache_decorator(self):
        cache = self.cache(0)
        wrapper = cachetools.cached(cache)(self.func)

        self.assertEqual(len(cache), 0)
        self.assertEqual(wrapper.__wrapped__, self.func)

        self.assertEqual(wrapper(0), 0)
        self.assertEqual(len(cache), 0)
Example No. 21
    def test_zero_size_cache_decorator(self):
        cache = self.cache(0)
        wrapper = cachetools.cached(cache)(self.func)

        self.assertEqual(len(cache), 0)
        self.assertEqual(wrapper.__wrapped__, self.func)

        self.assertEqual(wrapper(0), 0)
        self.assertEqual(len(cache), 0)
Example No. 22
    def __getattr__(self, item):
        if item not in ('pdf', 'logpdf', 'cdf', 'logcdf', 'ppf', 'isf', 'sf',
                        'logsf'):
            return super(ScipyDistribution, self).__getattr__(item)
        f = getattr(self.base_module, item)
        g = partial(self._wrapper, f)
        g = cachetools.cached(self._cache, key=hash_with_series)(g)
        self.__dict__[item] = g
        return g
Example No. 23
    def load_model(model_dir, leaf_cache_size=10, points_cache_size=100000):
        """
        Loads a model from `model_dir` and returns an instance of `HKMNearestNeighbor`.
        :param model_dir: location where the model is saved
        :param leaf_cache_size: how many leaves to keep in the cache
        :param points_cache_size: how many individual points to keep in the cache
        """
        # load skeleton
        file_name = os.path.join(model_dir, 'skeleton.pickle')
        new_hkmnn_model = joblib.load(file_name)
        new_hkmnn_model.model_dir = model_dir
        # compute inverse index
        new_hkmnn_model.inverse_idx = new_hkmnn_model._get_idx_paths()

        # cache calls to get_vector and _get_nn_model
        get_nn_model_cache = LRUCache(maxsize=leaf_cache_size)
        get_vector_cache = LRUCache(maxsize=points_cache_size)
        new_hkmnn_model.get_vector = cached(get_vector_cache)(new_hkmnn_model.get_vector)
        new_hkmnn_model._get_nn_model = cached(get_nn_model_cache)(new_hkmnn_model._get_nn_model)

        return new_hkmnn_model
Example No. 24
def _get_converter():
    global Args
    
    try:
        mod = __import__(os.path.splitext(Args.patterns)[0])
    except:
        raise

    conv = Soros.compile(mod.__doc__)
    if Args.cached:
        conv.lock = RLock()
        conv.cache = TTLCache(maxsize=10**5, ttl=60)
        conv._run = cached(cache=conv.cache, lock=conv.lock)(conv._run)
    return conv
Example No. 25
    def _generalized_lru_cache(function):
        '''Actual decorator'''
        def hash_keys(*args, **kwargs):
            def generalized_hash(arg):
                '''Hashes objects that are not hashable by default'''
                return hash(get_hashable_object(arg))

            args = tuple(generalized_hash(arg) for arg in args)
            kwargs = {k: generalized_hash(v) for k, v in kwargs.items()}
            return cachetools.keys.hashkey(*args, **kwargs)

        return cachetools.cached(
            cache=cachetools.LRUCache(maxsize=maxsize),
            key=hash_keys,
        )(function)
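Example 25 makes otherwise unhashable arguments cacheable by normalising them in a custom key= callable before handing them to hashkey; get_hashable_object and maxsize are defined elsewhere in that repository. A self-contained sketch of the same technique, assuming repr() is an acceptable stand-in for a proper hashable conversion:

import cachetools
import cachetools.keys

def generalized_lru_cache(maxsize=128):
    """LRU-cache functions whose arguments may be unhashable (sketch)."""
    def decorator(function):
        def hash_keys(*args, **kwargs):
            def freeze(value):
                # Fall back to repr() for unhashable arguments such as lists or dicts.
                try:
                    hash(value)
                    return value
                except TypeError:
                    return repr(value)
            args = tuple(freeze(a) for a in args)
            kwargs = {k: freeze(v) for k, v in kwargs.items()}
            return cachetools.keys.hashkey(*args, **kwargs)
        return cachetools.cached(
            cache=cachetools.LRUCache(maxsize=maxsize),
            key=hash_keys,
        )(function)
    return decorator

@generalized_lru_cache(maxsize=32)
def total(values):
    return sum(values)

assert total([1, 2, 3]) == 6
assert total([1, 2, 3]) == 6  # cache hit: both calls map to the same repr-based key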
Example No. 26
    def _get_param(self, key: str) -> Any:
        """Get (possibly) cached ROS parameter.

        Arguments:
            key (str): Name of the ROS parameter

        Returns:
            If the parameter is in the parameter cache, the cached value is returned.
            Otherwise rospy.get_param(key) is returned.
        """
        # Cached version of rospy.get_param:
        get_cached_param = cached(cache=self._parameter_cache)(rospy.get_param)

        # request param
        return get_cached_param(key)
Example No. 27
    def __init__(self, farm_os_url):
        farm_os_client_creation_lock = defer.DeferredLock()

        self._create_farm_os_client = partial(
            farm_os_client_creation_lock.run,
            lru_cache(maxsize=CLIENT_INSTANCE_CACHE_SIZE)(partial(
                TxFarmOsClient.create,
                farm_os_url,
                user_agent="FarmOsAreaFeatureProxy")))

        all_areas_cache_lock = defer.DeferredLock()
        self._get_all_areas_cache_cell = partial(
            all_areas_cache_lock.run,
            cached(
                cache=TTLCache(maxsize=CLIENT_INSTANCE_CACHE_SIZE,
                               ttl=AREAS_CACHE_SECONDS))(_AllAreasCacheCell))
Example No. 28
def lru_cache_numpy(func):
    """ numpy friendly caching """
    maxsize = 128
    cache = cachetools.LRUCache(maxsize=maxsize)

    def hashing_first_numpy_arg(*args, **kwargs):
        """ sum up the hash of all the arguments """
        hash_total = 0
        for x in [*args, *kwargs.values()]:
            if isinstance(x, np.ndarray):
                hash_total += hash(x.data.tobytes())
            else:
                hash_total += hash(x)
        return hash_total

    return cachetools.cached(cache, hashing_first_numpy_arg)(func)
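Assuming numpy is importable, a short usage sketch for the lru_cache_numpy decorator above: because the key sums hashes of tobytes() for array arguments, two distinct arrays with equal contents resolve to the same cache entry.

import numpy as np

@lru_cache_numpy
def column_sums(matrix):
    return matrix.sum(axis=0)

a = np.arange(6).reshape(2, 3)
b = np.arange(6).reshape(2, 3)   # different object, identical contents
assert np.array_equal(column_sums(a), column_sums(b))  # second call is a cache hit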
Example No. 29
class PortEntity:
    @classmethod
    def entity_type(cls):
        return ClassTool.class2fullpath(cls)

    @classmethod
    def text2norm(cls, text): return str2lower(text)

    @classmethod
    # @FunctionTool.wrapper2wraps_applied(lru_cache(maxsize=HenriqueLocale.lang_count()))
    @cached(cache=TTLCache(maxsize=HenriqueLocale.lang_count(), ttl=HenriqueEntity.Cache.DEFAULT_TTL))
    def lang2matcher(cls, lang):
        langs_recognizable = HenriqueLocale.lang2langs_recognizable(lang)

        h_codename2aliases = merge_dicts([{Port.port2codename(port): Port.port_langs2aliases(port, langs_recognizable)}
                                          for port in Port.list_all()],
                                         vwrite=vwrite_no_duplicate_key)

        config = {GazetteerMatcher.Config.Key.NORMALIZER: cls.text2norm,
                  GazetteerMatcher.Config.Key.TEXTS2PATTERN: HenriqueEntity.texts2pattern_port_tradegood,
                  }
        matcher = GazetteerMatcher(h_codename2aliases, config)
        return matcher

    @classmethod
    # @CacheTool.cache2hashable(cache=lru_cache(maxsize=HenriqueEntity.Cache.DEFAULT_SIZE),
    #                           f_pair=CacheTool.JSON.func_pair(), )
    @CacheTool.cache2hashable(cache=cached(cache=TTLCache(maxsize=HenriqueEntity.Cache.DEFAULT_SIZE,
                                             ttl=HenriqueEntity.Cache.DEFAULT_TTL),
                                           ),
                              f_pair=CacheTool.JSON.func_pair(),)
    def text2entity_list(cls, text_in, config=None):
        locale = HenriqueEntity.Config.config2locale(config) or HenriqueLocale.DEFAULT
        lang = LocaleTool.locale2lang(locale) or LocaleTool.locale2lang(HenriqueLocale.DEFAULT)

        matcher = cls.lang2matcher(lang)
        span_value_list = list(matcher.text2span_value_iter(text_in))

        entity_list = [{FoxylibEntity.Field.SPAN: span,
                        FoxylibEntity.Field.TEXT: StringTool.str_span2substr(text_in, span),
                        FoxylibEntity.Field.VALUE: value,
                        FoxylibEntity.Field.TYPE: cls.entity_type(),
                        }
                       for span, value in span_value_list]

        return entity_list
Example No. 30
    def __init__(
            self,
            transport,
            discovery,
            raiden,
            retry_interval,
            retries_before_backoff,
            nat_keepalive_retries,
            nat_keepalive_timeout,
            nat_invitation_timeout):

        self.transport = transport
        self.discovery = discovery
        self.raiden = raiden

        self.retry_interval = retry_interval
        self.retries_before_backoff = retries_before_backoff

        self.nat_keepalive_retries = nat_keepalive_retries
        self.nat_keepalive_timeout = nat_keepalive_timeout
        self.nat_invitation_timeout = nat_invitation_timeout

        self.event_stop = Event()

        self.channel_queue = dict()  # TODO: Change keys to the channel address
        self.greenlets = list()
        self.addresses_events = dict()
        self.nodeaddresses_networkstatuses = defaultdict(lambda: NODE_NETWORK_UNKNOWN)

        # Maps the echohash of received and *successfully* processed messages to
        # its Ack, used to ignore duplicate messages and resend the Ack.
        self.receivedhashes_to_acks = dict()

        # Maps the echohash to a SentMessageState
        self.senthashes_to_states = dict()

        # Maps the addresses to a dict with the latest nonce (using a dict
        # because python integers are immutable)
        self.nodeaddresses_to_nonces = dict()

        cache = cachetools.TTLCache(
            maxsize=50,
            ttl=CACHE_TTL,
        )
        cache_wrapper = cachetools.cached(cache=cache)
        self.get_host_port = cache_wrapper(discovery.get)
Example No. 31
    def __init__(
            self,
            transport,
            discovery,
            raiden,
            retry_interval,
            retries_before_backoff,
            nat_keepalive_retries,
            nat_keepalive_timeout,
            nat_invitation_timeout):

        self.transport = transport
        self.discovery = discovery
        self.raiden = raiden

        self.retry_interval = retry_interval
        self.retries_before_backoff = retries_before_backoff

        self.nat_keepalive_retries = nat_keepalive_retries
        self.nat_keepalive_timeout = nat_keepalive_timeout
        self.nat_invitation_timeout = nat_invitation_timeout

        self.event_stop = Event()

        self.channel_queue = dict()  # TODO: Change keys to the channel address
        self.greenlets = list()
        self.addresses_events = dict()
        self.nodeaddresses_networkstatuses = defaultdict(lambda: NODE_NETWORK_UNKNOWN)

        # Maps the echohash of received and *successfully* processed messages to
        # its Ack, used to ignore duplicate messages and resend the Ack.
        self.receivedhashes_to_acks = dict()

        # Maps the echohash to a SentMessageState
        self.senthashes_to_states = dict()

        # Maps the addresses to a dict with the latest nonce (using a dict
        # because python integers are immutable)
        self.nodeaddresses_to_nonces = dict()

        cache = cachetools.TTLCache(
            maxsize=50,
            ttl=CACHE_TTL,
        )
        cache_wrapper = cachetools.cached(cache=cache)
        self.get_host_port = cache_wrapper(discovery.get)
Example No. 32
    def test_zero_size_cache_decorator_lock(self):
        class Lock(object):

            count = 0

            def __enter__(self):
                Lock.count += 1

            def __exit__(self, *exc):
                pass

        cache = self.cache(0)
        wrapper = cachetools.cached(cache, lock=Lock())(self.func)

        self.assertEqual(len(cache), 0)
        self.assertEqual(wrapper.__wrapped__, self.func)

        self.assertEqual(wrapper(0), 0)
        self.assertEqual(len(cache), 0)
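        # Lock.count ends up at 2: the lock is acquired once for the cache lookup
        # (a miss) and once more for the attempted store, which the zero-size
        # cache discards.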
        self.assertEqual(Lock.count, 2)
Example No. 33
    def test_zero_size_cache_decorator_lock(self):
        class Lock(object):

            count = 0

            def __enter__(self):
                Lock.count += 1

            def __exit__(self, *exc):
                pass

        cache = self.cache(0)
        wrapper = cachetools.cached(cache, lock=Lock())(self.func)

        self.assertEqual(len(cache), 0)
        self.assertEqual(wrapper.__wrapped__, self.func)

        self.assertEqual(wrapper(0), 0)
        self.assertEqual(len(cache), 0)
        self.assertEqual(Lock.count, 2)
Example No. 34
    def __init__(self, transport, discovery, raiden, retry_interval,
                 retries_before_backoff, nat_keepalive_retries,
                 nat_keepalive_timeout, nat_invitation_timeout):

        self.transport = transport
        self.discovery = discovery
        self.raiden = raiden

        self.retry_interval = retry_interval
        self.retries_before_backoff = retries_before_backoff

        self.nat_keepalive_retries = nat_keepalive_retries
        self.nat_keepalive_timeout = nat_keepalive_timeout
        self.nat_invitation_timeout = nat_invitation_timeout

        self.event_stop = Event()

        self.channel_queue = dict()  # TODO: Change keys to the channel address
        self.greenlets = list()
        self.addresses_events = dict()

        # Maps received and *successfully* processed message ids to the
        # corresponding `Processed` message. Used to ignore duplicate messages
        # and resend the `Processed` message.
        self.messageids_to_processedmessages = dict()

        # Maps the message_id to a SentMessageState
        self.messageids_to_states = dict()

        # Maps the addresses to a dict with the latest nonce (using a dict
        # because python integers are immutable)
        self.nodeaddresses_to_nonces = dict()

        cache = cachetools.TTLCache(
            maxsize=50,
            ttl=CACHE_TTL,
        )
        cache_wrapper = cachetools.cached(cache=cache)
        self.get_host_port = cache_wrapper(discovery.get)
Example No. 35
    def __init__(self, address, discovery, udpsocket, throttle_policy, config):
        super().__init__()
        # these values are initialized by the start method
        self.queueids_to_queues: Dict = dict()
        self.raiden: RaidenService
        self.message_handler: MessageHandler

        self.discovery = discovery
        self.config = config
        self.address = address

        self.retry_interval = config["retry_interval"]
        self.retries_before_backoff = config["retries_before_backoff"]
        self.nat_keepalive_retries = config["nat_keepalive_retries"]
        self.nat_keepalive_timeout = config["nat_keepalive_timeout"]
        self.nat_invitation_timeout = config["nat_invitation_timeout"]

        self.event_stop = Event()
        self.event_stop.set()

        self.greenlets = list()
        self.addresses_events = dict()

        self.messageids_to_asyncresults = dict()

        # Maps the addresses to a dict with the latest nonce (using a dict
        # because python integers are immutable)
        self.nodeaddresses_to_nonces = dict()

        cache = cachetools.TTLCache(maxsize=50, ttl=CACHE_TTL)
        cache_wrapper = cachetools.cached(cache=cache)
        self.get_host_port = cache_wrapper(discovery.get)

        self.throttle_policy = throttle_policy
        pool = gevent.pool.Pool()
        self.server = DatagramServer(udpsocket,
                                     handle=self.receive,
                                     spawn=pool)
Example No. 36
    def __init__(self, transport, discovery, raiden, retry_interval,
                 retries_before_backoff, nat_keepalive_retries,
                 nat_keepalive_timeout, nat_invitation_timeout):

        self.transport = transport
        self.discovery = discovery
        self.raiden = raiden

        self.retry_interval = retry_interval
        self.retries_before_backoff = retries_before_backoff

        self.nat_keepalive_retries = nat_keepalive_retries
        self.nat_keepalive_timeout = nat_keepalive_timeout
        self.nat_invitation_timeout = nat_invitation_timeout

        self.event_stop = Event()

        self.channel_queue = dict()  # TODO: Change keys to the channel address
        self.greenlets = list()
        self.addresses_events = dict()
        self.nodeaddresses_networkstatuses = defaultdict(
            lambda: NODE_NETWORK_UNKNOWN)

        # TODO: remove old ACKs from the dict to free memory
        # The Ack for a processed message, used to avoid re-processing a known
        # message
        self.echohash_acks = dict()

        # Maps the echo hash `sha3(message + address)` to a WaitAck tuple
        self.echohash_asyncresult = dict()

        cache = cachetools.TTLCache(
            maxsize=50,
            ttl=CACHE_TTL,
        )
        cache_wrapper = cachetools.cached(cache=cache)
        self.get_host_port = cache_wrapper(discovery.get)
Example No. 37
    def test_decorator_typed(self):
        cache = self.cache(3)

        def typedkey(*args, **kwargs):
            key = cachetools.hashkey(*args, **kwargs)
            key += tuple(type(v) for v in args)
            key += tuple(type(v) for _, v in sorted(kwargs.items()))
            return key

        wrapper = cachetools.cached(cache, key=typedkey)(self.func)

        self.assertEqual(len(cache), 0)
        self.assertEqual(wrapper.__wrapped__, self.func)

        self.assertEqual(wrapper(0), 0)
        self.assertEqual(len(cache), 1)
        self.assertIn(typedkey(0), cache)
        self.assertNotIn(typedkey(1), cache)
        self.assertNotIn(typedkey(1.0), cache)

        self.assertEqual(wrapper(1), 1)
        self.assertEqual(len(cache), 2)
        self.assertIn(typedkey(0), cache)
        self.assertIn(typedkey(1), cache)
        self.assertNotIn(typedkey(1.0), cache)

        self.assertEqual(wrapper(1), 1)
        self.assertEqual(len(cache), 2)

        self.assertEqual(wrapper(1.0), 2)
        self.assertEqual(len(cache), 3)
        self.assertIn(typedkey(0), cache)
        self.assertIn(typedkey(1), cache)
        self.assertIn(typedkey(1.0), cache)

        self.assertEqual(wrapper(1.0), 2)
        self.assertEqual(len(cache), 3)
Example No. 38
    def test_decorator_typed(self):
        cache = self.cache(3)

        def typedkey(*args, **kwargs):
            key = cachetools.hashkey(*args, **kwargs)
            key += tuple(type(v) for v in args)
            key += tuple(type(v) for _, v in sorted(kwargs.items()))
            return key
        wrapper = cachetools.cached(cache, key=typedkey)(self.func)

        self.assertEqual(len(cache), 0)
        self.assertEqual(wrapper.__wrapped__, self.func)

        self.assertEqual(wrapper(0), 0)
        self.assertEqual(len(cache), 1)
        self.assertIn(typedkey(0), cache)
        self.assertNotIn(typedkey(1), cache)
        self.assertNotIn(typedkey(1.0), cache)

        self.assertEqual(wrapper(1), 1)
        self.assertEqual(len(cache), 2)
        self.assertIn(typedkey(0), cache)
        self.assertIn(typedkey(1), cache)
        self.assertNotIn(typedkey(1.0), cache)

        self.assertEqual(wrapper(1), 1)
        self.assertEqual(len(cache), 2)

        self.assertEqual(wrapper(1.0), 2)
        self.assertEqual(len(cache), 3)
        self.assertIn(typedkey(0), cache)
        self.assertIn(typedkey(1), cache)
        self.assertIn(typedkey(1.0), cache)

        self.assertEqual(wrapper(1.0), 2)
        self.assertEqual(len(cache), 3)
Example No. 39
    def __init__(
            self,
            host: str,
            port: int,
            privkey: bytes,
            gasprice: int = None,
            nonce_update_interval: float = 5.0,
            nonce_offset: int = 0,
            web3: Web3 = None,
    ):

        if privkey is None or len(privkey) != 32:
            raise ValueError('Invalid private key')

        endpoint = 'http://{}:{}'.format(host, port)

        self.port = port
        self.privkey = privkey
        self.sender = privatekey_to_address(privkey)
        # Needs to be initialized to None in the beginning since JSONRPCClient
        # gets constructed before the RaidenService Object.
        self.stop_event = None

        self.nonce_last_update = 0
        self.nonce_available_value = None
        self.nonce_lock = Semaphore()
        self.nonce_update_interval = nonce_update_interval
        self.nonce_offset = nonce_offset
        self.given_gas_price = gasprice

        cache = cachetools.TTLCache(
            maxsize=1,
            ttl=RPC_CACHE_TTL,
        )
        cache_wrapper = cachetools.cached(cache=cache)
        self.gaslimit = cache_wrapper(self._gaslimit)
        cache = cachetools.TTLCache(
            maxsize=1,
            ttl=RPC_CACHE_TTL,
        )
        cache_wrapper = cachetools.cached(cache=cache)
        self.gasprice = cache_wrapper(self._gasprice)

        # web3
        if web3 is None:
            self.web3: Web3 = Web3(HTTPProvider(endpoint))
        else:
            self.web3 = web3
        try:
            # we use a PoA chain for the smoketest; this middleware is needed to handle it
            self.web3.middleware_stack.inject(geth_poa_middleware, layer=0)
        except ValueError:
            # `middleware_stack.inject()` raises a value error if the same middleware is
            # injected twice. This happens with `eth-tester` setup where a single session
            # scoped web3 instance is used for all clients
            pass

        # create the connection test middleware (but only for non-tester chain)
        if not hasattr(web3, 'testing'):
            connection_test = make_connection_test_middleware(self)
            self.web3.middleware_stack.inject(connection_test, layer=0)

        supported, self.eth_node = is_supported_client(self.web3.version.node)

        if not supported:
            print('You need a Byzantium enabled ethereum node. Parity >= 1.7.6 or Geth >= 1.7.2')
            sys.exit(1)
Example No. 40
    def __call__(self, **kwargs):
        return cachetools.cached(cache=self.cache, lock=self.lock, **kwargs)
Example No. 41
    def __init__(self, host='localhost', port=5000, cache_time=5, **kwargs):
        super(RemoteResults, self).__init__()
        self.url = 'http://{host}:{port}/hpctl/v1'.format(host=host, port=port)
        cache = cachetools.TTLCache(maxsize=1000, ttl=cache_time)
        self.get = cachetools.cached(cache)(_get)
Example No. 42
def ttl_cache(func):
    return cachetools.cached(CACHE, lock=LOCK)(func)
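Example 42 presumes module-level CACHE and LOCK objects shared by every decorated function. A self-contained sketch under that assumption; note that with the default key, two different functions called with identical arguments would share a cache entry, which is why repositories that need isolation (Example 19) mix the function name into the key.

import threading
import cachetools

CACHE = cachetools.TTLCache(maxsize=1024, ttl=60)   # assumed: entries live for 60 seconds
LOCK = threading.Lock()                             # serialises cache lookups and stores

def ttl_cache(func):
    return cachetools.cached(CACHE, lock=LOCK)(func)

@ttl_cache
def lookup(name):
    return name.upper()

assert lookup("abc") == "ABC"
assert lookup("abc") == "ABC"  # within the TTL this call is served from CACHE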