    def test_method_simple(self):
        manager = queryUtility(ICacheManager)
        content = TestContent()
        self.assertEqual(content.add(4), 4)
        # The method is now cached, so add is not called.
        self.assertEqual(content.add(4), 4)

        cache = manager.get_cache(
            'silva.core.cache.tests.test_descriptors.add')
        self.assertEqual(cache.get(_verify_key(('4',))), 4)
        self.assertRaises(KeyError, cache.get, _verify_key(('5',)))

        self.assertEqual(content.add(5), 9)
        # The method is now cached, so add is not called.
        self.assertEqual(content.add(5), 9)

        self.assertEqual(cache.get(_verify_key(('4',))), 4)
        self.assertEqual(cache.get(_verify_key(('5',))), 9)
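
The test above exercises a caching method decorator: the first call computes the
value, and later calls with the same arguments are answered from a beaker cache
named after the method. A minimal sketch of that idea, assuming a plain beaker
CacheManager and stringified argument keys (illustrative only, not the actual
silva.core.cache descriptor):

import beaker.cache

# Illustrative stand-in for the cache manager utility queried in the tests.
cache_manager = beaker.cache.CacheManager()

def cached_method(func):
    # One beaker cache per decorated method, named after the function.
    namespace = '%s.%s' % (func.__module__, func.__name__)

    def wrapper(self, *args):
        cache = cache_manager.get_cache(namespace)
        # Key on the stringified positional arguments, mirroring the
        # _verify_key(('4',)) lookups in the test.
        key = repr(tuple(str(arg) for arg in args))
        return cache.get(key=key, createfunc=lambda: func(self, *args))

    return wrapper
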
Example #2
def get(namespace, key, createfunc):
    # Caching can be switched off entirely in the configuration.
    if not config.CACHE_ENABLED:
        return createfunc()

    # First level: the local cache region for this namespace.
    cache = cache_manager.get_cache_region(namespace, _CACHES['local'])

    def l2_createfunc():
        # On a local miss, consult the remote (second-level) region before
        # falling back to the real createfunc.
        l2_cache = cache_manager.get_cache_region(namespace, _CACHES['remote'])
        return l2_cache.get(key=key, createfunc=createfunc)

    return cache.get(key=key, createfunc=l2_createfunc)
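
A hedged usage sketch for the helper above: the expensive loader only runs when
both the local and the remote regions miss (the namespace, key, and loader are
made up for illustration).

def load_profile():
    # Hypothetical expensive call, e.g. a database query.
    return {'id': 42, 'name': 'demo'}

profile = get('profiles', 'user:42', createfunc=load_profile)  # miss: runs load_profile
profile = get('profiles', 'user:42', createfunc=load_profile)  # hit: served from the local region
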
Example #3
    def test_method_region(self):
        manager = queryUtility(ICacheManager)
        content = TestContent()
        self.assertEqual(content.remove(4), -4)
        # The method is now cached, so remove is not called.
        self.assertEqual(content.remove(4), -4)

        self.assertTrue('test_descriptors' in beaker.cache.cache_regions)
        cache = manager.get_cache_from_region(
            'silva.core.cache.tests.test_descriptors.remove',
            'test_descriptors')
        self.assertEqual(cache.get(_verify_key(('4',))), -4)
        self.assertRaises(KeyError, cache.get, _verify_key(('-5',)))

        self.assertEqual(content.remove(-5), 1)
        # The method is now cached, so remove is not called.
        self.assertEqual(content.remove(-5), 1)

        self.assertEqual(cache.get(_verify_key(('4',))), -4)
        self.assertEqual(cache.get(_verify_key(('-5',))), 1)
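
For context, a beaker cache region is a named bundle of backend settings that
namespaces are resolved against. A minimal sketch of declaring and reading one
(the region options below are illustrative, not what the test fixture actually
configures):

import beaker.cache

# Regions are normally declared in configuration; here they are passed directly.
manager = beaker.cache.CacheManager(
    cache_regions={'test_descriptors': {'type': 'memory', 'expire': 60}})

cache = manager.get_cache_region(
    'silva.core.cache.tests.test_descriptors.remove', 'test_descriptors')
cache.put('some-key', -4)
assert cache.get('some-key') == -4
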
Example #5
    def test_property(self):
        manager = queryUtility(ICacheManager)
        content = TestContent()
        content.add(4)

        cache = manager.get_cache(
            'silva.core.cache.tests.test_descriptors.next')
        self.assertRaises(KeyError, cache.get, 'property')

        self.assertEqual(content.next, 5)
        # The property is now cached, so next is not recomputed.
        self.assertEqual(content.next, 5)

        self.assertEqual(cache.get('property'), 5)
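
The property variant stores its value under one fixed key instead of hashing the
call arguments, which is why the test looks up the literal key 'property'. A
minimal illustrative sketch (again, not the actual silva.core.cache descriptor):

import beaker.cache

cache_manager = beaker.cache.CacheManager()  # illustrative, as in the earlier sketch

def cached_property(func):
    namespace = '%s.%s' % (func.__module__, func.__name__)

    def getter(self):
        cache = cache_manager.get_cache(namespace)
        # A single constant key: the cached value does not depend on arguments.
        return cache.get(key='property', createfunc=lambda: func(self))

    return property(getter)
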
Example #6
    def send( self,  path:str="", method:str="", params:tuple=(), depth:int=1 ):
        """ Marshals a request and actually sends it to the server, collecting 
        and json-decoding the response.

        This should not be something you ever need to call yourself.

        Args:
            path (str): The path after the host (e.g. empire, building, etc.).  Don't include any directory separators.
            method (str): The name of the method to be run
            params (tuple of str): arguments/parameters to be passed to the method.
        Returns:
            dict: the json-decoded response from the server.
        Raises:
            lacuna.exceptions.NotJsonError: if the server response is not a JSON string
            lacuna.exceptions.ServerError: if the server responds with a non-200 status
                along with a JSON error body
        """

        url = self._build_url()
        if path:
            url = '/'.join( (url, path) )

        request = {
            "jsonrpc": "2.0",
            "id": 1,
            "method": method,
            "params": params,
        }
        request_json = json.dumps( request )
        if hasattr(self, 'debugging_method') and self.debugging_method == method:
            print( request_json )
            quit()

        def get_req():
            self._from_cache = False
            resp = requests.post( url, request_json )
            return resp

        resp = ''
        self._from_cache = False
        if self._cache_on:
            cache = self.cache.get_cache(self._cache_name, type='file', expire=self._cache_exp)
            cache_key = method + repr(params)
            ### Assume a cache hit; get_req() flips _from_cache back to False
            ### if beaker actually has to call it on a miss.
            self._from_cache = True
            resp = cache.get( key = cache_key, createfunc = get_req )
        else:
            resp = requests.post( url, request_json )

        ### The requests module leaves its sockets open in a pool.  This is 
        ### normally fine, but it generates ResourceWarnings when warnings are 
        ### on.  This includes during unit tests.  The simplest solution is 
        ### just to close the connection when we're finished with it.
        ### But with a regular run, at least sometimes, that resp object 
        ### doesn't have a connection attribute.  Probably because it came 
        ### from the cache.
        if hasattr(resp, 'connection'):
            resp.connection.close()

        emp_name = self._determine_empname()
        log_opts = { 'empire': emp_name, 'path': path, 'method': method, }

        ### The json parser will happily return a result when handed a raw 
        ### string instead of json (json.dumps("foobar") works just fine).  
        ### The module is documented to do this.
        ### 
        ### ...so an HTML page containing "server error" will not cause 
        ### json.loads() to produce a ValueError; it'll just be treated as a 
        ### big-ass string.
        ### 
        ### An error page like that should not have a JSON content-type, so 
        ### just checking that _should_ be enough.  But in the spirit of CYA, 
        ### I still want to confirm that the supposedly JSON string I'm 
        ### receiving when the content type indicates JSON, is actually JSON.  
        ### Hence the _looks_like_json() check.
        if resp.headers['content-type'] != 'application/json-rpc' or not \
            self._looks_like_json(resp.text):
                self.request_logger.error('Response is not JSON', extra=log_opts)
                raise lacuna.exceptions.NotJsonError( "Response from server is not json: " + resp.text )

        if resp.status_code != 200:
            json_error = json.loads( resp.text )
            error = lacuna.exceptions.ServerError( json_error['error']['code'], json_error['error']['message'] )

            if depth > 3:
                self.request_logger.error('Likely recursion detected', extra=log_opts)
                raise RuntimeError("Likely infinite recursion detected; bailing!")
            depth += 1

            if error.code == 1010 and re.match('Slow down', error.text) and self.sleep_after_error:
                self.request_logger.warning("60 RPC per minute limit exceeded.", extra=log_opts)
                sleepsecs = self.get_sleeptime()
                if self.warn_on_sleep:
                    self.request_logger.warning("Turning off cache and sleeping for {} seconds.".format(sleepsecs), extra=log_opts)
                self.cache_off()
                time.sleep( sleepsecs )
                thingy = self.send( path, method, params, depth )
            elif error.code == 1006 and error.text == 'Session expired.':
                ### Probably the user's config file had a session_id recorded, 
                ### but it's grown old.  Delete the old session_id, re-login, 
                ### and fix the params we're passing (session_id is the first 
                ### param).  Then re-send.
                self.request_logger.info('Stale session_id found; re-logging in.', extra=log_opts)
                if hasattr(self, 'session_id'):
                    delattr(self, 'session_id')
                self.login()
                fixed_params = (self.session_id,) + tuple(params[1:])
                thingy = self.send( path, method, fixed_params, depth )
            elif error.code == 1016 and error.text == 'Needs to solve a captcha.' and self.show_captcha:
                self.request_logger.info("Displaying required captcha.", extra=log_opts)
                cap = self.get_captcha()
                cap.showit()
                cap.prompt_user()
                cap.solveit()
                thingy = self.send( path, method, params, depth )
            else:
                self.request_logger.error("("+str(error.code)+") "+error.text, extra=log_opts)
                raise error
        else:
            cache_text = " (data from cache)" if self._from_cache else ""
            self.request_logger.info('Success'+cache_text, extra=log_opts)
            thingy = json.loads( resp.text )

        if self.sleep_on_call and not self._from_cache:
            time.sleep( float(self.sleep_on_call) )
        
        ### thingy contains:
        ###     {
        ###         "id": "1",
        ###         "jsonrpc": "2.0",
        ###         "result": { dict that we're actually interested in }
        ###     }
        ### We're only returning 'result', but sometimes we're recursing into 
        ### ourself (captcha, 60 RPC/min limit) - in those cases, thingy will 
        ### already be just 'result'.
        if 'result' in thingy:
            return thingy['result']
        else:
            return thingy
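
Assuming `client` is an already-logged-in instance of the class this method
belongs to (the path and method name below are illustrative, not a documented
call), an invocation might look like:

status = client.send(path='empire', method='get_status',
                     params=(client.session_id,))
print(status.keys())  # the decoded 'result' dict from the JSON-RPC envelope
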
    
Example #8
    # query to load Person by name, with criterion
    # of "person 10"
    q = Session.query(Person).\
                    options(FromCache("local_session", "by_name")).\
                    filter(Person.name=="person 10")

    # load from DB
    person10 = q.one()

    # next call, the query is cached.
    person10 = q.one()

    # clear out the Session.  The "_beaker_cache" dictionary
    # disappears with it.
    Session.remove()

    # query calls from DB again
    person10 = q.one()

    # identity is preserved - person10 is the *same* object that's
    # ultimately inside the cache.   So it is safe to manipulate
    # the not-queried-for attributes of objects when using such a
    # cache without the need to invalidate - however, any change
    # that would change the results of a cached query, such as
    # inserts, deletes, or modification to attributes that are
    # part of query criterion, still require careful invalidation.
    from caching_query import _get_cache_parameters
    cache, key = _get_cache_parameters(q)
    assert person10 is cache.get(key)[0]
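
When a change does affect the cached query (an insert, delete, or edit to a
criterion column), the entry has to be dropped explicitly. A minimal sketch
using the same _get_cache_parameters helper and beaker's Cache.remove_value
(illustrative; the caching_query recipe may provide its own invalidation hooks):

    # Drop the cached entry for this query's cache/key pair.
    cache, key = _get_cache_parameters(q)
    cache.remove_value(key)

    # The next call hits the database again and re-populates the cache.
    person10 = q.one()
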
Example #9
    def send(self,
             path: str = "",
             method: str = "",
             params: tuple = (),
             depth: int = 1):
        """ Marshals a request and actually sends it to the server, collecting 
        and json-decoding the response.

        This should not be something you ever need to call yourself.

        Args:
            path (str): The path after the host (e.g. empire, building, etc.).  Don't include any directory separators.
            method (str): The name of the method to be run
            params (tuple of str): arguments/parameters to be passed to the method.
        Returns:
            dict: the json-decoded response from the server.
        Raises:
            lacuna.exceptions.NotJsonError: if the server response is not a JSON string
            lacuna.exceptions.ServerError: if the server responds with a non-200 status
                along with a JSON error body
        """

        url = self._build_url()
        if path:
            url = '/'.join((url, path))

        request = {
            "jsonrpc": "2.0",
            "id": 1,
            "method": method,
            "params": params,
        }
        request_json = json.dumps(request)
        if hasattr(self,
                   'debugging_method') and self.debugging_method == method:
            print(request_json)
            quit()

        def get_req():
            self._from_cache = False
            resp = requests.post(url, request_json)
            return resp

        resp = ''
        self._from_cache = False
        if self._cache_on:
            cache = self.cache.get_cache(self._cache_name,
                                         type='file',
                                         expire=self._cache_exp)
            cache_key = method + repr(params)
            ### Assume a cache hit; get_req() flips _from_cache back to False
            ### if beaker actually has to call it on a miss.
            self._from_cache = True
            resp = cache.get(key=cache_key, createfunc=get_req)
        else:
            resp = requests.post(url, request_json)

        ### The requests module leaves its sockets open in a pool.  This is
        ### normally fine, but it generates ResourceWarnings when warnings are
        ### on.  This includes during unit tests.  The simplest solution is
        ### just to close the connection when we're finished with it.
        ### But with a regular run, at least sometimes, that resp object
        ### doesn't have a connection attribute.  Probably because it came
        ### from the cache.
        if hasattr(resp, 'connection'):
            resp.connection.close()

        emp_name = self._determine_empname()
        log_opts = {
            'empire': emp_name,
            'path': path,
            'method': method,
        }

        ### The json parser will happily return a result when handed a raw
        ### string instead of json (json.dumps("foobar") works just fine).
        ### The module is documented to do this.
        ###
        ### ...so an HTML page containing "server error" will not cause
        ### json.loads() to produce a ValueError; it'll just be treated as a
        ### big-ass string.
        ###
        ### An error page like that should not have a JSON content-type, so
        ### just checking that _should_ be enough.  But in the spirit of CYA,
        ### I still want to confirm that the supposedly JSON string I'm
        ### receiving when the content type indicates JSON, is actually JSON.
        ### Hence the _looks_like_json() check.
        if resp.headers['content-type'] != 'application/json-rpc' or not \
            self._looks_like_json(resp.text):
            self.request_logger.error('Response is not JSON', extra=log_opts)
            raise lacuna.exceptions.NotJsonError(
                "Response from server is not json: " + resp.text)

        if resp.status_code != 200:
            json_error = json.loads(resp.text)
            error = lacuna.exceptions.ServerError(
                json_error['error']['code'], json_error['error']['message'])

            if depth > 3:
                self.request_logger.error('Likely recursion detected',
                                          extra=log_opts)
                raise RuntimeError(
                    "Likely infinite recursion detected; bailing!")
            depth += 1

            if error.code == 1010 and re.match(
                    'Slow down', error.text) and self.sleep_after_error:
                self.request_logger.warning(
                    "60 RPC per minute limit exceeded.", extra=log_opts)
                sleepsecs = self.get_sleeptime()
                if self.warn_on_sleep:
                    self.request_logger.warning(
                        "Turning off cache and sleeping for {} seconds.".
                        format(sleepsecs),
                        extra=log_opts)
                self.cache_off()
                time.sleep(sleepsecs)
                thingy = self.send(path, method, params, depth)
            elif error.code == 1006 and error.text == 'Session expired.':
                ### Probably the user's config file had a session_id recorded,
                ### but it's grown old.  Delete the old session_id, re-login,
                ### and fix the params we're passing (session_id is the first
                ### param).  Then re-send.
                self.request_logger.info(
                    'Stale session_id found; re-logging in.', extra=log_opts)
                if hasattr(self, 'session_id'):
                    delattr(self, 'session_id')
                self.login()
                fixed_params = (self.session_id,) + tuple(params[1:])
                thingy = self.send(path, method, fixed_params, depth)
            elif error.code == 1016 and error.text == 'Needs to solve a captcha.' and self.show_captcha:
                self.request_logger.info("Displaying required captcha.",
                                         extra=log_opts)
                cap = self.get_captcha()
                cap.showit()
                cap.prompt_user()
                cap.solveit()
                thingy = self.send(path, method, params, depth)
            else:
                self.request_logger.error("(" + str(error.code) + ") " +
                                          error.text,
                                          extra=log_opts)
                raise error
        else:
            cache_text = " (data from cache)" if self._from_cache else ""
            self.request_logger.info('Success' + cache_text, extra=log_opts)
            thingy = json.loads(resp.text)

        if self.sleep_on_call and not self._from_cache:
            time.sleep(float(self.sleep_on_call))

        ### thingy contains:
        ###     {
        ###         "id": "1",
        ###         "jsonrpc": "2.0",
        ###         "result": { dict that we're actually interested in }
        ###     }
        ### We're only returning 'result', but sometimes we're recursing into
        ### ourself (captcha, 60 RPC/min limit) - in those cases, thingy will
        ### already be just 'result'.
        if 'result' in thingy:
            return thingy['result']
        else:
            return thingy