Exemplo n.º 1
0
 def test_client_behaviors(self):
     """Behavior options in the config must be coerced and handed through
     to the underlying pylibmc client untouched."""
     config = {
         'cache.lock_dir': './lock',
         'cache.data_dir': './cache',
         'cache.type': 'ext:memcached',
         'cache.url': mc_url,
         'cache.memcache_module': 'pylibmc',
         'cache.protocol': 'binary',
         'cache.behavior.ketama': 'True',
         'cache.behavior.cas': False,
         'cache.behavior.receive_timeout': '3600',
         'cache.behavior.send_timeout': 1800,
         'cache.behavior.tcp_nodelay': 1,
         'cache.behavior.auto_eject': "0"
     }
     manager = CacheManager(**parse_cache_config_options(config))
     cache = manager.get_cache('test_behavior', expire=6000)

     # (behavior name, value expected after beaker's option coercion)
     expected = [
         ('ketama', 1),
         ('cas', 0),
         ('receive_timeout', 3600),
         ('send_timeout', 1800),
         ('tcp_nodelay', 1),
         ('auto_eject', 0),
     ]
     with cache.namespace.pool.reserve() as mc:
         for name, value in expected:
             assert name in mc.behaviors
             assert mc.behaviors[name] == value
Exemplo n.º 2
0
	def init_cache(self):
		"""Return a file-backed beaker cache for headlines (10 minute TTL)."""
		opts = {'cache.type': 'file',
				'cache.data_dir': '/tmp/cache/data',
				'cache.lock_dir': '/tmp/cache/lock'}
		manager = CacheManager(**parse_cache_config_options(opts))
		return manager.get_cache('schlagzeile', expire=600)
Exemplo n.º 3
0
    def test_client_behaviors(self):
        """Behavior settings must survive config parsing and reach pylibmc."""
        config = {
            "cache.lock_dir": "./lock",
            "cache.data_dir": "./cache",
            "cache.type": "ext:memcached",
            "cache.url": mc_url,
            "cache.memcache_module": "pylibmc",
            "cache.protocol": "binary",
            "cache.behavior.ketama": "True",
            "cache.behavior.cas": False,
            "cache.behavior.receive_timeout": "3600",
            "cache.behavior.send_timeout": 1800,
            "cache.behavior.tcp_nodelay": 1,
            "cache.behavior.auto_eject": "0",
        }
        manager = CacheManager(**parse_cache_config_options(config))
        cache = manager.get_cache("test_behavior", expire=6000)

        # Behavior name -> value expected after beaker's coercion.
        expectations = {
            "ketama": 1,
            "cas": 0,
            "receive_timeout": 3600,
            "send_timeout": 1800,
            "tcp_nodelay": 1,
            "auto_eject": 0,
        }
        with cache.namespace.pool.reserve() as client:
            for behavior, value in expectations.items():
                assert behavior in client.behaviors
                assert client.behaviors[behavior] == value
Exemplo n.º 4
0
def setup_caching(data_dir, expire=240):
    """Create and return the 'short_term' beaker cache under *data_dir*.

    Parameters:
        data_dir: directory used for both cache data and lock files.
        expire: entry lifetime in seconds; defaults to 240, the value
            previously hard-coded here (kept for backward compatibility).

    Returns the beaker Cache instance for the 'short_term' region.
    """
    cache_opts = {
        'cache.type': 'dbm',
        'cache.data_dir': data_dir,
        'cache.lock_dir': data_dir,
        'cache.regions': 'short_term, long_term',
        'cache.short_term.type': 'dbm',
    }

    cache_manager = CacheManager(**parse_cache_config_options(cache_opts))
    return cache_manager.get_cache('short_term', expire=expire)
Exemplo n.º 5
0
 def __init__(self):
     """Set up the RadioDNS logger plus two caches: a short-lived beaker
     cache and a direct pylibmc client used as the durable cache."""
     self.logger = logging.getLogger('radiovisserver.radiodns')
     # Short-lived (60 s) beaker cache configured from config.CACHE_OPTS.
     self.cache = CacheManager(
         **parse_cache_config_options(config.CACHE_OPTS)).get_cache(
             'radiodns', expire=60)
     # Durable cache talks to memcached directly via pylibmc; binary
     # protocol with tcp_nodelay and ketama hashing enabled.
     # NOTE(review): unlike self.cache this has no expiry handling here.
     self.durablecache = pylibmc.Client(
         [config.MEMCACHED_HOST],
         binary=True,
         behaviors={
             "tcp_nodelay": True,
             "ketama": True,
         }
     )  # CacheManager(**parse_cache_config_options(config.CACHE_OPTS)).get_cache('radiodnsdurable')
	def __init__(self, namespace, **nsargs):
		"""Initialise the namespace and attach a beaker cache for it.

		The configuration declares a file-backed default plus short/long
		term region settings; the instance cache itself is a dbm cache
		keyed by *namespace*.
		"""
		super().__init__(namespace, **nsargs)
		cache_opts = {
			'cache.type': 'file',
			'cache.data_dir': 'cache/data',
			'cache.lock_dir': 'cache/lock',
			'cache.short_term.type': 'ext:memcached',
			# FIX: host and port must be separated by ':' — the original
			# '127.0.0.1.11211' is not a valid memcached URL.
			'cache.short_term.url': '127.0.0.1:11211',
			'cache.short_term.expire': '3600',
			'cache.long_term.type': 'file',
			'cache.long_term.expire': '86400'
		}
		# NOTE(review): no 'cache.regions' key is declared, so the
		# short_term/long_term entries may be ignored by beaker — confirm.
		cache_manager = CacheManager(**parse_cache_config_options(cache_opts))
		self.cache = cache_manager.get_cache(namespace, type='dbm')
Exemplo n.º 7
0
 def __init__(self, config):
     """Build the WMS capability cache from *config*; honour the
     'wmscapabilitycache.enable' flag (string 'true'/'false')."""
     enable_value = config.get('wmscapabilitycache.enable', 'True')
     self.enableCache = enable_value.lower() == 'true'
     if self.enableCache:
         cache_opts = {
             'cache.expire': config.get('wmscapabilitycache.expire', None),
             'cache.type': config.get('wmscapabilitycache.type', 'file'),
             'cache.data_dir': config.get('wmscapabilitycache.data_dir', '/tmp/ecomaps/wmscapabilitycache/data'),
             'cache.lock_dir': config.get('wmscapabilitycache.lock_dir', None),
         }
         manager = CacheManager(**parse_cache_config_options(cache_opts))
         self.cache = manager.get_cache('getWmsCapabilities')
     log.info("WMS capability caching %s" % ("enabled" if self.enableCache else "disabled"))
Exemplo n.º 8
0
    def test_cache(self):
        # Local imports keep beaker optional for the rest of the suite.
        # NOTE(review): `time` appears unused in this test.
        import time
        from beaker.cache import CacheManager
        cm = CacheManager()
        cache = cm.get_cache('auth_pubtkt_middleware', type='memory', expire=3600)
        app = DumbApp()
        app = AuthPubTKTMiddleware(app, rsa_pub, cache=cache)
        # Minimal WSGI environ carrying a pre-signed auth_pubtkt cookie.
        env = {}
        env['REMOTE_ADDR'] = '192.168.1.10'
        env['REQUEST_METHOD'] = 'GET'
        env['HTTP_COOKIE'] = 'auth_pubtkt="uid=user1;validuntil=1277190189;cip=192.168.1.10;tokens=editor,moderator;graceperiod=3600;udata=custom data;[email protected];display_name=John;sig=YaMhb5yXkfqOtQ87P5gYeh4kSgQev1c6XjqT0pXT/ojXj/qpswpyqWenNv3y5rcUPT++80zZPBVNFfwPUI5Crps5nHZP55FNPtBE337KYZ6KYoMEVQD6xqnouf5i1Jm5KwB1IfQdr8fvRQs2oqBIMMTkVyfv6yRRNWVPz+7xwxw="'

        # Second call exercises the cached-ticket path.
        app(env, dumb_start_response)
        app(env, dumb_start_response)
    def setUp(self):
        # Register the in-memory beaker regions the extract code relies on.
        cache_opts = {
            'cache.type': 'memory',
            'cache.regions':
            'public.data,public.filtered_data,public.shortlived'
        }
        CacheManager(**parse_cache_config_options(cache_opts))
        self.__request = DummyRequest()
        # Must set hook_zca to false to work with uniittest_with_sqlite
        reg = Registry()
        # NOTE(review): the empty assignment is immediately overwritten.
        reg.settings = {}
        reg.settings = {
            'extract.available_grades': '3,4,5,6,7,8,9,11',
            'hpz.file_upload_base_url': 'http://somehost:82/files',
            'extract.item_level_base_dir': '/opt/edware/item_level'
        }
        self.__config = testing.setUp(registry=reg,
                                      request=self.__request,
                                      hook_zca=False)
        self.__tenant_name = get_unittest_tenant_name()

        defined_roles = [(Allow, RolesConstants.ITEM_LEVEL_EXTRACTS,
                          ('view', 'logout'))]
        edauth.set_roles(defined_roles)
        set_tenant_map({get_unittest_tenant_name(): 'NC'})
        # Set up context security
        dummy_session = create_test_session(
            [RolesConstants.ITEM_LEVEL_EXTRACTS])
        self.__config.testing_securitypolicy(dummy_session.get_user())
        # celery settings for UT
        settings = {'extract.celery.CELERY_ALWAYS_EAGER': True}
        setup_celery(settings)
        # for UT purposes
        smarter.extracts.format.json_column_mapping = {}
        # NOTE(review): this replaces the tenant map set above — confirm
        # the 'tomcat' mapping is the one the tests actually need.
        set_tenant_map({'tomcat': 'NC'})
Exemplo n.º 10
0
def test_cache_region_expire_is_always_int():
    """A region whose 'expire' is a string must still cache correctly."""
    region = "short_term_with_string_expire"
    try:
        cache = CacheManager(cache_regions={
            region: {"expire": "60", "type": "memory"},
        })

        # Creating the manager registers the region globally.
        assert region in cache_regions

        @cache.region(region)
        def load_with_str_expire(person):
            return "Hi there %s, its currently %s" % (person, datetime.now())

        # Two lookups for the same key must return the cached value.
        first = load_with_str_expire("fred")
        second = load_with_str_expire("fred")
        assert first == second, (first, second)
    finally:
        # Unregister the region so other tests start clean.
        cache_regions.pop(region, None)
Exemplo n.º 11
0
def test_cache_region_has_default_key_length():
    """A region declared without 'key_length' must still work end to end."""
    region = "short_term_without_key_length"
    try:
        cache = CacheManager(cache_regions={
            region: {"expire": 60, "type": "memory"},
        })

        # The manager must have registered the region globally.
        assert region in cache_regions

        @cache.region(region)
        def load_without_key_length(person):
            return "Hi there %s, its currently %s" % (person, datetime.now())

        # Same key -> cached value.
        first = load_without_key_length("fred")
        second = load_without_key_length("fred")
        assert first == second, (first, second)

        # Different key -> freshly computed value with a new timestamp.
        third = load_without_key_length("george")
        assert third.split(",")[-1] != second.split(",")[-1]
    finally:
        # Unregister the region so other tests start clean.
        cache_regions.pop(region, None)
Exemplo n.º 12
0
def test_directory_goes_away():
    """The dbm cache must transparently repopulate after its files vanish."""
    cache = CacheManager(
        cache_regions={
            "short_term_without_key_length": {
                "expire": 60,
                "type": "dbm",
                "data_dir": "/tmp/beaker-tests/cache/data",
                "lock_dir": "/tmp/beaker-tests/cache/lock",
            }
        })

    @cache.region("short_term_without_key_length")
    def load_with_str_expire(person):
        return "Hi there %s, its currently %s" % (person, datetime.now())

    # Warm the cache: both calls must observe the same stored value.
    first = load_with_str_expire("fred")
    second = load_with_str_expire("fred")

    # Delete the backing files; the next call has to recompute.
    shutil.rmtree("/tmp/beaker-tests")
    third = load_with_str_expire("fred")

    assert first == second, (first, second)
    assert second != third, (second, third)
Exemplo n.º 13
0
def main(global_config, **settings):
    """Application factory"""
    config = Configurator(settings=settings, root_factory=RootFactory)
    declare_routes(config)

    session_factory = SignedCookieSessionFactory(
        settings.get('session_key', 'itsaseekreet'))
    config.set_session_factory(session_factory)

    # Publish the beaker CacheManager via the module-level `cache` global
    # so code outside the request cycle can reach it.
    global cache
    cache = CacheManager(**parse_cache_config_options(settings))

    # Imported here (not at module top) — presumably to avoid an import
    # cycle with .authnz; confirm before hoisting.
    from .authnz import APIKeyAuthenticationPolicy
    api_key_authn_policy = APIKeyAuthenticationPolicy()
    config.include('openstax_accounts')
    openstax_authn_policy = config.registry.getUtility(
        IOpenstaxAccountsAuthenticationPolicy)
    # API-key auth is consulted first, then the openstax-accounts policy.
    policies = [api_key_authn_policy, openstax_authn_policy]
    authn_policy = MultiAuthenticationPolicy(policies)
    config.set_authentication_policy(authn_policy)
    authz_policy = ACLAuthorizationPolicy()
    config.set_authorization_policy(authz_policy)

    config.scan(ignore='cnxpublishing.tests')
    return config.make_wsgi_app()
Exemplo n.º 14
0
def test_directory_goes_away():
    """Deleting the dbm files behind a region must not break later reads."""
    region_conf = {
        'expire': 60,
        'type': 'dbm',
        'data_dir': '/tmp/beaker-tests/cache/data',
        'lock_dir': '/tmp/beaker-tests/cache/lock'
    }
    cache = CacheManager(
        cache_regions={'short_term_without_key_length': region_conf})

    @cache.region('short_term_without_key_length')
    def load_with_str_expire(person):
        return "Hi there %s, its currently %s" % (person, datetime.now())

    # Two calls with the same key share one cached value.
    msg = load_with_str_expire('fred')
    msg2 = load_with_str_expire('fred')

    # Remove the cache directory out from under beaker.
    shutil.rmtree('/tmp/beaker-tests')

    # The next call must recompute rather than fail.
    msg3 = load_with_str_expire('fred')
    assert msg == msg2, (msg, msg2)
    assert msg2 != msg3, (msg2, msg3)
    def setUp(self):
        # Scratch directory used as the PDF report base dir.
        self.__temp_dir = tempfile.mkdtemp()

        self.settings = {}
        self.settings['cache.regions'] = 'public.data, session'
        self.settings['cache.type'] = 'memory'
        self.settings['batch.user.session.timeout'] = 10777700
        self.settings['auth.policy.secret'] = 'secret'
        self.settings['auth.policy.cookie_name'] = 'myName'
        self.settings['auth.policy.hashalg'] = 'sha1'
        self.settings['application.url'] = 'dummy:1234'
        self.settings['services.celery.CELERY_ALWAYS_EAGER'] = True
        self.settings['pdf.base.url'] = 'http://dummy:8234'
        self.settings['pdf.batch.job.queue'] = 'dummyQueue'
        self.settings['pdf.health_check.job.queue'] = 'dummyQueue'
        self.settings['pdf.report_base_dir'] = self.__temp_dir
        self.settings['pdf.minimum_file_size'] = 0

        # Registers the beaker regions globally; the manager object itself
        # is not needed afterwards.
        CacheManager(**parse_cache_config_options(self.settings))

        # CELERY_ALWAYS_EAGER makes tasks run inline during the tests.
        setup_celery(self.settings)

        component.provideUtility(SessionBackend(self.settings),
                                 ISessionBackend)
        self.pdf_generator = PDFGenerator(self.settings, 'myTenant')
Exemplo n.º 16
0
 def setUp(self):
     self.reg = Registry()
     self.__work_zone_dir = tempfile.TemporaryDirectory()
     self.reg.settings = {
         'extract.work_zone_base_dir': '/tmp/work_zone',
         'pickup.gatekeeper.t1': '/t/acb',
         'pickup.gatekeeper.t2': '/a/df',
         'pickup.gatekeeper.y': '/a/c',
         'pickup.sftp.hostname': 'hostname.local.net',
         'pickup.sftp.user': '******',
         'pickup.sftp.private_key_file': '/home/users/myUser/.ssh/id_rsa',
         'extract.available_grades': '3,4,5,6,7,8,11'
     }
     # Register the in-memory beaker regions the extract code expects.
     cache_opts = {
         'cache.type': 'memory',
         'cache.regions':
         'public.data,public.filtered_data,public.shortlived'
     }
     CacheManager(**parse_cache_config_options(cache_opts))
     # Set up user context
     self.__request = DummyRequest()
     # Must set hook_zca to false to work with unittest_with_sqlite
     self.__config = testing.setUp(registry=self.reg,
                                   request=self.__request,
                                   hook_zca=False)
     defined_roles = [(Allow, RolesConstants.SAR_EXTRACTS, ('view',
                                                            'logout'))]
     edauth.set_roles(defined_roles)
     set_tenant_map({get_unittest_tenant_name(): 'NC'})
     # Authenticate the dummy session so views see a logged-in user.
     dummy_session = create_test_session([RolesConstants.SAR_EXTRACTS])
     self.__config.testing_securitypolicy(dummy_session.get_user())
Exemplo n.º 17
0
    def __init__(self, config):
        """Build the app-wide beaker CacheManager from the Pylons config.

        One Globals instance exists per application and is reachable in
        requests through the 'app_globals' variable.
        """
        options = parse_cache_config_options(config)
        self.cache = CacheManager(**options)
Exemplo n.º 18
0
 def __init__(self, type='memory', **kwargs):
     """Wrap a beaker CacheManager of the given *type*.

     Any keyword arguments replace the default directory options
     wholesale (they are not merged with the defaults).
     """
     if kwargs:
         opts = kwargs
     else:
         opts = {
             'data_dir': '/tmp/messengerbot-cache/data',
             'lock_dir': '/tmp/messengerbot-cache/lock'
         }
     opts['type'] = type
     self.cachemgr = CacheManager(**opts)
 def setUp(self):
     self.reg = Registry()
     self.__work_zone_dir = tempfile.TemporaryDirectory()
     self.reg.settings = {
         'extract.work_zone_base_dir': '/tmp/work_zone',
         'extract.available_grades': '3,4,5,6,7,8,11',
         'hpz.file_upload_base_url': 'http://somehost:82/files'
     }
     # Run celery tasks inline so the tests need no worker.
     settings = {'extract.celery.CELERY_ALWAYS_EAGER': True}
     setup_celery(settings)
     # Register the in-memory beaker regions the extract code expects.
     cache_opts = {
         'cache.type': 'memory',
         'cache.regions':
         'public.data,public.filtered_data,public.shortlived'
     }
     CacheManager(**parse_cache_config_options(cache_opts))
     # Set up user context
     self.__request = DummyRequest()
     # Must set hook_zca to false to work with unittest_with_sqlite
     self.__config = testing.setUp(registry=self.reg,
                                   request=self.__request,
                                   hook_zca=False)
     defined_roles = [
         (Allow, RolesConstants.SRS_EXTRACTS, ('view', 'logout')),
         (Allow, RolesConstants.SRC_EXTRACTS, ('view', 'logout'))
     ]
     edauth.set_roles(defined_roles)
     # Authenticate a dummy session carrying both extract roles.
     dummy_session = create_test_session(
         [RolesConstants.SRS_EXTRACTS, RolesConstants.SRC_EXTRACTS])
     self.__config.testing_securitypolicy(dummy_session.get_user())
     set_tenant_map({get_unittest_tenant_name(): 'NC'})
Exemplo n.º 20
0
 def __init__(self, config):
     """Build the DOI citation cache from optional config attributes."""
     # Missing attributes fall back to beaker's own defaults (None dirs).
     opts = {
         'cache.type': getattr(config, 'citation_cache_type', 'file'),
         'cache.data_dir': getattr(config, 'citation_cache_data_dir', None),
         'cache.lock_dir': getattr(config, 'citation_cache_lock_dir', None),
     }
     manager = CacheManager(**parse_cache_config_options(opts))
     self._cache = manager.get_cache('doi')
 def setUp(self):
     """Register an in-memory 'public.shortlived' region (2 h expiry)."""
     opts = {
         'cache.regions': 'public.shortlived',
         'cache.type': 'memory',
         'cache.public.shortlived.expire': 7200
     }
     # The manager registers the region globally; no reference is kept.
     CacheManager(**parse_cache_config_options(opts))
Exemplo n.º 22
0
    def __init__(self, config):
        """One instance of Globals is created during application
        initialization and is available during requests via the
        'app_globals' variable

        Optional config keys populate the matching attributes; attributes
        for absent keys are simply not set.
        """
        self.cache = CacheManager(**parse_cache_config_options(config))
        # FIX: dict.has_key() was removed in Python 3; the `in` operator
        # is the equivalent membership test and works on Python 2 as well.
        if "htpasswd.file" in config:
            self.passwdfile = HtpasswdFile(config['htpasswd.file'])
            self.passwdfile.load()

        if "mediators.dir" in config:
            self.mediatorsdir = config['mediators.dir']

        if "mediators.list" in config:
            self.mediatorslist = config['mediators.list']

        if "vocabularies.dir" in config:
            self.vocabulariesdir = config['vocabularies.dir']

        if "vocabularies.ref" in config:
            self.vocabulariesref = config['vocabularies.ref']

        if "ext_vocabularies.dir" in config:
            self.extvocabulariesdir = config['ext_vocabularies.dir']

        if "svn.username" in config:
            self.svnusername = config['svn.username']

        if "svn.password" in config:
            self.svnpassword = config['svn.password']

        if "conversion_template" in config:
            self.conversion_template = config['conversion_template']
    def __init__(self, config):
        """One instance of Globals is created during application
        initialization and is available during requests via the
        'app_globals' variable

        """
        self.cache = CacheManager(**parse_cache_config_options(config))

        # Backend cache spec, e.g. 'redis:host:port' or 'memcached:h1,h2'.
        cache_spec = config.get('buildapi.cache')
        tz_name = config.get('timezone')
        tz = pytz.timezone(tz_name)
        self.tz = tz

        self.masters_url = config['masters_url']
        self.branches_url = config['branches_url']

        if hasattr(cacher, 'RedisCache') and cache_spec.startswith('redis:'):
            # TODO: handle other hosts/ports
            bits = cache_spec.split(':')
            kwargs = {}
            if len(bits) >= 2:
                kwargs['host'] = bits[1]

            if len(bits) == 3:
                kwargs['port'] = int(bits[2])
            buildapi_cacher = cacher.RedisCache(**kwargs)
        elif hasattr(cacher,
                     'MemcacheCache') and cache_spec.startswith('memcached:'):
            # Strip the 'memcached:' prefix (10 chars), split host list.
            hosts = cache_spec[10:].split(',')
            buildapi_cacher = cacher.MemcacheCache(hosts)
        else:
            # Reached when the spec prefix is unknown OR the matching
            # cacher backend is not importable.
            raise RuntimeError("invalid cache spec %r" % (cache_spec, ))

        self.buildapi_cache = cache.BuildapiCache(buildapi_cacher, tz)
Exemplo n.º 24
0
def test_cache_region_expire_is_always_int():
    """A string-valued 'expire' must be coerced so caching still works."""
    region_name = 'short_term_with_string_expire'
    try:
        cache = CacheManager(cache_regions={
            region_name: {'expire': '60', 'type': 'memory'}
        })

        # The manager must have registered the region globally.
        assert region_name in cache_regions

        @cache.region(region_name)
        def load_with_str_expire(person):
            return "Hi there %s, its currently %s" % (person, datetime.now())

        # Repeated lookups for the same key return the cached value.
        msg = load_with_str_expire('fred')
        msg2 = load_with_str_expire('fred')
        assert msg == msg2, (msg, msg2)
    finally:
        # Always unregister the region so other tests are unaffected.
        cache_regions.pop(region_name, None)
Exemplo n.º 25
0
def test_cache_region_has_default_key_length():
    """A region without an explicit 'key_length' must cache correctly."""
    region_name = 'short_term_without_key_length'
    try:
        cache = CacheManager(cache_regions={
            region_name: {'expire': 60, 'type': 'memory'}
        })

        # The manager must have registered the region globally.
        assert region_name in cache_regions

        @cache.region(region_name)
        def load_without_key_length(person):
            return "Hi there %s, its currently %s" % (person, datetime.now())

        # Same key -> value served from cache.
        msg = load_without_key_length('fred')
        msg2 = load_without_key_length('fred')
        assert msg == msg2, (msg, msg2)

        # Different key -> recomputed value with a different timestamp.
        msg3 = load_without_key_length('george')
        assert msg3.split(',')[-1] != msg2.split(',')[-1]
    finally:
        # Always unregister the region so other tests are unaffected.
        cache_regions.pop(region_name, None)
Exemplo n.º 26
0
def init_beaker_cache():
    """Configure the in-memory 'mistic' beaker region (5 minute expiry).

    Returns the CacheManager. Previously the manager was assigned to a
    local variable and discarded, so callers could only rely on the
    side effect of region registration; returning it is backward
    compatible and lets callers use the manager directly.
    """
    cache_opts = {
        'cache.type': 'memory',
        'cache.regions': 'mistic',
        'cache.mistic.expire': 300
    }

    return CacheManager(**parse_cache_config_options(cache_opts))
 def setUp(self):
     """Reset beaker's manager registry and declare the test regions."""
     cache_managers.clear()
     opts = {
         'cache.type': 'memory',
         'cache.regions': 'public.data, public.filtered_data, unittest, public.shortlived, public.very_shortlived'
     }
     # Registers the regions globally; the manager itself is not kept.
     CacheManager(**parse_cache_config_options(opts))
     self.tenant = get_unittest_tenant_name()
Exemplo n.º 28
0
 def setUp(self):
     """Clear beaker's global registry, then build a fresh test manager."""
     cache_managers.clear()
     opts = {
         'cache.type': 'memory',
         'cache.regions': 'dummyunittest',
         'cache.expire': 10
     }
     self.cache_mgr = CacheManager(**parse_cache_config_options(opts))
 def setUp(self):
     """Create a memory-backed 'session' region plus the backend under test."""
     settings = {
         'cache.expire': 10,
         'cache.regions': 'session',
         'cache.type': 'memory',
         'batch.user.session.timeout': 1,
     }
     self.cachemgr = CacheManager(**parse_cache_config_options(settings))
     self.backend = BeakerBackend(settings)
Exemplo n.º 30
0
def setup(config):
    """Initialise the module-global beaker CacheManager from *config*."""
    global _cache_manager
    _cache_manager = CacheManager(**parse_cache_config_options(config))
Exemplo n.º 31
0
 def setUp(self):
     """Register a memory 'session' region and publish a SessionBackend."""
     settings = {
         'cache.expire': 10,
         'cache.regions': 'session',
         'cache.type': 'memory',
         'batch.user.session.timeout': 15,
     }
     CacheManager(**parse_cache_config_options(settings))
     component.provideUtility(SessionBackend(settings), ISessionBackend)
Exemplo n.º 32
0
    def __init__(self, app, config=None, environ_key='beaker.cache', **kwargs):
        """Initialize the Cache Middleware
        
        The Cache middleware will make a Cache instance available
        every request under the ``environ['beaker.cache']`` key by
        default. The location in environ can be changed by setting
        ``environ_key``.
        
        ``config``
            dict  All settings should be prefixed by 'cache.'. This
            method of passing variables is intended for Paste and other
            setups that accumulate multiple component settings in a
            single dictionary. If config contains *no cache. prefixed
            args*, then *all* of the config options will be used to
            intialize the Cache objects.
        
        ``environ_key``
            Location where the Cache instance will keyed in the WSGI
            environ
        
        ``**kwargs``
            All keyword arguments are assumed to be cache settings and
            will override any settings found in ``config``

        """
        self.app = app
        config = config or {}

        # Load up the default params
        self.options = dict(type='memory',
                            data_dir=None,
                            timeout=None,
                            log_file=None)

        # Pull out any config args starting with beaker cache. if there are any
        for dct in [config, kwargs]:
            # FIX: dict.iteritems() is Python 2 only; items() works on both.
            for key, val in dct.items():
                if key.startswith('beaker.cache.'):
                    self.options[key[13:]] = val
                if key.startswith('cache.'):
                    self.options[key[6:]] = val
                if key.startswith('cache_'):
                    warnings.warn(
                        'Cache options should start with cache. '
                        'instead of cache_', DeprecationWarning, 2)
                    self.options[key[6:]] = val

        # Coerce and validate cache params
        coerce_cache_params(self.options)

        # Assume all keys are intended for cache if none are prefixed with
        # 'cache.'
        # NOTE(review): self.options always contains the four defaults, so
        # this branch can never trigger — confirm intended behavior before
        # changing it.
        if not self.options and config:
            self.options = config

        self.options.update(kwargs)
        self.cache_manager = CacheManager(**self.options)
        self.environ_key = environ_key
Exemplo n.º 33
0
class BeakerCache(object):
    """Thin region-based wrapper around an in-memory beaker CacheManager."""

    def __init__(self):
        opts = {
            'cache.type': 'memory',
        }
        self.cache = CacheManager(**parse_cache_config_options(opts))

    def _region(self, region):
        """Return the beaker Cache object backing *region*."""
        return self.cache.get_cache(region)

    def get(self, key, region):
        return self._region(region).get(key)

    def put(self, key, value, region):
        self._region(region).put(key, value)

    def remove(self, key, region):
        self._region(region).remove_value(key)

    def hasKey(self, key, region):
        return self._region(region).has_key(key)

    def getRegion(self, region):
        return self._region(region)
Exemplo n.º 34
0
def cachemanager(envpath):
    """Return a file-backed beaker CacheManager rooted under *envpath*.

    Data and lock directories are resolved against the module-level
    ``datadir`` and ``lockdir`` names.
    """
    conf = {
        'cache.type': 'file',
        'cache.data_dir': join(envpath, datadir),
        'cache.lock_dir': join(envpath, lockdir),
    }
    return CacheManager(**parse_cache_config_options(conf))
Exemplo n.º 35
0
    def __init__(self, credentialInterfaces, credentialsChecker):
        """Store the checker and set up an in-memory credential cache."""
        self.credentialInterfaces = credentialInterfaces
        self.checker = credentialsChecker

        # Beaker memory cache; avoids re-validating the same credentials.
        self.cache = CacheManager(**parse_cache_config_options({
            'cache.type': 'memory',
        }))
class TestBeakerBackend(unittest.TestCase):
    """Unit tests for BeakerBackend session persistence."""

    def setUp(self):
        # Memory-backed 'session' region with a 1-second batch timeout.
        reg = {}
        reg['cache.expire'] = 10
        reg['cache.regions'] = 'session'
        reg['cache.type'] = 'memory'
        reg['batch.user.session.timeout'] = 1
        self.cachemgr = CacheManager(**parse_cache_config_options(reg))
        self.backend = BeakerBackend(reg)

    def __get_region(self, key):
        """Return the beaker region that stores the given session id."""
        return self.cachemgr.get_cache_region('edware_session_' + key,
                                              'session')

    def test_create_new_session(self):
        session = Session()
        session.set_session_id('123')
        self.backend.create_new_session(session)
        self.assertIsNotNone(self.__get_region('123').get('123'))

    def test_get_session_from_persistence_with_existing_session(self):
        session = Session()
        session.set_session_id('456')
        session.set_uid('abc')
        self.backend.create_new_session(session)
        lookup = self.backend.get_session('456')
        self.assertEqual(lookup.get_uid(), 'abc')

    def test_get_session_invalid_session(self):
        lookup = self.backend.get_session('idontexist')
        self.assertIsNone(lookup)

    def test_delete_session(self):
        session = Session()
        session.set_session_id('456')
        session.set_uid('abc')
        self.backend.create_new_session(session)
        self.backend.delete_session('456')
        self.assertFalse('456' in self.__get_region('456'))

    def test_update_session(self):
        session = Session()
        session.set_session_id('456')
        session.set_uid('abc')
        self.backend.create_new_session(session)
        session.set_uid('def')
        self.backend.update_session(session)
        lookup = self.__get_region('456').get('456')
        # FIX: assertEquals is a deprecated alias (removed in Python 3.12).
        self.assertEqual(lookup.get_uid(), 'def')

    def test_overwrite_timeout(self):
        session = Session()
        session.set_session_id('1456')
        session.set_uid('abc')
        self.backend.create_new_session(session, overwrite_timeout=True)
        # Timeout is 1 second; after sleeping the session must be gone.
        time.sleep(1)
        self.assertIsNone(self.backend.get_session('1456'))
Exemplo n.º 37
0
    def __init__(self, config):
        """One instance of Globals is created during application
        initialization and is available during requests via the
        'app_globals' variable

        """
        self.cache = CacheManager(**parse_cache_config_options(config))
        # Make sure the transaction-manager extension is shut down cleanly
        # when the process exits.
        atexit.register(tm_pylons.shutdown_extension)
        interface.start(tm_pylons.FakeConfigObj(config))
class TestBeakerBackend(unittest.TestCase):
    """Tests for BeakerBackend session persistence (memory-backed)."""

    def setUp(self):
        # Memory-backed 'session' region with a 1-second batch-user timeout.
        reg = {}
        reg['cache.expire'] = 10
        reg['cache.regions'] = 'session'
        reg['cache.type'] = 'memory'
        reg['batch.user.session.timeout'] = 1
        self.cachemgr = CacheManager(**parse_cache_config_options(reg))
        self.backend = BeakerBackend(reg)

    def __get_region(self, key):
        # Mirrors BeakerBackend's internal region-naming scheme.
        return self.cachemgr.get_cache_region('edware_session_' + key, 'session')

    def test_create_new_session(self):
        session = Session()
        session.set_session_id('123')
        self.backend.create_new_session(session)
        self.assertIsNotNone(self.__get_region('123').get('123'))

    def test_get_session_from_persistence_with_existing_session(self):
        session = Session()
        session.set_session_id('456')
        session.set_uid('abc')
        self.backend.create_new_session(session)
        lookup = self.backend.get_session('456')
        self.assertEqual(lookup.get_uid(), 'abc')

    def test_get_session_invalid_session(self):
        lookup = self.backend.get_session('idontexist')
        self.assertIsNone(lookup)

    def test_delete_session(self):
        session = Session()
        session.set_session_id('456')
        session.set_uid('abc')
        self.backend.create_new_session(session)
        self.backend.delete_session('456')
        self.assertFalse('456' in self.__get_region('456'))

    def test_update_session(self):
        session = Session()
        session.set_session_id('456')
        session.set_uid('abc')
        self.backend.create_new_session(session)
        session.set_uid('def')
        self.backend.update_session(session)
        lookup = self.__get_region('456').get('456')
        # assertEquals is a deprecated alias, removed in Python 3.12.
        self.assertEqual(lookup.get_uid(), 'def')

    def test_overwrite_timeout(self):
        session = Session()
        session.set_session_id('1456')
        session.set_uid('abc')
        self.backend.create_new_session(session, overwrite_timeout=True)
        time.sleep(1)
        self.assertIsNone(self.backend.get_session('1456'))
Exemplo n.º 39
0
    def __init__(self, config):
        """One instance of Globals is created during application
        initialization and is available during requests via the
        'app_globals' variable

        """
        # Single shared MongoDB connection plus the database named in config.
        self.mongodb_conn = Connection(config['mongodb.url'])
        self.mongodb = self.mongodb_conn[config['mongodb.db_name']]
        # Beaker cache manager configured from the same config mapping.
        self.cache = CacheManager(**parse_cache_config_options(config))
Exemplo n.º 40
0
    def __init__(self, app,
                 cookie_cache=True,
                 cache_opts=None,
                 debug=False,
                 cookie_max_age=0,
                 classifiers=None):
        """Wrap *app* with mobile-device detection middleware.

        :param app: downstream WSGI application.
        :param cookie_cache: cache detection results in a cookie.
        :param cache_opts: beaker cache options; DEFAULT_CACHE_OPTIONS when None.
        :param debug: enable verbose logging.
        :param cookie_max_age: cookie lifetime in seconds.
        :param classifiers: a classifier or list of classifiers. The previous
            mutable default ``[]`` was shared across instances; replaced with
            a None sentinel (backward compatible).
        """
        if classifiers is None:
            classifiers = []
        self.debug = debug
        self.cookie_cache = cookie_cache
        cache_manager = CacheManager(
            **parse_cache_config_options(cache_opts or
                                         self.DEFAULT_CACHE_OPTIONS))
        self.cache = cache_manager.get_cache('mobi.devices')

        if self.debug:
            logger.info('MobiDeviceMiddleware start in debug mode.')
        self.app = app
        self.set_cookie_max_age(int(cookie_max_age))
        self.classifiers = classifiers if isinstance(classifiers, list) \
                else [classifiers]
Exemplo n.º 41
0
 def test_flush_data(self):
     # Verifies cache_flush_data() empties beaker namespaces: the first
     # get() caches f.msg; after the flush, get() re-runs createfunc and
     # picks up the changed message.
     cache_opts = {
         'cache.type': 'memory',
         'cache.regions': 'public.data'
     }
     cache_manager = CacheManager(**parse_cache_config_options(cache_opts))
     params = {}
     params['stateCode'] = 'NC'
     mycache = cache_manager.get_cache('my_namespace', **params)
     f = FakeFunc('hello')
     cache = mycache.get("my_namespace {'stateCode': 'NC'}", createfunc=f.fake)
     self.assertEqual(cache, f.msg)
     f.msg = 'bye'
     # Still cached: the changed msg must not be visible yet.
     self.assertNotEqual(cache, f.msg)
     cache_flush_data()
     f.msg = 'bye'
     mycache = cache_manager.get_cache('my_namespace', **params)
     cache = mycache.get("my_namespace {'stateCode': 'NC'}", createfunc=f.fake)
     self.assertEqual(cache, f.msg)
Exemplo n.º 42
0
 def __init__(self):
     self.logger = logging.getLogger('radiovisserver.radiodns')
     # Short-lived (60 s) in-process beaker cache for API lookups.
     self.cache = CacheManager(**parse_cache_config_options(config.CACHE_OPTS)).get_cache('radiodns', expire=60)
     # Durable cross-process cache backed by memcached (binary protocol,
     # consistent ketama hashing).
     self.durablecache = pylibmc.Client(
         [config.MEMCACHED_HOST],
         binary=True,
         behaviors={
             "tcp_nodelay": True,
             "ketama": True,
         }
     )  # CacheManager(**parse_cache_config_options(config.CACHE_OPTS)).get_cache('radiodnsdurable')
Exemplo n.º 43
0
 def __init__(self, path, expire):
   # dbm-backed caches rooted at *path*; default entry lifetime is *expire*.
   cache_opts = {
       'cache.type': 'dbm',
       'cache.data_dir': path,
       'cache.expire': expire
       }
   self.cache_manager = CacheManager(**parse_cache_config_options(cache_opts))
   # ETags are kept for a year so conditional GETs keep working long-term.
   self.etag_cache = self.cache_manager.get_cache('etags', expire=365*24*60*60)
   self.fn_cache = self.cache_manager.get_cache('processed')
   self.url_cache = self.cache_manager.get_cache('urls')
   self.id_cache = self.cache_manager.get_cache('id')
Exemplo n.º 44
0
def setup_caching(config):
    """Build short- and long-term beaker caches from *config*.

    Reads settings from the 'caching' section and returns the tuple
    (short_term_cache, long_term_cache).
    """
    cache_type = config.get('caching', 'type')
    data_dir = config.get('caching', 'data_dir')
    # ConfigParser.get() always returns strings; beaker expects numeric
    # expire values when passed directly to get_cache(), so coerce once here.
    short_expire = int(config.get('caching', 'short_expire'))
    long_expire = int(config.get('caching', 'long_expire'))

    cache_opts = {
        'cache.type': cache_type,
        'cache.data_dir': data_dir,
        'cache.lock_dir': data_dir,
        'cache.regions': 'short_term, long_term',
        'cache.short_term.type': cache_type,
        'cache.short_term.expire': short_expire,
        'cache.long_term.type': cache_type,
        'cache.long_term.expire': long_expire,
    }

    cache_manager = CacheManager(**parse_cache_config_options(cache_opts))
    short_term_cache = cache_manager.get_cache('short_term', expire=short_expire)
    long_term_cache = cache_manager.get_cache('long_term', expire=long_expire)
    return short_term_cache, long_term_cache
Exemplo n.º 45
0
class BeakerBackend(Backend):
    '''
    Manipulates session that resides in persistent storage (memory, memcached)
    '''
    def __init__(self, settings):
        # Beaker cache manager; regions are configured via *settings*.
        self.cache_mgr = CacheManager(**parse_cache_config_options(settings))
        # Shorter expiry (seconds) applied to batch-user sessions.
        self.batch_timeout = int(settings.get('batch.user.session.timeout'))

    def create_new_session(self, session, overwrite_timeout=False):
        '''
        Creates a new session
        '''
        # Creation and update share the same write path.
        self.update_session(session, overwrite_timeout=overwrite_timeout)

    def update_session(self, session, overwrite_timeout=False):
        '''
        Given a session, persist it
        '''
        _id = session.get_session_id()
        region = self.__get_cache_region(_id)
        # Overwrite the timeout for batch user sessions
        if overwrite_timeout:
            region.expiretime = self.batch_timeout
        region.put(_id, session)

    def get_session(self, session_id):
        '''
        Return session from persistent storage
        '''
        region = self.__get_cache_region(session_id)
        if session_id not in region:
            logger.info('Session is not found in cache. It may have expired or connection to memcached is down')
            return None
        return region.get(session_id)

    def delete_session(self, session_id):
        '''
        Delete session from persistent storage
        '''
        # delete from db doesn't work
        region = self.__get_cache_region(session_id)
        if session_id in region:
            # works for memcached
            region.remove_value(session_id)

    def __get_cache_region(self, key):
        # One region per session id, namespaced under the 'session' config.
        return self.cache_mgr.get_cache_region('edware_session_' + key, 'session')

    def clear(self):
        '''
        clear cache
        '''
        # NOTE(review): self.cache_region is never assigned anywhere in this
        # class (only self.cache_mgr exists), so calling clear() raises
        # AttributeError -- confirm intent and fix or remove.
        self.cache_region.clear()
Exemplo n.º 46
0
class SessionManager(object):
    """Hands out per-conversation Session objects backed by beaker caches."""

    def __init__(self, type='memory', **kwargs):
        # Fall back to on-disk defaults when no cache options are supplied.
        if kwargs:
            opts = kwargs
        else:
            opts = {
                'data_dir': '/tmp/messengerbot-cache/data',
                'lock_dir': '/tmp/messengerbot-cache/lock'
            }
        opts['type'] = type
        self.cachemgr = CacheManager(**opts)

    def get_session(self, event):
        # Namespace a conversation as "<recipient-id>.<sender-id>".
        namespace = event['recipient']['id'] + '.' + event['sender']['id']
        return Session(self.cachemgr.get_cache(namespace))
Exemplo n.º 47
0
def make_app(global_conf,
             pub_key,
             key_type='RSA',
             cookie_name=None,
             hdr_prefix=None,
             log_name=None,
             **app_conf):
    """Paste application factory.

    Loads the public key (RSA or DSA), assembles the optional parameters
    and, when caching is enabled in *app_conf*, attaches a beaker-backed
    ticket cache before building the AuthRequestApp.
    """
    pub_key = RSA.load_pub_key(pub_key) if key_type == 'RSA' else DSA.load_pub_key(pub_key)
    params = {}
    if cookie_name is not None:
        params['cookie_name'] = cookie_name
    if hdr_prefix is not None:
        params['hdr_prefix'] = hdr_prefix
    if log_name is not None:
        params['log_name'] = log_name
    cache_opts = parse_cache_config_options(app_conf)
    # Truthiness test instead of '== True' (PEP 8: never compare to True).
    if cache_opts.get('enabled'):
        cache_mgr = CacheManager(**cache_opts)
        cache = cache_mgr.get_cache('tickets_cache')
        params['cache'] = cache

    return AuthRequestApp(pub_key, **params)
Exemplo n.º 48
0
 def __init__(self):
     """initialize the class CacheManager
     More configuration refer to http://beaker.readthedocs.org/en/latest/caching.html#about
     """
     # store the basic configuration
     self.cache_opts = {
         'cache.type': 'file',
         # can be "memory" or "file"
         'cache.data_dir': '/tmp/cache/data',
         'cache.lock_dir': '/tmp/cache/lock'
     }
     # create CacheManager instance with cache_opts
     self.cache = CacheManager(**parse_cache_config_options(self.cache_opts))
     # In addition to the defaults supplied to the CacheManager instance,
     # any of the Cache options can be changed on a per-namespace basis,
     # by setting a type, and expire option.
     # Namespace 'mytemplate': file-backed, entries live for one hour.
     self.tmpl_cache = self.cache.get_cache('mytemplate', type='file', expire=3600)
Exemplo n.º 49
0
class DoiCache(object):
    """Caches BibTeX records fetched from dx.doi.org, keyed by DOI."""

    def __init__(self, config):
        opts = {
            'cache.type': getattr(config, 'citation_cache_type', 'file'),
            'cache.data_dir': getattr(config, 'citation_cache_data_dir', None),
            'cache.lock_dir': getattr(config, 'citation_cache_lock_dir', None),
        }
        manager = CacheManager(**parse_cache_config_options(opts))
        self._cache = manager.get_cache('doi')

    def _raw_get_bibtex(self, doi):
        # Content negotiation: ask dx.doi.org for a BibTeX rendering.
        headers = {'Accept': 'text/bibliography; style=bibtex, application/x-bibtex'}
        response = requests.get("http://dx.doi.org/" + doi, headers=headers)
        return response.text

    def get_bibtex(self, doi):
        # Fetch lazily; later calls for the same DOI are served from cache.
        createfunc = functools.partial(self._raw_get_bibtex, doi)
        return self._cache.get(key=doi, createfunc=createfunc)
Exemplo n.º 50
0
 def __init__(self):
     # _parse_config() was previously called twice with the first result
     # discarded; parse once and reuse (assumes the parser is side-effect
     # free aside from returning the config -- confirmed by the discarded
     # first call's unused return).
     config = self._parse_config()
     self.bcm = BCM(**config)
     self.regions = self.bcm.regions
Exemplo n.º 51
0
"""
The cache settings have to come from the plugged application...

Or will the cache simply be standalone?
After all, is there any real need for this
to be defined in the plugged app itself
"""


from beaker.cache import CacheManager
from beaker.util import parse_cache_config_options

# File-backed beaker cache shared by the elfinder connector.
cache_opts = {
    'cache.type': 'file',
    'cache.data_dir': '/tmp/cache/data',
    'cache.lock_dir': '/tmp/cache/lock'
}

cache_mgr = CacheManager(**parse_cache_config_options(cache_opts))
# Namespace used by the elfinder file-manager integration.
cache = cache_mgr.get_cache('elfinder')


Exemplo n.º 52
0
from auth import verify_orm_rights
import uuid
from beaker.cache import CacheManager
from beaker.util import parse_cache_config_options

from decimal import Decimal  # TODO: remove this later once to_decimal is moved out of here
import traceback
import ujson

# In-memory beaker caches; data_dir/lock_dir are irrelevant for the
# 'memory' backend but kept so switching to 'file' is a one-line change.
cache_opts = {
    'cache.type': 'memory',
    'cache.data_dir': '/tmp/cache/data',
    'cache.lock_dir': '/tmp/cache/lock'
}

cache = CacheManager(**parse_cache_config_options(cache_opts))
# Both namespaces expire after 10 seconds.
erp_cache = cache.get_cache('erp_cache', type='memory', expire=10)
short_cache = cache.get_cache('short_cache', type='memory', expire=10)


def get_context(window_id):
    """Load and return the context dict persisted for *window_id*."""
    path = '../tmp/{window_id}ctx.json'.format(window_id=window_id)
    with open(path, mode='r', encoding='utf-8') as json_file:
        return ujson.loads(json_file.read())


def set_context(window_id, ctx_dict):
    #print('Im on set_context', window_id)
Exemplo n.º 53
0
class RadioDns_():
    """Class to handle connection to the radioDns database: listing of topics and logins, special topic rules"""

    # Lifetime (seconds) of entries written to the durable memcached cache.
    CACHE_DURATION = 600

    def __init__(self):
        self.logger = logging.getLogger('radiovisserver.radiodns')
        # Short-lived (60 s) in-process beaker cache for API lookups.
        self.cache = CacheManager(**parse_cache_config_options(config.CACHE_OPTS)).get_cache('radiodns', expire=60)
        # Durable cross-process cache backed by memcached (binary protocol,
        # consistent ketama hashing).
        self.durablecache = pylibmc.Client(
            [config.MEMCACHED_HOST],
            binary=True,
            behaviors={
                "tcp_nodelay": True,
                "ketama": True,
            }
        )  # CacheManager(**parse_cache_config_options(config.CACHE_OPTS)).get_cache('radiodnsdurable')

    def do_query(self, url, params):
        """GET an API endpoint; return the parsed JSON, or None on any failure."""
        try:
            return requests.get(config.API_URL + url, data=params).json()
        except:
            # Omit params from the log: they may contain passwords.
            self.logger.error("Error trying query %s" % (url,))
            return None

    def check_auth(self, user, password, ip):
        """Check an username and password"""

        self.logger.debug("Checking username and password for %s" % (user,))

        result = self.do_query('check_auth', {'username': user, 'password': password, 'ip': ip})

        if result:
            if result['result']:
                self.logger.debug("Password ok")
                return True
            else:
                self.logger.warning("Cannot auth: %s" % (result['error'],))
                return False
        else:
            self.logger.error("No reply when check_auth ?")
            return False

    def get_channels(self, station_id):
        """Return the list of channels for a station. Use cachinig of 1 minute"""

        self.logger.debug("Getting channels of %s" % (station_id,))

        def get_channels():
            # Cache-miss callback: query the API and return the raw list.
            self.logger.debug("Query channels of %s, nothing in cache !" % (station_id,))

            result = self.do_query('get_channels', {'station_id': station_id})

            if result is None:
                self.logger.error("No reply when get_channels ?")
                return []

            return result['list']

        return self.cache.get(key="get_channels-" + station_id, createfunc=get_channels)

    def update_channel_topics(self):
        """Update the channel cache from database"""
        try:
            self.logger.debug("Updating channel topic list for durable cache.")
            new_topics = []
            for (channel, id) in self.get_all_vis_channels():
                new_topics.append(channel)

            self.logger.debug(
                "Setting radiovis_channels_topics channel topic list with %s elements." % (len(new_topics)))
            # NOTE(review): references RadioDns.CACHE_DURATION while this
            # class is named RadioDns_ -- a module-level alias must exist
            # elsewhere for this to resolve; confirm.
            self.durablecache.set('radiovis_channels_topics', new_topics, time=RadioDns.CACHE_DURATION)
        except:
            e = sys.exc_info()[0]
            self.logger.error("Error trying to update channel topics in durable cache. %s" % (e))
            return

    def contains_channel_topic(self, topic):
        """Checks if cache contains a particular channel"""
        try:
            # Normalize to ignore /image and /text
            # NOTE(review): str.rstrip strips a *character set*, not a
            # suffix -- this can also eat trailing i/m/a/g/e/t/x characters
            # from the base topic; confirm intent.
            topic = topic.rstrip('image').rstrip('text')
            channel_topics = self.durablecache.get('radiovis_channels_topics')
            return topic in channel_topics
        except:
            self.logger.error("Error trying to check channel topic %s in cache." % (topic))
            return None

    def convert_fm_topic_to_gcc(self, topic):
        """Convert a fm topic using gcc instead of country code"""
        # /topic/fm/gcc/  <=> /topic/fm/cc/ . If it's a gcc, topic[13] = '/'
        if not topic[:10] == "/topic/fm/" or topic[13] == '/':
            return topic

        self.logger.debug("Converting %s to use gcc" % (topic,))

        # Fast path: reuse a previous conversion from the durable cache.
        try:
            cachevalue = self.durablecache.get('radiovis_isoecc_' + topic)
            if cachevalue:
                return cachevalue
        except (pylibmc.ConnectionError, pylibmc.ServerDown) as e:
            self.logger.warning("No memcached backend is running! %s" % (e,))

        def convert_topic():
            # Cache-miss callback: ask the API for the gcc of the topic's cc.

            splited_topic = topic.split('/')

            cc = splited_topic[3]

            self.logger.debug("Querying gcc value for %s, nothing in cache !" % (cc,))

            result = self.do_query('get_gcc', {'cc': cc})
            if result is None:
                self.logger.error("No reply when convert_fm_topic_to_gcc ?")
                return topic  # Return the topic

            splited_topic[3] = result['gcc']

            gcc_topic = '/'.join(splited_topic)

            self.logger.debug("Setting radiovis_isoecc_ to durable cache topic list with %s." % (gcc_topic))
            try:
                # NOTE(review): RadioDns vs RadioDns_ -- see update_channel_topics.
                self.durablecache.set('radiovis_isoecc_' + topic, gcc_topic, time=RadioDns.CACHE_DURATION)
            except (pylibmc.ConnectionError, pylibmc.ServerDown) as e:
                self.logger.warning("No memcached backend is running! %s" % (e,))
            return gcc_topic

        return self.cache.get(key='topic-to-gcc-' + topic, createfunc=convert_topic)

    def check_special_matchs(self, topic, topics):
        """Return true if topic is in the list of topics, using specials rules (eg. fm)"""

        # Only a special rule for fm
        if not topic[:10] == "/topic/fm/":
            return None

        # Check matches using gcc version
        topic = self.convert_fm_topic_to_gcc(topic)

        for subTopic in topics:
            subTopicConverted = self.convert_fm_topic_to_gcc(subTopic)
            if subTopicConverted == topic:
                return subTopic

        return None

    def get_all_channels(self):
        """Return the list of all channels"""

        result = self.do_query('get_all_channels', {})

        if result is None:
            self.logger.error("No reply when get_all_channels ?")
            return []

        retour = []

        for (topic, id) in result['list']:
            retour.append((self.convert_fm_topic_to_gcc(topic), id))

        return retour

    def get_all_vis_channels(self):
        """Return the list of all VIS channel that have an image"""

        result = self.do_query('get_all_vis_channels', {})

        if result is None:
            self.logger.error("No reply when get_all_vis_channels ?")
            return []

        retour = []

        for (topic, id) in result['list']:
            retour.append((self.convert_fm_topic_to_gcc(topic), id))

        return retour

    def get_channel_default(self, id):
        """Return the default image, link and message for a channel"""

        # Get out of cache if available
        try:
            cachevalue = self.durablecache.get('get_channel_default_' + str(id))
            if cachevalue:
                return cachevalue
        except (pylibmc.ConnectionError, pylibmc.ServerDown) as e:
            self.logger.warning("No memcached backend is running! %s" % (e,))

        result = self.do_query('get_channel_default', {'id': id})

        if result is None:
            self.logger.error("No reply when get_channel_default %s ?" % (id,))
            return []

        # Save to cache
        try:
            self.durablecache.set('get_channel_default_' + str(id), result['info'], time=RadioDns.CACHE_DURATION)
        except (pylibmc.ConnectionError, pylibmc.ServerDown) as e:
            self.logger.warning("No memcached backend is running! %s" % (e,))

        return result['info']

    def add_log(self, topic, message, headers, timestamp):
        """Add a log entry"""

        result = self.do_query('add_log', {'topic': topic, 'message': str(message), 'headers': json.dumps(headers),
                                           'timestamp': timestamp})

        if result is None:
            self.logger.error("No reply when add_log %s %s %s %s ?" % (topic, message, headers, timestamp,))

    def cleanup_logs(self, max_age):
        """Clean logs"""
        result = self.do_query('cleanup_logs', {'max_age': max_age})

        if result is None:
            self.logger.error("No reply when cleanup_logs ?")
Exemplo n.º 54
0
 def __init__(self, settings):
     # Beaker cache manager; regions are configured via *settings*.
     self.cache_mgr = CacheManager(**parse_cache_config_options(settings))
     # Shorter expiry (seconds) applied to batch-user sessions.
     self.batch_timeout = int(settings.get('batch.user.session.timeout'))
Exemplo n.º 55
0
class Source:
  # NOTE(review): Python 2 syntax ("except urllib2.HTTPError, e:") -- this
  # class cannot run under Python 3 as written.
  def __init__(self, path, expire):
    # dbm-backed caches rooted at *path*; default entry lifetime *expire*.
    cache_opts = {
        'cache.type': 'dbm',
        'cache.data_dir': path,
        'cache.expire': expire
        }
    self.cache_manager = CacheManager(**parse_cache_config_options(cache_opts))
    # ETags live a year so conditional GETs keep working long after fetch.
    self.etag_cache = self.cache_manager.get_cache('etags', expire=365*24*60*60)
    self.fn_cache = self.cache_manager.get_cache('processed')
    self.url_cache = self.cache_manager.get_cache('urls')
    self.id_cache = self.cache_manager.get_cache('id')

  def url_cache_get_or_abort(self, url, code):
    # Return the cached body for *url* or raise SourceError(code).
    # Uses beaker's private _get_value API -- fragile across versions.
    try:
      val = self.url_cache._get_value(url)
      if val.has_value():
        ret = val._get_value()
        if ret:
          return ret
      raise SourceError(code)
    except Exception:
      raise SourceError(code)

  def url_cache_get_or_raise(self, url, error):
    # Like url_cache_get_or_abort but re-raises *error*; takes the
    # namespace read lock around the raw value access.
    val = self.url_cache._get_value(url)
    if val.has_value():
      val.namespace.acquire_read_lock()
      try:
        _stored, _expired, ret = val._get_value()
        if ret:
          return ret
      except Exception:
        raise error
      finally:
        val.namespace.release_read_lock()
    raise error

  def cache(self, *args, **kwargs):
    # Thin pass-through to beaker's decorator-style cache.
    return self.cache_manager.cache(*args, **kwargs)

  def cache_with_id(self, key):
    # NOTE(review): 'decorate' never returns 'wrapper' and this method
    # never returns 'decorate' -- as written the decorator yields None;
    # the original source was probably truncated here. Confirm.
    def decorate(fn):
      def wrapper(*args, **kwargs):
        def create_id_cache_value():
          return fn(*args, **kwargs)
        return self.id_cache.get(key=key, createfunc=create_id_cache_value)

  def source(self, url):
    # Decorator: fetch *url* with ETag revalidation, caching the body by
    # URL and the processed result by function name + URL.
    # NOTE(review): like cache_with_id, the 'return wrapper' /
    # 'return decorate' lines are missing -- confirm against the original.
    def decorate(fn):
      def wrapper(*args, **kwargs):
        def create_url_cache_value():
          headers = {}
          stored_etag = self.etag_cache.get(key=url, createfunc=lambda:None)
          if stored_etag:
            headers = {'If-None-Match': stored_etag}
          request = urllib2.Request(url, headers=headers)
          error = None
          error_code = None
          try:
            response = urllib2.urlopen(request)
          except urllib2.HTTPError, e:
            error_code = e.code
            error = e
          except urllib2.URLError, e:
            error = e
          if error_code == 304:
            # Not modified: serve the cached copy.
            return self.url_cache_get_or_raise(url, error)
          if error_code in (404, 410, 451):
            return flask.abort(e.code)
          if error:
            return self.url_cache_get_or_raise(url, error)
          etag = response.info().getheader('ETag', None)
          if etag:
            self.etag_cache.put(key=url, value=etag)
          return response.read()

        def create_fn_cache_value():
          if url:
            val = self.url_cache.get(key=url, createfunc=create_url_cache_value)
            return fn(val, *args, **kwargs)
          else:
            return fn(*args, **kwargs)

        try:
          return self.fn_cache.get(key=fn.__name__+url, createfunc=create_fn_cache_value)
        except SourceError, e:
          return flask.abort(e.code)
Exemplo n.º 56
0
	# NOTE(review): indentation mixes tabs and spaces (a TabError under
	# Python 3) and neither cache nor tmpl_cache is returned -- confirm
	# against the original source before use.
	def createCacheFile():
        cache = CacheManager(**parse_cache_config_options(cache_opts))
		tmpl_cache = cache.get_cache('mytemplate', type='file', expire=5)
Exemplo n.º 57
0
class CacheManagerExt(Component):
    """Component wrapper around a file-backed beaker cache."""

    def get_cache(self, key, createfunc):
        """Return cached data for *key*, computing it via *createfunc* on a miss.

        :type key: String
        :param key: key name, present the unique key each time caching

        :type createfunc: function object
        :param createfunc: zero-argument callable whose return value is
            cached; may return any basic object (String, int, tuple, list,
            dict, etc.)

        :rtype: String
        :return: the value mapped to the key

        :example:
            CacheManager.get_cache(key="abc", createfunc=func)
        """
        return self.tmpl_cache.get(key=key, createfunc=createfunc)

    def invalidate(self, key):
        """Remove the key-value pair from the cache.

        :type key: String
        :param key: key name, present the unique key each time caching

        :rtype: bool
        :return: True if the pair was removed correctly, otherwise False
        """
        try:
            self.tmpl_cache.remove_value(key=key)
        except Exception as exc:
            self.log.error(exc)
            return False
        return True

    def clear(self):
        """Clear the whole cache.

        :rtype: bool
        :return: True if the cache was cleared correctly, otherwise False
        """
        try:
            self.tmpl_cache.clear()
        except Exception as exc:
            self.log.error(exc)
            return False
        return True

    def __init__(self):
        """Initialize the cache manager.

        See http://beaker.readthedocs.org/en/latest/caching.html#about for
        the full set of options.
        """
        # Basic file-backed configuration ("memory" would also work).
        self.cache_opts = {
            'cache.type': 'file',
            'cache.data_dir': '/tmp/cache/data',
            'cache.lock_dir': '/tmp/cache/lock'
        }
        self.cache = CacheManager(**parse_cache_config_options(self.cache_opts))
        # Per-namespace override: 'mytemplate' is file-backed, 1-hour expiry.
        self.tmpl_cache = self.cache.get_cache('mytemplate', type='file', expire=3600)
Exemplo n.º 58
0
import gevent.pool
# Shared greenlet pool bounding concurrent work to 40 greenlets.
pool = gevent.pool.Pool(40)

import soundcloud
from time import time, strptime, mktime,clock
from beaker.cache import CacheManager
from beaker.util import parse_cache_config_options

cache_opts = {
    'cache.type': 'file',
    'cache.data_dir': 'cache/data',
    'cache.lock_dir': 'cache/lock'
}

# Cache stores search results for 24 hours (dbm-backed).
cm = CacheManager(**parse_cache_config_options(cache_opts))
cache = cm.get_cache('trackcache', type='dbm', expire=3600*24)


client = soundcloud.Client(client_id='af912f440f0d027065e7351089b08a52')

def getPlaysPer(track):
	"""Average plays per day since the track was created."""
	# created_at carries a trailing timezone suffix; drop the last 6 chars
	# before parsing.
	created = strptime(track.created_at[:-6], "%Y/%m/%d %H:%M:%S")
	age_days = (time() - mktime(created)) / (3600*24)
	return track.playback_count / age_days

def getHype(track):
	# NOTE(review): no return statement is visible -- this copy appears
	# truncated; 'hype' is computed and then discarded. Confirm against
	# the original source.
	if(track.playback_count > 500):
		hyperatio = float(track.favoritings_count) / float(track.playback_count)
		playsper = getPlaysPer(track)
		hype = (track.playback_count*playsper)**(hyperatio)
Exemplo n.º 59
0
import logging
import time
from datetime import datetime

from beaker.cache import CacheManager
from beaker.util import parse_cache_config_options

from app import app
from ..base import Sensor
from prod_config import ENDOMONDO_EMAIL, ENDOMONDO_PASSWORD
from .client import MobileApi

log = logging.getLogger(__name__)

opts = {"cache.type": "file", "cache.file_dir": ".cache", "cache.data_dir": ".cache"}
cache_manager = CacheManager(**parse_cache_config_options(opts))
cache = cache_manager.get_cache("endomondo_data", type="file", expire=3600 * 24)


def datetime_to_integer_unixtime(dt):
    """Convert a datetime to an integer unix timestamp (local time)."""
    try:
        parts = dt.timetuple()
    except AttributeError:  # pragma: no cover
        raise TypeError("datetime_to_unixtime expects datetime object, got %s instead" % type(dt))
    return int(time.mktime(parts))


class EndomondoSensor(Sensor):
    LOOP_DELAY = 600