def __init__(self, **kw):
    """Build the AMQP connection config and connect.

    Precedence (later wins): module defaults, then the "amqp" section of
    settings, then caller keyword overrides.
    """
    config = merge(defaults, settings.like("amqp"), kw)
    # Fail fast if any mandatory connection parameter is absent.
    require(self, config, (
        "port", "username", "password", "host", "vhost", "exchange", "queue",
    ))
    # Expose every config key as an instance attribute, and keep the
    # merged dict around as well.
    self.__dict__.update(config)
    self.config = config
    self.reconnect()
def __init__(self, **kwargs):
    """Create a pycassa connection pool and the content column-family client.

    Precedence (later wins): module defaults, then the "cassandra" section
    of settings, then caller keyword overrides.
    """
    config = merge(defaults, settings.like('cassandra'), kwargs)
    require(self, config, ('cf_content', 'keyspace', 'servers', 'port'))
    # Every config key (including timeout/max_retries/etc. from defaults)
    # becomes an instance attribute.
    self.__dict__.update(config)
    # Size the pool at two connections per server node.
    self.pool_size = len(self.servers) * 2
    pool_options = dict(
        timeout=self.timeout,
        max_retries=self.max_retries,
        pool_timeout=self.pool_timeout,
        pool_size=self.pool_size,
        recycle=self.recycle,
        prefill=self.prefill,
    )
    self.pool = pycassa.ConnectionPool(self.keyspace, self.servers, **pool_options)
    self.client = pycassa.ColumnFamily(self.pool, self.cf_content)
def __init__(self, **kw):
    """Validate MySQL settings and create the connection pool.

    Precedence (later wins): module defaults, then the "mysql" section of
    settings, then caller keyword overrides.
    """
    config = merge(defaults, settings.like("mysql"), kw)
    required_keys = ("host", "password", "username", "database")
    require(self, config, required_keys)
    self.config = config
    self.pool = MysqlConnectionPool(config)
def __init__(self, **kw):
    """Create the memcached client for the header cache.

    Starts from the generic "memcached" settings, then layers any
    dedicated "header_cache" settings on top, then caller keyword
    overrides — so a specialized cache location wins when configured.
    """
    self.config = merge(
        settings.like("memcached"),
        settings.like("header_cache"),
        kw,
    )
    self.client = memcached.Memcached(**self.config)
def enable():
    """(Re)create the module-level rate-limit cache client.

    Rebinds ``ratelimit_cache`` from the "ratelimit_cache" settings,
    turning rate limiting on even if it was disabled at import time.
    """
    global ratelimit_cache
    ratelimit_cache = Memcached(**settings.like("ratelimit_cache"))
self.key = md5(resource).hexdigest() limits[resource] = self def token(self): for limit, (rate, interval) in self.limits.iteritems(): timestamps = window(interval) keys = [self.key+timestamp for timestamp in timestamps] results = ratelimit_cache.get_multi(*keys) gets = sum(map(int, results.values())) if gets >= rate: logger.error("RateLimit %s exceeded for %s" % (limit, self.resource)) return False if self.key+timestamps[0] not in results: ratelimit_cache.add(self.key+timestamps[0], '0') ratelimit_cache.incr(self.key+timestamps[0], 1) return True if not settings.get("disable_ratelimit", False): ratelimit_cache = Memcached(**settings.like("ratelimit_cache")) else: ratelimit_cache = None def enable(): global ratelimit_cache ratelimit_cache = Memcached(**settings.like("ratelimit_cache")) def disable(): global ratelimit_cache ratelimit_cache = None
# NOTE(review): duplicate of the rate-limit unit above — presumably a
# method of the rate-limit class; the enclosing class header is not
# visible in this chunk.
def token(self):
    """Consume one rate-limit token for this resource.

    Checks every configured (rate, interval) limit against counters kept
    in the shared memcached-backed ``ratelimit_cache``. Returns False as
    soon as any limit is exceeded; otherwise bumps the current window's
    counter and returns True.
    """
    for limit, (rate, interval) in self.limits.iteritems():
        # All window timestamps covering this interval.
        timestamps = window(interval)
        cache_keys = [self.key + stamp for stamp in timestamps]
        results = ratelimit_cache.get_multi(*cache_keys)
        # Total requests observed across the window.
        hits = sum(map(int, results.values()))
        if hits >= rate:
            logger.error("RateLimit %s exceeded for %s" % (limit, self.resource))
            return False
        # NOTE(review): in the collapsed original the placement of this
        # counter update relative to the for-loop is ambiguous; it is
        # reconstructed inside the loop so every limit's current window
        # is counted — confirm against upstream history.
        current = self.key + timestamps[0]
        if current not in results:
            # Seed the counter so incr has something to increment.
            ratelimit_cache.add(current, '0')
        ratelimit_cache.incr(current, 1)
    return True

# Module-level cache handle: live client unless rate limiting is
# explicitly disabled in settings.
if not settings.get("disable_ratelimit", False):
    ratelimit_cache = Memcached(**settings.like("ratelimit_cache"))
else:
    ratelimit_cache = None

def enable():
    """Turn rate limiting on by (re)creating the shared cache client."""
    global ratelimit_cache
    ratelimit_cache = Memcached(**settings.like("ratelimit_cache"))

def disable():
    """Turn rate limiting off by dropping the shared cache client."""
    global ratelimit_cache
    ratelimit_cache = None
def __init__(self, **kw):
    """Validate memcached settings and start with an empty connection pool.

    Precedence (later wins): module defaults, then the "memcached"
    section of settings, then caller keyword overrides. Connections are
    created lazily, so the pool begins empty.
    """
    config = merge(defaults, settings.like("memcached"), kw)
    require(self, config, ("host", "port"))
    self.config = config
    self.pool = {}