def __init__(self, **kwargs):
    """Initialize a Redis client backed by a thread-safe blocking pool.

    :param kwargs: connection options forwarded to BlockingConnectionPool
        (e.g. host, port, password, db). Any ``username`` entry is
        stripped before the pool is built.
    """
    self.name = ""
    # BUG FIX: the original used kwargs.pop("username") with no default,
    # which raises KeyError whenever the caller does not pass a username.
    kwargs.pop("username", None)
    self.__conn = Redis(connection_pool=BlockingConnectionPool(
        decode_responses=True, **kwargs))
def get_proxy(self):
    """Fetch one random proxy from the 'useful_proxy' hash in Redis.

    Retries until at least one proxy is present in the hash.

    :return: a proxy address decoded as UTF-8 text
    """
    import time  # local import: used only for the retry back-off

    session = Redis(connection_pool=BlockingConnectionPool(
        host='localhost', port=6379))
    while True:
        # Keys come back as bytes because decode_responses is not set.
        proxies = list(session.hgetall('useful_proxy').keys())
        if proxies:
            return choice(proxies).decode('utf8')
        # BUG FIX: the original wrapped choice() in a bare ``except: pass``
        # inside ``while True`` — an empty pool caused a tight busy loop,
        # and real connection errors (even KeyboardInterrupt) were
        # silently swallowed. Now only the empty-pool case is retried,
        # with a short sleep; Redis errors propagate to the caller.
        time.sleep(0.1)
def __init__(self, name, host, port, db=2):
    """Bind a named Redis hash to a client on a blocking (thread-safe) pool.

    :param name: hash key name used by this store
    :param host: Redis server host
    :param port: Redis server port
    :param db: Redis logical database index (defaults to 2)
    """
    self.name = name
    pool = BlockingConnectionPool(host=host, port=port, db=db)
    self.conn = Redis(connection_pool=pool)
    # Counters driving expiry: aged cookies are dropped once enough
    # new cookies have been added.
    self.put_cnt = 0
    self.get_cnt = 0
    self.cache = None
def configure_caches(settings):
    """Configure every dogpile cache region to use a shared Redis pool.

    Reads cache settings from *settings* (key prefix, version, Redis
    URL/db, pool sizing and timeouts), updates the module-level
    KEY_PREFIX / CACHE_VERSION, rewires each region in ``caches`` to a
    single BlockingConnectionPool, then starts periodic status refresh.
    """
    global KEY_PREFIX
    global CACHE_VERSION
    # NOTE(review): declared global but never assigned in this function —
    # presumably initialize_cache_status() sets it; confirm.
    global cache_status
    KEY_PREFIX = settings['redis.cache_key_prefix']
    # append a timestamp to the cache key when running in dev. mode
    # (to make sure that the cache values are invalidated when the dev.
    # server reloads when the code changes)
    cache_version = settings['cache_version']
    if settings['cache_version_timestamp'] == 'True':
        cache_version = '{0}-{1}'.format(cache_version, int(time.time()))
    CACHE_VERSION = cache_version
    log.debug('Cache version {0}'.format(CACHE_VERSION))
    # Select the logical database by appending ?db=N to the base URL.
    redis_url = '{0}?db={1}'.format(settings['redis.url'],
                                    settings['redis.db_cache'])
    log.debug('Cache Redis: {0}'.format(redis_url))
    redis_pool = BlockingConnectionPool.from_url(
        redis_url,
        max_connections=int(settings['redis.cache_pool']),
        socket_connect_timeout=float(settings['redis.socket_connect_timeout']),
        socket_timeout=float(settings['redis.socket_timeout']),
        timeout=float(settings['redis.pool_timeout']))
    for cache in caches:
        cache.configure(
            'dogpile.cache.redis',
            arguments={
                'connection_pool': redis_pool,
                # non-thread-local lock so the distributed lock can be
                # released from a different thread than acquired it
                "thread_local_lock": False,
                'distributed_lock': True,
                'lock_timeout': 15,  # 15 seconds (dogpile lock)
                'redis_expiration_time': int(settings['redis.expiration_time'])
            },
            replace_existing_backend=True)
    # Status refresh period is optional; default to 30 seconds.
    if settings.get('redis.cache_status_refresh_period'):
        refresh_period = int(settings['redis.cache_status_refresh_period'])
    else:
        refresh_period = 30
    initialize_cache_status(refresh_period)
def configure_caches(settings):
    """Set up the dogpile cache regions on a shared Redis blocking pool.

    Refreshes the module-level key prefix and cache version, builds one
    BlockingConnectionPool from the configured Redis URL, points every
    region in ``caches`` at it, and starts the cache status tracker.
    """
    global KEY_PREFIX
    global CACHE_VERSION
    global cache_status
    KEY_PREFIX = settings['redis.cache_key_prefix']
    # In dev mode a timestamp suffix invalidates all cached values
    # whenever the server reloads on code changes.
    version = settings['cache_version']
    if settings['cache_version_timestamp'] == 'True':
        version = '{0}-{1}'.format(version, int(time.time()))
    CACHE_VERSION = version
    log.debug('Cache version {0}'.format(CACHE_VERSION))
    redis_url = '{0}?db={1}'.format(
        settings['redis.url'], settings['redis.db_cache'])
    log.debug('Cache Redis: {0}'.format(redis_url))
    pool = BlockingConnectionPool.from_url(
        redis_url,
        max_connections=int(settings['redis.cache_pool']),
        timeout=3,  # 3 seconds (waiting for connection)
        socket_timeout=3  # 3 seconds (timeout on open socket)
    )
    for region in caches:
        region.configure(
            'dogpile.cache.redis',
            arguments={
                'connection_pool': pool,
                'distributed_lock': True,
                'lock_timeout': 15  # 15 seconds (dogpile lock)
            },
            replace_existing_backend=True
        )
    # Status refresh period is optional; fall back to 30 seconds.
    raw_period = settings.get('redis.cache_status_refresh_period')
    refresh_period = int(raw_period) if raw_period else 30
    cache_status = CacheStatus(refresh_period)
def __init__(self, db_url, **kwargs):
    """Build a Redis client from a database URL.

    :param db_url: connection URL; falls back to the DATABASE_URL
        environment variable when falsy
    :param kwargs: extra connection options overriding the parsed values
    :raises ValueError: when neither db_url nor DATABASE_URL is set
    """
    self.db_url = db_url or os.environ.get('DATABASE_URL')
    if not self.db_url:
        raise ValueError('You must provide a db_url.')
    # BUG FIX: parse the resolved URL. The original parsed the raw
    # ``db_url`` argument, which is None when only DATABASE_URL is set.
    o = utils.db_url_parser(self.db_url)
    self.dbcfg = {
        'host': o['host'],
        'port': o['port'] or 6379,
        'username': o['usr'],
        'password': o['pwd'],
        'decode_responses': True,
    }
    # The db component may be missing, None, or non-numeric; default to 0.
    # (The original computed a ternary for 'db' and then unconditionally
    # overwrote it here, and its except clause missed KeyError/TypeError.)
    try:
        self.dbcfg['db'] = int(o['db'])
    except (KeyError, TypeError, AttributeError, ValueError):
        self.dbcfg['db'] = 0
    self.dbcfg.update(kwargs)
    self._conn = Redis(connection_pool=BlockingConnectionPool(**self.dbcfg))
def __init__(self, request):
    """Open a Redis connection to the SA server resolved from *request*."""
    host, port = get_sa_server(request)
    pool = BlockingConnectionPool(host=host, port=int(port))
    self.__conn = Redis(connection_pool=pool)
def __init__(self, name, **kwargs):
    """Remember *name* and connect through a blocking connection pool."""
    self.name = name
    pool = BlockingConnectionPool(**kwargs)
    self.conn = Redis(connection_pool=pool)
def _make_client(self, info):
    """Return a StrictRedis client whose pool blocks when exhausted."""
    return StrictRedis(connection_pool=BlockingConnectionPool(**info))
def make_redis_conn(config):
    """Create a Redis client for the cache settings in *config*.

    Expects redis_cache_host/port/db/max_conns keys; numeric values are
    coerced from their string form.
    """
    pool = BlockingConnectionPool(
        host=config["redis_cache_host"],
        port=int(config['redis_cache_port']),
        db=int(config['redis_cache_db']),
        max_connections=int(config['redis_cache_max_conns']))
    return Redis(connection_pool=pool)
def __init__(self, request):
    """Connect to the SA Redis server described by *request*.

    The password is only forwarded to the pool when it is non-empty,
    matching servers that run without authentication.
    """
    host, port, password = getSAServer(request)
    pool_kwargs = {'host': host, 'port': int(port)}
    if password != '':
        pool_kwargs['password'] = password
    self.__conn = Redis(connection_pool=BlockingConnectionPool(**pool_kwargs))
def __init__(self):
    """Connect to Redis using the module-level REDIS_* settings."""
    pool = BlockingConnectionPool(
        host=REDIS_HOST,
        password=REDIS_PASSWORD,
        db=REDIS_DB,
        decode_responses=True)
    self.conn = Redis(connection_pool=pool)
from stats import *

#from global_var import hbase_conn
#from web_global import hbase_conn

# JSON-RPC style error payloads returned by the task API.
INTERNAL_ERROR = {"code": 121101, "message": "server internal error"}
PARAMS_ERROR = {"code": 121702, "message": "params is error"}
NO_TASK_ERROR = {"code": 121701, "message": "no such task"}
IN_PROGRESS = {"code": 121703, "message": "in progress"}

#from global_var import hbase_conn
#hbase_conn= happybase.ConnectionPool(int(config['hbase']['max_conns']),
#                                     host=config['hbase']['hostname'],
#                                     port=int(config['hbase']['port']))

# Shared Redis client on a thread-safe blocking pool, sized from config.
redis_conn = Redis(connection_pool=BlockingConnectionPool(
    max_connections=int(config["redis_cache"]["max_conns"]),
    host=config["redis_cache"]["host"],
    port=int(config['redis_cache']['port']),
    db=int(config['redis_cache']['normal_db']),
    socket_timeout=int(config['redis_cache']['timeout'])))


def wrap_error(code, message, data):
    """Serialize a JSON-RPC 2.0 error response for *code*/*message*/*data*."""
    # NOTE(review): "id" is the literal string "null", not JSON null —
    # appears intentional for this API; confirm against clients.
    error = {"code": code, "message": message, "data": data}
    return json.dumps({"jsonrpc": "2.0", "id": "null", "error": error})
def create_block_redis_connection_pool(host='localhost', port=6379, db=0, password=None, socket_timeout=None, socket_connect_timeout=None, socket_keepalive=None, socket_keepalive_options=None, unix_socket_path=None, encoding='utf-8', encoding_errors='strict', charset=None, errors=None, decode_responses=False, retry_on_timeout=False, ssl=False, ssl_keyfile=None, ssl_certfile=None, ssl_cert_reqs=None, ssl_ca_certs=None, max_connections=None): """ StrictRedis默认使用的连接池非线程安全 创建一个线程安全的连接池 max_connections: 为None时,默认为50, @see BlockingConnectionPool """ # copied from pyredis.StrictRedis __init__ # but use BlockingConnectionPool instead of ConnectionPool if charset is not None: warnings.warn( DeprecationWarning( '"charset" is deprecated. Use "encoding" instead')) encoding = charset if errors is not None: warnings.warn( DeprecationWarning( '"errors" is deprecated. Use "encoding_errors" instead')) encoding_errors = errors kwargs = { 'db': db, 'password': password, 'socket_timeout': socket_timeout, 'encoding': encoding, 'encoding_errors': encoding_errors, 'decode_responses': decode_responses, 'retry_on_timeout': retry_on_timeout } # based on input, setup appropriate connection args if unix_socket_path is not None: kwargs.update({ 'path': unix_socket_path, 'connection_class': UnixDomainSocketConnection }) else: # TCP specific options kwargs.update({ 'host': host, 'port': port, 'socket_connect_timeout': socket_connect_timeout, 'socket_keepalive': socket_keepalive, 'socket_keepalive_options': socket_keepalive_options, }) if ssl: kwargs.update({ 'connection_class': SSLConnection, 'ssl_keyfile': ssl_keyfile, 'ssl_certfile': ssl_certfile, 'ssl_cert_reqs': ssl_cert_reqs, 'ssl_ca_certs': ssl_ca_certs, }) if max_connections: kwargs["max_connections"] = max_connections connection_pool = BlockingConnectionPool(**kwargs) return connection_pool
def __init__(self, name, host, port):
    """Store *name* and open a Redis session on a blocking pool."""
    self.name = name
    pool = BlockingConnectionPool(host=host, port=port)
    self.session = Redis(connection_pool=pool)
def __init__(self):
    """Connect to Redis using module-level REDIS_HOST / REDIS_PWD."""
    pool = BlockingConnectionPool(host=REDIS_HOST, password=REDIS_PWD)
    self.db = Redis(connection_pool=pool)
# ------------ 状态转发服务器 ------------ STATUS_SERVER = '10.2.161.15' PORT = 8022 # ------------ redis 相关配置 ------------ REDIS_HOST = '10.2.161.15' REDIS_PORT = 6379 REDIS_DB_NUM = 1 REDIS_PASSWORD = '******' # ------------- 连接池 --------------- # redis数据库 # 阻塞式连接池 pool = BlockingConnectionPool(max_connections=20, timeout=5, socket_timeout=5, \ host=REDIS_HOST, port=REDIS_PORT, db=REDIS_DB_NUM, password=REDIS_PASSWORD) REDIS_DB = redis.StrictRedis(connection_pool=pool) # -------------- 日志接收服务器 --------- LOG_HOST = 'localhost' LOG_PORT = 9030 # ------------ 初始化 日志记录器 ------------ initlog('task.generator', LOG_HOST, LOG_PORT)
def __init__(self, tb_name, host, port):
    """Bind table *tb_name* to a Redis client on a blocking pool."""
    self.name = tb_name
    pool = BlockingConnectionPool(host=host, port=port)
    self.__conn = Redis(connection_pool=pool)
def __init__(self, **kwargs):
    """Create the underlying thread-safe blocking pool from *kwargs*.

    All keyword options are forwarded unchanged to BlockingConnectionPool.
    """
    self._pool = BlockingConnectionPool(**kwargs)