def get_redis_client():
    """Build a Redis client backed by a pool created from the configured URL."""
    pool = ConnectionPool.from_url(settings.REDIS_SERVER_URL)
    return StrictRedis(connection_pool=pool)
def __init__(self, rabbit_cfg, rmq_ws_cfg, db_cfg, redis_cfg, worker_type):
    """
    Wire up every backend the worker needs, then subscribe to its queue.

    :param dict rabbit_cfg: config rabbit
    :param dict rmq_ws_cfg: config rmq_ws
    :param dict db_cfg: config db
    :param dict redis_cfg: config redis
    :param BaseEtlListener worker_type: Type of worker (listener)
    """
    self.worker_type = worker_type
    self._rmq_user = rabbit_cfg["user"]
    self._rmq_pwd = rabbit_cfg["password"]
    # Each worker type has its own queue, keyed "<worker_type>_queue" in config.
    self._queue = rabbit_cfg['{}_queue'.format(self.worker_type)]
    self._ack = 'auto'
    self._db_conn = psycopg2.connect(**db_cfg)
    # Separate RabbitMQ publisher used to fan results out to websockets.
    self.rmq_ws_publisher = Publisher(user=rmq_ws_cfg["user"],
                                      password=rmq_ws_cfg["password"],
                                      host=rmq_ws_cfg["host"])
    # self.rmq_ws_channel = self.rmq_ws_connection.channel()
    rmq_host = (rabbit_cfg["host"], rabbit_cfg["port"])
    self._conn = stomp.Connection([rmq_host])
    # NOTE(review): this attribute holds a ConnectionPool, not a Redis
    # client — presumably listeners build their own client from it; confirm.
    self.redis_storage_conn = ConnectionPool.from_url(
        "redis://{}:{}/{}".format(redis_cfg["host"], redis_cfg["port"],
                                  redis_cfg['db']))
    # Attach the per-worker-type listener before connecting/subscribing so
    # no frames are missed.
    listener = LISTENER_MAP[self.worker_type]
    self._conn.set_listener(
        self.worker_type,
        listener(db_conn=self._db_conn,
                 redis_storage_conn=self.redis_storage_conn,
                 rmq_ws_publisher=self.rmq_ws_publisher))
    self._subscribe_and_connect()
def __init__(self, name='huey', blocking=True, read_timeout=1,
             connection_pool=None, url=None, client_name=None,
             **connection_params):
    """
    Resolve exactly one connection source (url / pool / raw params) into a
    connection pool and build the Redis client this storage will use.
    """
    if Redis is None:
        raise ConfigurationError('"redis" python module not found, cannot '
                                 'use Redis storage backend. Run "pip '
                                 'install redis" to install.')

    # Discard host/port/db entries that were explicitly passed as None.
    for key in ('host', 'port', 'db'):
        if key in connection_params and connection_params[key] is None:
            del connection_params[key]

    # At most one of the three configuration sources may be supplied.
    sources = [s for s in (url, connection_pool, connection_params) if s]
    if len(sources) > 1:
        raise ConfigurationError(
            'The connection configuration is over-determined. '
            'Please specify only one of the following: '
            '"url", "connection_pool", or "connection_params"')

    if url:
        pool = ConnectionPool.from_url(url, decode_components=True)
    elif connection_pool is not None:
        pool = connection_pool
    else:
        pool = ConnectionPool(**connection_params)

    self.pool = pool
    self.conn = self.redis_client(connection_pool=pool)
    self.connection_params = connection_params
    super(RedisStorage, self).__init__(name, blocking, read_timeout,
                                       **connection_params)
def get_filter_redis():
    """Create a StrictRedis client pointed at the configured filter database."""
    cfg = get_redis_args()
    uri = "redis://:{}@{}:{}/{}".format(cfg.get('password'),
                                        cfg.get('host'),
                                        cfg.get('port'),
                                        cfg.get('filter_db'))
    return StrictRedis(connection_pool=ConnectionPool.from_url(uri))
def get_connection_pool(self):
    """Build a connection pool from the broker URL with configured timeouts."""
    return ConnectionPool.from_url(
        self.broker_url,
        client_name=self._client_name,
        socket_timeout=self.connection_timeout,
        socket_connect_timeout=self._socket_connect_timeout,
    )
def __init__(self, *args, **kwargs):
    """
    Configure the bot: pop our own options out of **kwargs, set up Redis and
    the star map, then hand the remaining credentials to the XMPP parent.
    """
    self.rooms = kwargs.pop('rooms', [])
    self.nickname = kwargs.pop('nickname', 'Dropbot')
    self.cmd_prefix = kwargs.pop('cmd_prefix', '!')
    self.kos_url = kwargs.pop('kos_url', 'http://kos.cva-eve.org/api/')
    self.hidden_commands = ['cmd_prefix']
    self.last_killdate = datetime.utcnow()
    self.kill_corps = [int(x) for x in kwargs.pop('kill_corps', [])]
    # Option arrives env-style as the string '0'/'1', not a bool.
    self.kills_disabled = kwargs.pop('kills_disabled', '0') == '1'
    self.kills_muted = False
    self.office_api_key_keyid = kwargs.pop('office_api_keyid', None)
    self.office_api_key_vcode = kwargs.pop('office_api_vcode', None)
    self.redis_pool = ConnectionPool.from_url(
        kwargs.pop('redis_url', 'redis://localhost:6379/0'))
    self.redis = Redis(connection_pool=self.redis_pool)
    # Static map data shipped inside the package.
    self.map = Map.from_json(pkgutil.get_data('dropbot', 'data/map.json'))
    # Credentials are popped last and passed positionally to the parent bot.
    jid = kwargs.pop('jid', None)
    password = kwargs.pop('password', None)
    super(DropBot, self).__init__(jid, password)
    self.register_plugin('xep_0030')  # Service Discovery
    self.register_plugin('xep_0045')  # Multi-User Chat
    self.register_plugin('xep_0199')  # XMPP Ping
    # Basic bot auto config
    self.auto_subscribe = False
    self.auto_authorize = True
    # Handlers
    self.add_event_handler('session_start', self.handle_session_start)
    self.add_event_handler('message', self.handle_message)
def __init__(self, exchange_code):
    """
    Set up one exchange worker: merged kline/exchange config, logging,
    per-address HTTP sessions, Mongo and Redis handles, and the symbol map.

    :param exchange_code: key into KLINES_CONFIG['exchanges'] / EXCHANGE_INFO
    """
    self.name = exchange_code
    # BUG FIX: the original used dict.pop() on the module-level KLINES_CONFIG
    # and EXCHANGE_INFO dicts, permanently mutating shared config so a second
    # instantiation (any exchange) would fail.  Build merged copies instead;
    # the resulting attribute set is identical to the first-call behavior.
    self.kline_config = dict(KLINES_CONFIG['exchanges'][exchange_code])
    self.kline_config.update(
        {k: v for k, v in KLINES_CONFIG.items() if k != 'exchanges'})
    self.exchange_config = dict(EXCHANGE_INFO[exchange_code]['kline_rate'])
    self.exchange_config.update(
        {k: v for k, v in EXCHANGE_INFO[exchange_code].items()
         if k != 'kline_rate'})
    self.logging = get_logger(exchange_code)
    # Flatten both configs onto the instance as attributes.
    for key, value in self.kline_config.items():
        setattr(self, key, value)
    for key, value in self.exchange_config.items():
        setattr(self, key, value)
    # One HTTP session per configured local source address.
    self.sessions = [aiohttp.ClientSession(
        connector=aiohttp.TCPConnector(verify_ssl=False, local_addr=addr)
    ) for addr in self.local_addrs]
    client = MongoClient(MONGO_CLIENT)
    self.db = client["Klines"]
    self.sr = StrictRedis(connection_pool=ConnectionPool.from_url(REDIS_CLIENT))
    self.datas = self.get_coinpair()
    if self.name in self.reload_list:
        self.reload_market_id()
    self.symbol_id = {_info["pair_name"]: _info["pair_api_name"]
                      for _info in self.datas}
    self.session_distribute()
def __init__(self, *args, **kwargs):
    """
    Normalise the cluster-style `startup_nodes` kwarg into either plain
    host/port kwargs or a URL-based connection pool, then initialise the
    underlying client, install response callbacks, and start the liveness
    watchdog greenlet.
    """
    # Lightweight status holder; _alive_check presumably updates
    # alive_timestamp — confirm in that method.
    self.status = type('RedisStatus', (), {'alive_timestamp': 0})
    startup_nodes = kwargs.get('startup_nodes')
    if startup_nodes:
        host = startup_nodes[0].get('host')
        password = startup_nodes[0].get('password')
        if 'redis://' not in host:
            # Bare hostname: translate the first node into host/port kwargs.
            kwargs['host'] = host
            kwargs['port'] = startup_nodes[0].get('port')
            # Python 2/3 differ only stylistically here; both remove the key.
            if sys.version > '3':
                kwargs.pop('startup_nodes')
            else:
                del kwargs['startup_nodes']
            kwargs['password'] = password
        else:
            # URL template: substitute the password and build a pool from it.
            # NOTE(review): kwargs still contains 'startup_nodes' at this
            # point and is forwarded to from_url — confirm from_url tolerates
            # the extra keyword here.
            url = host.format(password=password)
            connection_pool = ConnectionPool.from_url(url, db=0, **kwargs)
            kwargs['connection_pool'] = connection_pool
            if sys.version > '3':
                kwargs.pop('startup_nodes')
            else:
                del kwargs['startup_nodes']
    super(SingleRedisClient, self).__init__(
        max_connections=128, decode_responses=True, *args, **kwargs
    )
    # Custom decoding hooks for the read commands this client wraps.
    self.set_response_callback('GET', self._get)
    self.set_response_callback('HGETALL', self._hgetall)
    self.set_response_callback('HGET', self._hget)
    self.set_response_callback('HMGET', self._hmget)
    # Background liveness probe; yield once so the greenlet gets scheduled.
    gevent.spawn(self._alive_check)
    gevent.sleep()
def get_redis_pool():
    """Lazily create, verify, and cache a process-wide Redis pool on settings."""
    if not hasattr(settings, "_redis_pool"):
        new_pool = ConnectionPool.from_url(settings.REDIS_URL,
                                           decode_responses=True)
        settings._redis_pool = new_pool
        # Block until Redis answers before handing the pool to callers.
        wait_for_redis(new_pool)
    return settings._redis_pool
def __init__(self):
    """
    Load all runtime configuration from docker secrets / the environment:
    Postgres and Redis URLs, third-party API configs, Discord settings,
    logging level, and a Redis-bucket rate limiter for the Destiny API.
    """
    Borg.__init__(self)
    database_user = get_docker_secret("seraphsix_pg_db_user", default="seraphsix")
    database_password = get_docker_secret("seraphsix_pg_db_pass")
    database_host = get_docker_secret("seraphsix_pg_db_host", default="localhost")
    database_port = get_docker_secret("seraphsix_pg_db_port", default="5432")
    database_name = get_docker_secret("seraphsix_pg_db_name", default="seraphsix")
    self.database_conns = get_docker_secret("seraphsix_pg_db_conns",
                                            default=DB_MAX_CONNECTIONS,
                                            cast_to=int)
    database_auth = f"{database_user}:{database_password}"
    self.database_url = f"postgres://{database_auth}@{database_host}:{database_port}/{database_name}"
    redis_password = get_docker_secret("seraphsix_redis_pass")
    redis_host = get_docker_secret("seraphsix_redis_host", default="localhost")
    redis_port = get_docker_secret("seraphsix_redis_port", default="6379")
    self.redis_url = f"redis://:{redis_password}@{redis_host}:{redis_port}"
    # The arq task queue gets logical db 1 on the same Redis server.
    self.arq_redis = RedisSettings.from_dsn(f"{self.redis_url}/1")
    self.destiny = DestinyConfig()
    self.the100 = The100Config()
    self.twitter = TwitterConfig()
    self.discord_api_key = get_docker_secret("discord_api_key")
    self.home_server = get_docker_secret("home_server", cast_to=int)
    self.log_channel = get_docker_secret("home_server_log_channel", cast_to=int)
    self.reg_channel = get_docker_secret("home_server_reg_channel", cast_to=int)
    self.enable_activity_tracking = get_docker_secret(
        "enable_activity_tracking", cast_to=bool)
    # The Flask key is stored escaped in the environment; round-trip the
    # escape sequences to recover the raw key bytes.
    self.flask_app_key = (os.environb[b"FLASK_APP_KEY"].decode(
        "unicode-escape").encode("latin-1"))
    self.activity_cutoff = get_docker_secret("activity_cutoff")
    if self.activity_cutoff:
        # Parse the YYYY-MM-DD cutoff and pin it to UTC.
        self.activity_cutoff = datetime.strptime(
            self.activity_cutoff, "%Y-%m-%d").astimezone(tz=pytz.utc)
    self.root_log_level = get_docker_secret("root_log_level",
                                            default=ROOT_LOG_LEVEL, cast_to=str)
    # Redis-backed token bucket: at most 20 Destiny API requests per second.
    bucket_kwargs = {
        "redis_pool": ConnectionPool.from_url(self.redis_url),
        "bucket_name": "ratelimit",
    }
    destiny_api_rate = RequestRate(20, Duration.SECOND)
    self.destiny_api_limiter = Limiter(destiny_api_rate,
                                       bucket_class=RedisBucket,
                                       bucket_kwargs=bucket_kwargs)
def __init__(self, url: str, decode: bool = False):
    """
    Build a StrictRedis client from a URL.

    :param url: e.g. redis://:password@127.0.0.1:6379/0
    :param decode: whether to decode responses to str (instead of bytes)
    """
    # BUG FIX: decode_responses is a *connection* option; passing it to
    # StrictRedis alongside an explicit connection_pool is silently ignored,
    # so `decode` had no effect.  It must be set on the pool itself.
    # (The old docstring also mis-described `decode` as password encryption.)
    pool = ConnectionPool.from_url(url, decode_responses=decode)
    self.redis = StrictRedis(connection_pool=pool)
def __init__(self):
    """Connect to the Redis cache described by the 'redis' config section."""
    super(redis, self).__init__()
    cfg = conf.config.configs['redis']
    url = "redis://:" + cfg['pass'] + "@" + cfg['host'] + ":" + cfg['port'] + "/1"
    self.redis = StrictRedis(connection_pool=ConnectionPool.from_url(url))
    self.expireTime = cfg['expireTime']
def __init__(self, connection_pool=None, url=None, **connection_params):
    """Build the client, preferring `url`, then an explicit pool, then params."""
    if url:
        pool = ConnectionPool.from_url(url, decode_components=True)
    elif connection_pool is not None:
        pool = connection_pool
    else:
        pool = ConnectionPool(**connection_params)
    self.pool = pool
    self.conn = self.redis_client(connection_pool=pool)
def start_requests(self):
    """Seed requests from the per-city link sets stored in Redis."""
    client = StrictRedis(
        connection_pool=ConnectionPool.from_url('redis://192.168.1.59:6379/2'))
    for city in ('sh', 'su'):
        for link in client.sscan_iter('%s_ershoufang_sell:link' % city):
            yield scrapy.Request(url=link.decode('utf-8'),
                                 meta={'city': city},
                                 callback=self.parse,
                                 dont_filter=True)
def __init__(self, type, site):
    """
    Initialise the Redis-backed store.

    :param type: storage type
    :param site: site tag for this connection
    """
    pool = ConnectionPool.from_url(REDIS_URL)
    self.db = StrictRedis(connection_pool=pool)
    self.type = type
    self.site = site
def start_requests(self):
    """Drain queued detail-page links from Redis and request each one."""
    client = StrictRedis(connection_pool=ConnectionPool.from_url(
        'redis://:[email protected]:6379/1'))
    key = 'lianjia_sale_day:link'
    for link in client.sscan_iter(key):
        # Remove the member first so a rerun does not reprocess it.
        client.srem(key, link)
        yield scrapy.Request(url=link.decode('utf-8'),
                             callback=self.parse_detail,
                             dont_filter=True)
class UserSpider(scrapy.Spider):
    """Crawl NetEase Music follow lists breadth-first over user ids queued in
    Redis, emitting a UserItem per newly-seen followed user."""
    name = 'user'
    allowed_domains = ['music.163.com']
    # NOTE: these connections are created at import time (class body).
    client = MongoClient("localhost", 27017)
    db = client["netease"]
    coll = db["user"]
    pool = ConnectionPool.from_url('redis://@192.168.28.110:6379/0')
    red = StrictRedis(connection_pool=pool)
    # One proxy chosen at random for the whole run.
    proxy = "http://" + str(red.srandmember("proxies"), encoding='utf-8').strip()

    def start_requests(self):
        # Pop up to 1,000,000 user ids off the Redis "queue" list and fetch
        # page one (offset 0) of each user's follow list.
        for i in range(1000000):
            user_id = str(self.red.lpop("queue"), encoding='utf-8')
            url = "http://music.163.com/api/user/getfollows/{userId}?offset={offset}&limit=300&order=true"
            # url = "http://music.163.com/api/user/getfolloweds?userId={userId}&offset={offset}&limit=300&order=true"
            offset = 0
            u = url.format(userId=user_id, offset=offset)
            # Skip users already fully scraped.
            if not self.red.sismember("scraped", user_id):
                yield scrapy.Request(u, meta={
                    "offset": offset,
                    "user_id": user_id,
                    "proxy": self.proxy,
                    "download_timeout": 20
                }, dont_filter=True)

    def parse(self, response):
        url = response.url
        user_id = response.meta['user_id']
        offset = response.meta["offset"]
        # NOTE(review): body_as_unicode() is deprecated/removed in newer
        # Scrapy (use response.text) — confirm the pinned Scrapy version.
        res = json.loads(response.body_as_unicode())
        items = res['follow']
        for item in items:
            uid = item["userId"]
            # Emit only users not yet recorded in the "users" set.
            if not self.red.sismember("users", uid):
                yield UserItem(item)
        if res["more"]:
            # Page forward by rewriting the offset query parameter in place.
            old = "offset={offset}".format(offset=offset)
            offset += 300
            yield scrapy.Request(url.replace(
                old, "offset={offset}".format(offset=offset)),
                meta={
                    "offset": offset,
                    "user_id": user_id,
                    "proxy": self.proxy,
                    "download_timeout": 20
                }, dont_filter=True)
        else:
            # No more pages: mark this user as fully scraped.
            self.red.sadd("scraped", user_id)
def deduplicate(self, title):
    '''
    Prevents the re-adding of sling downloads
    @params title - name of granule to check if downloading
    '''
    dedup_key = config()['dedup_redis_key']
    global POOL
    if POOL is None:
        POOL = ConnectionPool.from_url(REDIS_URL)
    client = StrictRedis(connection_pool=POOL)
    # SADD returns 0 when the member already existed -> duplicate.
    return client.sadd(dedup_key, title) == 0
def saveStamp(self, stamp, key):
    '''
    Save the query time for later use
    @param stamp: timestamp to save in key
    @param key: name of the key to set
    '''
    global POOL
    if POOL is None:
        POOL = ConnectionPool.from_url(REDIS_URL)
    StrictRedis(connection_pool=POOL).set(key, stamp)
def loadStamp(self, key):
    '''
    Load the query time for this key
    @param key: key to get stamp for
    @return: time of last query, as string
    '''
    global POOL
    if POOL is None:
        POOL = ConnectionPool.from_url(REDIS_URL)
    client = StrictRedis(connection_pool=POOL)
    return client.get(key)
def __init__(self, type, site):
    """
    Initialise the Redis-backed store.

    :param type: storage type
    :param site: site tag for this connection
    """
    # BUG FIX: socket_timeout and decode_responses are *connection* options.
    # Passing them to StrictRedis alongside an explicit connection_pool is
    # silently ignored, so neither setting ever took effect; they must be
    # applied to the pool itself.
    self.db = StrictRedis(connection_pool=ConnectionPool.from_url(
        REDIS_URL, socket_timeout=5, decode_responses=True))
    self.type = type
    self.site = site
def open_spider(self, spider):
    """
    Called when the spider starts: connect to Redis and open output files.

    :param spider: the spider being opened
    :return: None
    """
    self.redis = StrictRedis(
        connection_pool=ConnectionPool.from_url(self.redis_url))
    print('爬虫开始执行.....%s' % spider)
    all_path = os.path.join(self.path, spider.name + '_all.txt')
    phone_path = os.path.join(self.path, spider.name + '_phone.txt')
    self.file_all = open(all_path, 'a+', encoding='utf-8')
    self.file_phone = open(phone_path, 'a+', encoding='utf-8')
def get_redis_pool(settings, db):
    """
    Build a Redis ConnectionPool from the configured URL.

    :param settings: settings mapping holding the redis URL and timeout
    :param db: redis logical database number
    :raises RedisNotConfigured: when the URL setting is missing
    """
    redis_url = settings[REDIS_URL_SETTING]
    redis_timeout = int(settings[REDIS_TIMEOUT_SETTING])
    if not redis_url:
        raise RedisNotConfigured('Missing redis url: %s not set' % REDIS_URL_SETTING)
    # BUG FIX: only prepend the default scheme when none of the schemes
    # redis-py understands (redis://, rediss://, unix://) is present; the old
    # check only recognised redis:// and corrupted TLS (rediss://) and
    # unix-socket URLs by prefixing them.
    if not redis_url.startswith(('redis://', 'rediss://', 'unix://')):
        redis_url = 'redis://' + redis_url
    return ConnectionPool.from_url(url=redis_url, socket_timeout=redis_timeout, db=db)
def parse(self, response):
    """Seed the crawl_uid set with generated uids, then pop and crawl each."""
    uids = self.gen_uid()
    base_url = "http://www.okooo.com/member/{uid}/"
    client = StrictRedis(
        connection_pool=ConnectionPool.from_url(settings.REDIS_URL))
    for uid in uids:
        client.sadd('crawl_uid', uid)
    remaining = client.scard('crawl_uid')
    while remaining > 0:
        remaining -= 1
        uid = client.spop('crawl_uid').decode("utf-8")
        # print("spop uid===%s, type(uid)===%s" % (uid, type(uid)))
        yield Request(url=base_url.format(uid=uid),
                      callback=self.parse_uid,
                      meta={'uid': uid})
def __init__(self):
    """Wire together the Redis client, task queue, pipeline and duplicator."""
    pool = ConnectionPool.from_url(self.redis_url)
    self.client = StrictRedis(connection_pool=pool)
    self.task_queue = self.task_queue_class(self.client, self.spider_name)
    self.pipeline = self.pipeline_class(self.site, self.mongo_batch_size)
    self.duplicator = Duplicator(self.client,
                                 spider_name=self.spider_name,
                                 ttl=self.ttl,
                                 interval=self.interval)
    # Per-metric display labels ('ingore' key spelled as used elsewhere).
    self.counter = Counter({'req': '请求数',
                            'item': '项目数',
                            'ingore': '忽略请求数'})
    self.error_cnt = 0
def get_redis_pool(db):
    """
    Build a Redis ConnectionPool for the given logical database.

    Reads REDIS_URL, falling back to the deprecated WIKI_API_REDIS_URL (with
    a deprecation warning), and prepends the default redis:// scheme when the
    URL carries none.

    :param db: redis logical database number
    :raises RedisNotConfigured: when neither setting is present
    """
    redis_url = settings["REDIS_URL"]
    if redis_url is None:
        # Fallback to old setting name
        redis_url = settings["WIKI_API_REDIS_URL"]
        if redis_url:
            logger.warning('"WIKI_API_REDIS_URL" setting is deprecated. Use REDIS_URL instead')
    if not redis_url:
        raise RedisNotConfigured("Redis URL is not set")
    # BUG FIX: accept every scheme redis-py understands; the old check only
    # recognised redis:// and corrupted rediss:// (TLS) and unix:// URLs by
    # prefixing them with redis://.
    if not redis_url.startswith(("redis://", "rediss://", "unix://")):
        redis_url = "redis://" + redis_url
    return ConnectionPool.from_url(url=redis_url, socket_timeout=REDIS_TIMEOUT, db=db)
def create_redis(registry: Registry,
                 connection_url=None,
                 redis_client=StrictRedis,
                 max_connections=16,
                 **redis_options) -> StrictRedis:
    """Create the process-wide Redis connection pool and return a client.

    The pool's life cycle matches the Pyramid registry, i.e. the process
    (all threads).

    :param max_connections: Default per-process connection pool limit
    :raises RuntimeError: when no URL is configured
    """
    # Explicit argument wins; otherwise fall back to Pyramid settings.
    url = connection_url
    if url is None:
        url = registry.settings.get('redis.sessions.url')
    if url is None:
        raise RuntimeError(
            "Redis connection options missing. Please configure redis.sessions.url"
        )

    # Anything encoded in the URL must not also arrive as a keyword option.
    # Sockets are encoded in the URL these days, e.g.
    # unix://[:password]@/path/to/socket.sock?db=0, and connection pools can
    # no longer be configured when loading via URL.
    for dup in ('password', 'host', 'port', 'db',
                'unix_socket_path', 'connection_pool'):
        redis_options.pop(dup, None)

    logger.info(
        "Creating a new Redis connection pool. Process %s, thread %s, max_connections %d",
        os.getpid(), threading.current_thread().name, max_connections)
    pool = ConnectionPool.from_url(url,
                                   max_connections=max_connections,
                                   **redis_options)
    return StrictRedis(connection_pool=pool)
def _make_redis_client(self, host, port):
    """
    Return a StrictRedis client for host/port, or a MockRedis when either
    is unset.

    :param host: redis host name, or None to use the mock client
    :param port: redis port, or None to use the mock client
    """
    if host is not None and port is not None:
        from redis import ConnectionPool
        from redis import StrictRedis
        cache_connection_url = 'redis://{}:{}'.format(host, port)
        # BUG FIX: decode_responses is a *connection* option — passing it to
        # StrictRedis alongside an explicit connection_pool was a no-op,
        # leaving responses as bytes.  Set it on the pool instead.
        # (Also dropped the unused `import redis`.)
        connection_pool = ConnectionPool.from_url(cache_connection_url,
                                                  decode_responses=True)
        client = StrictRedis(connection_pool=connection_pool)
        logger.info(
            f'Connecting to Redis at namespace {self._namespace}...')
    else:
        client = MockRedis()
        logger.info('Using mock Redis...')
    return client
def start_requests(self):
    """Drain queued '<community>+<url>' entries and request each listing page."""
    client = StrictRedis(connection_pool=ConnectionPool.from_url(
        'redis://:[email protected]:6379/1'))
    key = 'lianjia_xiaoqu_day:link'
    for entry in client.sscan_iter(key):
        # Remove the member first so a rerun does not reprocess it.
        client.srem(key, entry)
        parts = entry.decode('utf-8').split('+')
        community = parts[0]
        if parts[1]:
            base = re.search(r'https://\w+.lianjia.com/xiaoqu/', parts[1])[0]
            # print(base+'rs'+community+'/')
            yield scrapy.Request(url=base + 'rs' + community + '/',
                                 callback=self.parse,
                                 dont_filter=True)
def __init__(self, name='huey', blocking=True, read_timeout=1,
             connection_pool=None, url=None, client_name=None,
             **connection_params):
    """
    Set up the Redis-backed storage.

    Exactly one of `url`, `connection_pool` or `**connection_params` may be
    supplied; the chosen source becomes the connection pool from which the
    client and the per-queue key names are derived.

    :param name: queue name used to namespace all Redis keys
    :param blocking: use blocking pops when reading the queue
    :param read_timeout: timeout (seconds) for blocking reads
    :param connection_pool: pre-built redis ConnectionPool
    :param url: redis:// URL describing the connection
    :param client_name: optional CLIENT SETNAME value
    :raises ConfigurationError: if redis is missing or the configuration is
        over-determined
    """
    if Redis is None:
        raise ConfigurationError('"redis" python module not found, cannot '
                                 'use Redis storage backend. Run "pip '
                                 'install redis" to install.')

    # Drop common empty values from the connection_params.
    for p in ('host', 'port', 'db'):
        if p in connection_params and connection_params[p] is None:
            del connection_params[p]

    if sum(1 for p in (url, connection_pool, connection_params) if p) > 1:
        # BUG FIX: error message previously read "one of the the following".
        raise ConfigurationError(
            'The connection configuration is over-determined. '
            'Please specify only one of the following: '
            '"url", "connection_pool", or "connection_params"')

    if url:
        connection_pool = ConnectionPool.from_url(url, decode_components=True)
    elif connection_pool is None:
        connection_pool = ConnectionPool(**connection_params)

    self.pool = connection_pool
    self.conn = self.redis_client(connection_pool=connection_pool)
    self.connection_params = connection_params
    # Pre-register the Lua script used for atomic scheduled pops.
    self._pop = self.conn.register_script(SCHEDULE_POP_LUA)

    self.name = self.clean_name(name)
    self.queue_key = 'huey.redis.%s' % self.name
    self.schedule_key = 'huey.schedule.%s' % self.name
    self.result_key = 'huey.results.%s' % self.name
    self.error_key = 'huey.errors.%s' % self.name

    if client_name is not None:
        self.conn.client_setname(client_name)

    self.blocking = blocking
    self.read_timeout = read_timeout
def connect_to_redis():
    u'''
    (Lazily) connect to Redis.

    The connection is set up but not actually established; that happens
    automatically the first time the connection is used.

    :returns: A lazy Redis connection.
    :rtype: ``redis.Redis``

    .. seealso:: :py:func:`is_redis_available`
    '''
    global _connection_pool
    if _connection_pool is None:
        # Create the shared pool once per process.
        url = config.get(REDIS_URL_SETTING_NAME, REDIS_URL_DEFAULT_VALUE)
        log.debug(u'Using Redis at {}'.format(url))
        _connection_pool = ConnectionPool.from_url(url)
    return Redis(connection_pool=_connection_pool)
def __init__(self, *args, **kwargs):
    """
    Configure the bot: pop our own options out of **kwargs, optionally set
    up Redis caching, load the star map, then hand the remaining
    credentials to the XMPP parent.
    """
    self.rooms = kwargs.pop('rooms', [])
    self.nickname = kwargs.pop('nickname', 'Dropbot')
    self.cmd_prefix = kwargs.pop('cmd_prefix', '!')
    self.kos_url = kwargs.pop('kos_url', 'http://kos.cva-eve.org/api/')
    self.hidden_commands = ['cmd_prefix']
    self.last_killdate = datetime.utcnow()
    self.kill_corps = [int(x) for x in kwargs.pop('kill_corps', [])]
    # Option arrives env-style as the string '0'/'1', not a bool.
    self.kills_disabled = kwargs.pop('kills_disabled', '0') == '1'
    self.kills_muted = False
    self.office_api_key_keyid = kwargs.pop('office_api_keyid', None)
    self.office_api_key_vcode = kwargs.pop('office_api_vcode', None)
    self.market_systems = kwargs.pop('market_systems', ['Jita', 'Amarr', 'Rens', 'Dodixie', 'Hek'])
    # Redis is optional: without it, self.redis stays None and API calls
    # are simply not cached.
    if 'redis_url' in kwargs:
        self.redis_pool = ConnectionPool.from_url(kwargs.pop('redis_url', 'redis://localhost:6379/0'))
        self.redis = Redis(connection_pool=self.redis_pool)
    else:
        logging.warning('No DROPBOT_REDIS_URL defined, EVE API calls will not be cached!')
        self.redis = None
    # Static map data shipped inside the package.
    self.map = Map.from_json(pkgutil.get_data('dropbot', 'data/map.json'))
    # Credentials are popped last and passed positionally to the parent bot.
    jid = kwargs.pop('jid', None)
    password = kwargs.pop('password', None)
    super(DropBot, self).__init__(jid, password)
    self.register_plugin('xep_0030')  # Service Discovery
    self.register_plugin('xep_0045')  # Multi-User Chat
    self.register_plugin('xep_0199')  # XMPP Ping
    # Basic bot auto config
    self.auto_subscribe = False
    self.auto_authorize = True
    # Handlers
    self.add_event_handler('session_start', self.handle_session_start)
    self.add_event_handler('message', self.handle_message)
def create_redis(registry: Registry, connection_url=None, redis_client=StrictRedis,
                 max_connections=16, **redis_options) -> StrictRedis:
    """Build the per-process Redis connection pool and wrap it in a client.

    Pool life cycle matches the Pyramid registry, i.e. the whole process
    (shared by all threads).

    :param max_connections: Default per-process connection pool limit
    :raises RuntimeError: when no URL is configured
    """
    # Prefer the explicitly passed URL, else read it from Pyramid settings.
    if connection_url is not None:
        url = connection_url
    else:
        url = registry.settings.get('redis.sessions.url')

    if url is None:
        raise RuntimeError("Redis connection options missing. Please configure redis.sessions.url")

    # These settings may be duplicated inside the URL itself, so strip the
    # keyword versions.  Unix sockets are encoded in the URL now
    # (unix://[:password]@/path/to/socket.sock?db=0), and a connection pool
    # cannot be supplied when loading from a URL.
    redis_options.pop('password', None)
    redis_options.pop('host', None)
    redis_options.pop('port', None)
    redis_options.pop('db', None)
    redis_options.pop('unix_socket_path', None)
    redis_options.pop('connection_pool', None)

    logger.info("Creating a new Redis connection pool. Process %s, thread %s, max_connections %d",
                os.getpid(), threading.current_thread().name, max_connections)

    pool = ConnectionPool.from_url(url, max_connections=max_connections, **redis_options)
    return StrictRedis(connection_pool=pool)
def __init__(self, name='huey', blocking=True, read_timeout=1,
             connection_pool=None, url=None, client_name=None,
             **connection_params):
    """
    Set up the Redis-backed storage.

    Exactly one of `url`, `connection_pool` or `**connection_params` may be
    supplied; the chosen source becomes the connection pool from which the
    client and the per-queue key names are derived.

    :param name: queue name used to namespace all Redis keys
    :param blocking: use blocking pops when reading the queue
    :param read_timeout: timeout (seconds) for blocking reads
    :param connection_pool: pre-built redis ConnectionPool
    :param url: redis:// URL describing the connection
    :param client_name: optional CLIENT SETNAME value
    :raises ConfigurationError: if redis is missing or the configuration is
        over-determined
    """
    if Redis is None:
        raise ConfigurationError('"redis" python module not found, cannot '
                                 'use Redis storage backend. Run "pip '
                                 'install redis" to install.')

    # Consistency fix: siblings of this constructor drop host/port/db
    # entries explicitly passed as None before counting configuration
    # sources, so a params dict of Nones is not treated as "configured".
    for p in ('host', 'port', 'db'):
        if p in connection_params and connection_params[p] is None:
            del connection_params[p]

    if sum(1 for p in (url, connection_pool, connection_params) if p) > 1:
        # BUG FIX: error message previously read "one of the the following".
        raise ConfigurationError(
            'The connection configuration is over-determined. '
            'Please specify only one of the following: '
            '"url", "connection_pool", or "connection_params"')

    if url:
        connection_pool = ConnectionPool.from_url(
            url, decode_components=True)
    elif connection_pool is None:
        connection_pool = ConnectionPool(**connection_params)

    self.pool = connection_pool
    self.conn = self.redis_client(connection_pool=connection_pool)
    self.connection_params = connection_params
    # Pre-register the Lua script used for atomic scheduled pops.
    self._pop = self.conn.register_script(SCHEDULE_POP_LUA)

    self.name = self.clean_name(name)
    self.queue_key = 'huey.redis.%s' % self.name
    self.schedule_key = 'huey.schedule.%s' % self.name
    self.result_key = 'huey.results.%s' % self.name
    self.error_key = 'huey.errors.%s' % self.name

    if client_name is not None:
        self.conn.client_setname(client_name)

    self.blocking = blocking
    self.read_timeout = read_timeout
def from_url(cls, url, db=None, **kwargs):
    """Alternate constructor: build the client from a redis URL.

    'namespace' is required in kwargs; it is forwarded to the constructor
    rather than to the connection pool (raises KeyError when omitted).
    """
    namespace = kwargs.pop('namespace')
    pool = ConnectionPool.from_url(url, db=db, **kwargs)
    return cls(connection_pool=pool, namespace=namespace)
# CONNECT TO redis METHOD 1)
# NOTE(review): port 6397 looks like a typo for the default 6379 — confirm.
redis = StrictRedis(host='localhost', port=6397, db=0)

# CONNECT TO redis METHOD 2)
pool = ConnectionPool(host='localhost', port=6397, db=0)
# BUG FIX: the original passed the ConnectionPool *class* positionally
# (StrictRedis(ConnectionPool)), which would be interpreted as the `host`
# argument; the pool instance must be passed via connection_pool=.
redis = StrictRedis(connection_pool=pool)

# CONNECT TO redis METHOD 3) : via URL
# redis = StrictRedis.from_url(url='redis://*****:*****@localhost:6379/1')

# ------------------------------------------------------
# OP demo
# 1) simple set key-value and query
# NOTE: this import rebinds the name `redis` from the client above to the
# module — fine for a demo, confusing in real code.
import redis
r = redis.Redis(host='127.0.0.1', port=6379)
r.set('key1', 'abc')
print(r.get('key1'))
_signal_encoder = import_string(settings.WEBSOCKET_SIGNAL_ENCODER)
_topic_serializer = import_string(settings.WEBSOCKET_TOPIC_SERIALIZER)

# Components of the Redis URL; each optional piece collapses to "" when unset.
__values = {
    "host": settings.WEBSOCKET_REDIS_HOST,
    "port": ":%s" % settings.WEBSOCKET_REDIS_PORT if settings.WEBSOCKET_REDIS_PORT else "",
    "db": settings.WEBSOCKET_REDIS_DB,
    # BUG FIX: the original read `"******" % password`, a format string with
    # no conversion specifier — it raises TypeError ("not all arguments
    # converted") whenever a password is set.  The URL template below expects
    # the ":password@" auth segment here (the literal was apparently a
    # credential-masking artifact).
    "password": ":%s@" % settings.WEBSOCKET_REDIS_PASSWORD if settings.WEBSOCKET_REDIS_PASSWORD else "",
}
redis_connection_pool = ConnectionPool.from_url(
    "redis://%(password)s%(host)s%(port)s/%(db)s" % __values
)


def get_websocket_redis_connection():
    """Return a valid Redis connection, using a connection pool."""
    return StrictRedis(connection_pool=redis_connection_pool)


def set_websocket_topics(request, *topics):
    """Use it in a Django view for setting websocket topics. Any signal sent to one of these topics will be received by the client.

    :param request: :class:`django.http.request.HttpRequest`
    :param topics: list of topics that will be subscribed by the websocket (can be any Python object).
    """