def test_master_for(cluster, sentinel, master_ip):
    master = sentinel.master_for("mymaster", db=9)
    assert master.ping()
    assert master.connection_pool.master_address == (master_ip, 6379)

    # Use internal connection check
    master = sentinel.master_for("mymaster", db=9, check_connection=True)
    assert master.ping()

def test_master_for(cluster, sentinel):
    master = sentinel.master_for('mymaster', db=9)
    assert master.ping()
    assert master.connection_pool.master_address == ('127.0.0.1', 6379)

    # Use internal connection check
    master = sentinel.master_for('mymaster', db=9, check_connection=True)
    assert master.ping()

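# --- Added illustration (not part of the snippets above) ---
# Minimal standalone sketch of the master_for() call the two tests above
# exercise. Assumptions: a Sentinel is listening locally on port 26379 and
# manages a service named "mymaster"; both values are hypothetical.
import redis.sentinel

sentinel = redis.sentinel.Sentinel([('127.0.0.1', 26379)], socket_timeout=0.5)
master = sentinel.master_for('mymaster', socket_timeout=0.5, db=9)
master.ping()  # raises if no reachable master exists for the named service
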
def _get_redis_client(driver):
    conf = driver.redis_conf
    connection_uri = ConnectionURI(conf.uri)

    if connection_uri.strategy == STRATEGY_SENTINEL:
        sentinel = redis.sentinel.Sentinel(
            connection_uri.sentinels,
            db=connection_uri.dbid,
            password=connection_uri.password,
            socket_timeout=connection_uri.socket_timeout)

        # NOTE(prashanthr_): The socket_timeout parameter being generic
        # to all redis connections is inherited from the parameters for
        # sentinel.
        return sentinel.master_for(connection_uri.master)
    elif connection_uri.strategy == STRATEGY_TCP:
        return redis.StrictRedis(host=connection_uri.hostname,
                                 port=connection_uri.port,
                                 db=connection_uri.dbid,
                                 password=connection_uri.password,
                                 socket_timeout=connection_uri.socket_timeout)
    else:
        return redis.StrictRedis(
            unix_socket_path=connection_uri.unix_socket_path,
            db=connection_uri.dbid,
            password=connection_uri.password,
            socket_timeout=connection_uri.socket_timeout)

def __init__(self, args):
    self.env = args.env
    self.action = args.action
    self.kill_delay = args.kill_delay
    self.conn = None

    # Load settings
    self.config = Config(os.path.dirname(os.path.realpath(__file__)))

    # There may be overriding settings specific to the server we are running on
    servername = socket.gethostname().split('.')[0]
    if servername and os.path.isfile(f'settings/workers/{self.env}_{servername}.py'):
        self.config.from_object(f'settings.workers.{self.env}_{servername}.Config')
    else:
        self.config.from_object(f'settings.workers.{self.env}.Config')

    # Redis
    if self.config['REDIS_USE_SENTINEL']:
        sentinel = redis.sentinel.Sentinel(
            self.config['REDIS_ADDRESS'],
            db=self.config['REDIS_DB'],
            password=self.config['REDIS_PASSWORD'],
            decode_responses=True
        )
        self.conn = sentinel.master_for(self.config['REDIS_SENTINEL_MASTER'])
    else:
        self.conn = redis.StrictRedis(
            host=self.config['REDIS_ADDRESS'][0],
            port=self.config['REDIS_ADDRESS'][1],
            db=self.config['REDIS_DB'],
            password=self.config['REDIS_PASSWORD'],
            decode_responses=True
        )

def _get_redis_connection(self, group, shard):
    """
    Create and return a Redis Connection for the given group

    Returns:
        redis.StrictRedis: The Redis Connection

    Raises:
        Exception: Passes through any exceptions that happen in trying to get the connection pool
    """
    redis_group = self.__config.redis_urls_by_group[group][shard]

    self.__logger.info(
        u'Attempting to connect to Redis for group "{}", shard "{}", url "{}"'
        .format(group, shard, redis_group))

    if isinstance(redis_group, PanoptesRedisConnectionConfiguration):
        redis_pool = redis.BlockingConnectionPool(
            host=redis_group.host,
            port=redis_group.port,
            db=redis_group.db,
            password=redis_group.password)
        redis_connection = redis.StrictRedis(connection_pool=redis_pool)
    elif isinstance(redis_group, PanoptesRedisSentinelConnectionConfiguration):
        sentinels = [(sentinel.host, sentinel.port) for sentinel in redis_group.sentinels]
        self.__logger.info(
            u'Querying Redis Sentinels "{}" for group "{}", shard "{}"'
            .format(repr(redis_group), group, shard))

        sentinel = redis.sentinel.Sentinel(sentinels)
        master = sentinel.discover_master(redis_group.master_name)
        password_present = u'yes' if redis_group.master_password else u'no'
        self.__logger.info(
            u'Going to connect to master "{}" ({}:{}, password: {}) for group "{}", shard "{}"'
            .format(redis_group.master_name, master[0], master[1], password_present, group, shard))

        redis_connection = sentinel.master_for(
            redis_group.master_name, password=redis_group.master_password)
    else:
        self.__logger.info(
            u'Unknown Redis configuration object type: {}'.format(type(redis_group)))
        return

    self.__logger.info(
        u'Successfully connected to Redis for group "{}", shard "{}", url "{}"'
        .format(group, shard, redis_group))

    return redis_connection

def __init__(
    self,
    url: Optional[str] = None,
    name: str = "tilecloud",
    stop_if_empty: bool = True,
    timeout: int = 5,
    pending_timeout: int = 5 * 60,
    max_retries: int = 5,
    max_errors_age: int = 24 * 3600,
    max_errors_nb: int = 100,
    sentinels: Optional[List[Tuple[str, int]]] = None,
    service_name: str = "mymaster",
    sentinel_kwargs: Any = None,
    connection_kwargs: Any = None,
    **kwargs: Any,
):
    super().__init__(**kwargs)
    connection_kwargs = connection_kwargs or {}

    if sentinels is not None:
        sentinel = redis.sentinel.Sentinel(sentinels, sentinel_kwargs=sentinel_kwargs, **connection_kwargs)
        self._master = sentinel.master_for(service_name)
        self._slave = sentinel.slave_for(service_name)
    else:
        assert url is not None
        self._master = redis.Redis.from_url(url, **connection_kwargs)
        self._slave = self._master

    self._stop_if_empty = stop_if_empty
    self._timeout_ms = int(timeout * 1000)
    self._pending_timeout_ms = int(pending_timeout * 1000)
    self._max_retries = max_retries
    self._max_errors_age = max_errors_age
    self._max_errors_nb = max_errors_nb

    if not name.startswith("queue_"):
        name = "queue_" + name
    self._name_str = name
    self._name = name.encode("utf-8")
    self._errors_name = self._name + b"_errors"

    try:
        self._master.xgroup_create(name=self._name, groupname=STREAM_GROUP, id="0-0", mkstream=True)
    except redis.ResponseError as e:
        if "BUSYGROUP" not in str(e):
            raise

def master_for(self, service_name, **kwargs):
    try:
        return self.local.master_connections[service_name]
    except AttributeError:
        self.local.master_connections = {}
    except KeyError:
        pass

    sentinel = self.sentinel
    if sentinel is None:
        msg = 'Cannot get master {} using non-sentinel configuration'
        raise RuntimeError(msg.format(service_name))

    conn = sentinel.master_for(service_name,
                               redis_class=self.client_class,
                               **kwargs)
    self.local.master_connections[service_name] = conn
    return conn

def __init__(
    self,
    sentinels=None,
    master=None,
    password=None,
    db=0,
    default_timeout=300,
    key_prefix=None,
    **kwargs
):
    super(RedisSentinelCache, self).__init__(default_timeout)

    try:
        import redis.sentinel
    except ImportError:
        raise RuntimeError("no redis module found")

    if kwargs.get("decode_responses", None):
        raise ValueError("decode_responses is not supported by RedisCache.")

    sentinels = sentinels or [("127.0.0.1", 26379)]
    # Keyword arguments prefixed with "sentinel_" are forwarded to the
    # Sentinel connections (with the prefix stripped); everything else is
    # passed through to the managed Redis connections unchanged.
    sentinel_kwargs = {
        key[9:]: value
        for key, value in kwargs.items()
        if key.startswith("sentinel_")
    }
    kwargs = {
        key: value
        for key, value in kwargs.items()
        if not key.startswith("sentinel_")
    }

    sentinel = redis.sentinel.Sentinel(
        sentinels=sentinels,
        password=password,
        db=db,
        sentinel_kwargs=sentinel_kwargs,
        **kwargs
    )

    self._write_client = sentinel.master_for(master)
    self._read_clients = sentinel.slave_for(master)

    self.key_prefix = key_prefix or ""

def check(self):
    log.info('Checking Redis Sentinel connection: %s', self._master)
    try:
        sentinel = redis.sentinel.Sentinel(
            [s.split(':') for s in self._sentinels],
            password=self._password,
            socket_timeout=0.1)
        log.info('> master: %s', sentinel.discover_master(self._master))
        log.info('> slaves: %s', sentinel.discover_slaves(self._master))

        log.info('Trying to fetch status info from the master node...')
        master = sentinel.master_for(self._master, socket_timeout=0.1)
        self._log_redis_info(master.info())
    except redis.sentinel.MasterNotFoundError as e:
        log.error('Redis Sentinel check failed: %s - %s', e.__class__, e)

def __init__(
    self,
    sentinels=None,
    master=None,
    password=None,
    db=0,
    default_timeout=300,
    key_prefix=None,
    **kwargs
):
    super().__init__(default_timeout=default_timeout)

    try:
        import redis.sentinel
    except ImportError:
        raise RuntimeError("no redis module found")

    if kwargs.get("decode_responses", None):
        raise ValueError("decode_responses is not supported by RedisCache.")

    sentinels = sentinels or [("127.0.0.1", 26379)]
    # Split out "sentinel_"-prefixed keyword arguments for the Sentinel
    # connections; remaining kwargs go to the managed Redis connections.
    sentinel_kwargs = {
        key[9:]: value
        for key, value in kwargs.items()
        if key.startswith("sentinel_")
    }
    kwargs = {
        key: value
        for key, value in kwargs.items()
        if not key.startswith("sentinel_")
    }

    sentinel = redis.sentinel.Sentinel(
        sentinels=sentinels,
        password=password,
        db=db,
        sentinel_kwargs=sentinel_kwargs,
        **kwargs
    )

    self._write_client = sentinel.master_for(master)
    self._read_clients = sentinel.slave_for(master)

    self.key_prefix = key_prefix or ""

def _create_client(self):
    sentinel_kwargs = {}
    sentinel_kwargs.update(self.sentinel_kwargs)
    sentinel_kwargs.setdefault("password", self.password)

    connection_kwargs = {}
    connection_kwargs.update(self.connection_kwargs)
    connection_kwargs.setdefault("password", self.password)
    if self.db is not None:
        connection_kwargs.setdefault("db", self.db)
        sentinel_kwargs.setdefault("db", self.db)
    if self.socket_timeout is not None:
        connection_kwargs.setdefault("socket_timeout", self.socket_timeout)

    sentinel = redis.sentinel.Sentinel(
        self.sentinels,
        sentinel_kwargs=sentinel_kwargs,
        **connection_kwargs)
    self.writer_client = sentinel.master_for(self.service_name)
    self.reader_client = sentinel.slave_for(self.service_name)

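# --- Added illustration (not part of the snippet above) ---
# Hedged sketch of how the writer/reader split created by _create_client() is
# typically used: writes go to the Sentinel-discovered master, reads may be
# served by a replica. `backend` is a hypothetical object exposing the two
# clients assigned above.
backend.writer_client.set('some-key', 'some-value')
value = backend.reader_client.get('some-key')  # replica may lag briefly behind the master
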
def __init__(self, config: tilecloud_chain.configuration.Redis, **kwargs: Any):
    """Initialize."""
    super().__init__(**kwargs)
    connection_kwargs = {}
    if "socket_timeout" in config:
        connection_kwargs["socket_timeout"] = config["socket_timeout"]
    if "db" in config:
        connection_kwargs["db"] = config["db"]
    if "url" in config:
        self._master = redis.Redis.from_url(config["url"], **connection_kwargs)  # type: ignore
        self._slave = self._master
    else:
        sentinel = redis.sentinel.Sentinel(config["sentinels"], **connection_kwargs)
        self._master = sentinel.master_for(config.get("service_name", "mymaster"))
        self._slave = sentinel.slave_for(config.get("service_name", "mymaster"))
    self._prefix = config["prefix"]
    self._expiration = config["expiration"]

def sentinel_backend(
    cls,
    sentinels: List[str],
    redis_service_name: str,
    database: int,
    password: Optional[str],
    namespace: str,
    ttl: int = DEFAULT_TTL,
) -> "STDataStore":
    """Use this to connect to Redis Sentinel or a single Redis database.

    If more than one server is provided in `sentinels`, use Sentinel mode;
    otherwise fall back to single mode.
    """
    hosts = [to_host(hs) for hs in sentinels]
    if len(hosts) == 1:
        return cls.simple_redis_backend(hosts[0], database, password, namespace, ttl)

    sentinel = redis.sentinel.Sentinel(
        hosts, db=database, password=password, health_check_interval=HCI)
    return cls(sentinel.master_for(redis_service_name), namespace, ttl)  # type: ignore

def connect(url,
            sentinel_class=redis.sentinel.Sentinel, sentinel_options=None,
            client_class=redis.StrictRedis, client_options=None):
    parsed_url = urlparse.urlparse(url)
    if parsed_url.scheme not in ['redis', 'rediss', 'unix', 'redis+sentinel']:
        raise ValueError('Unsupported redis URL scheme: {}'.format(
            parsed_url.scheme))

    if sentinel_options is None:
        sentinel_options = {}
    if client_options is None:
        client_options = {}

    if parsed_url.scheme != 'redis+sentinel':
        return None, client_class.from_url(url, **client_options)

    sentinel_url = parse_sentinel_url(url,
                                      sentinel_options=sentinel_options,
                                      client_options=client_options)

    sentinel = sentinel_class(sentinel_url.hosts,
                              sentinel_kwargs=sentinel_url.sentinel_options,
                              **sentinel_url.client_options)

    client = None
    if sentinel_url.default_client:
        if sentinel_url.default_client.type == 'master':
            client = sentinel.master_for(sentinel_url.default_client.service,
                                         redis_class=client_class)
        else:
            client = sentinel.slave_for(sentinel_url.default_client.service,
                                        redis_class=client_class)

    return sentinel, client

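# --- Added illustration (not part of the snippet above) ---
# Hedged usage sketch for connect(). The exact URL layout is defined by
# parse_sentinel_url(), which is not shown here, so treating the first path
# segment as the service name is an assumption; host, port and service name
# are hypothetical.
sentinel, client = connect('redis+sentinel://localhost:26379/mymaster')
if client is not None:
    client.ping()
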
def __init__(self, env, idle_lifespan):

    # Load settings
    self.config = Config(os.path.dirname(os.path.realpath(__file__)))

    # There may be overriding settings specific to the server we are running on
    servername = socket.gethostname().split('.')[0]
    if servername and os.path.isfile(f'settings/workers/{env}_{servername}.py'):
        self.config.from_object(f'settings.workers.{env}_{servername}.Config')
    else:
        self.config.from_object(f'settings.workers.{env}.Config')

    # Sentry (logging)
    if self.config.get('SENTRY_DSN'):
        sentry_logging = LoggingIntegration(
            level=logging.INFO,
            event_level=logging.WARNING
        )
        self.sentry = sentry_sdk.init(
            self.config.get('SENTRY_DSN'),
            integrations=[
                sentry_logging,
                RedisIntegration()
            ]
        )

    # Mongo database
    self.mongo = pymongo.MongoClient(self.config.get('MONGO_URI'))
    self.db = self.mongo.get_default_database()
    mongoframes.Frame._client = self.mongo
    if self.config.get('MONGO_PASSWORD'):
        self.db.authenticate(
            self.config.get('MONGO_USERNAME'),
            self.config.get('MONGO_PASSWORD')
        )

    # Redis
    if self.config['REDIS_USE_SENTINEL']:
        sentinel = redis.sentinel.Sentinel(
            self.config['REDIS_ADDRESS'],
            db=self.config['REDIS_DB'],
            password=self.config['REDIS_PASSWORD'],
            decode_responses=True
        )
        conn = sentinel.master_for(self.config['REDIS_SENTINEL_MASTER'])
    else:
        conn = redis.StrictRedis(
            host=self.config['REDIS_ADDRESS'][0],
            port=self.config['REDIS_ADDRESS'][1],
            db=self.config['REDIS_DB'],
            password=self.config['REDIS_PASSWORD'],
            decode_responses=True
        )

    super().__init__(
        conn,
        [AnalyzeTask, GenerateVariationTask],
        broadcast_channel='h51_events',
        max_status_interval=self.config['ASSET_WORKER_MAX_STATUS_INTERVAL'],
        max_spawn_time=self.config['ASSET_WORKER_MAX_SPAWN_TIME'],
        sleep_interval=self.config['ASSET_WORKER_SLEEP_INTERVAL'],
        idle_lifespan=idle_lifespan,
        population_control=self.config['ASSET_WORKER_POPULATION_CONTROL'],
        population_spawner=self.config['ASSET_WORKER_POPULATION_SPAWNER']
    )