async def test_busy_loading_from_pipeline(self, event_loop):
     """
     BusyLoadingErrors should be raised from a pipeline execution
     regardless of the raise_on_error flag.
     """
     client = aredis.StrictRedis(loop=event_loop)
     pipe = await client.pipeline()
     # DEBUG ERROR makes the server reply with the given error text,
     # simulating a -LOADING response without restarting the server.
     await pipe.execute_command('DEBUG', 'ERROR', 'LOADING fake message')
     with pytest.raises(RedisError):
         await pipe.execute()
     pool = client.connection_pool
     # The pipeline must release its connection back to the pool; the
     # single pooled connection still has its reader/writer attached.
     assert not pipe.connection
     assert len(pool._available_connections) == 1
     assert pool._available_connections[0]._writer
     assert pool._available_connections[0]._reader
Exemple #2
0
 async def test_on_connect_error(self, event_loop):
     """
     An error in Connection.on_connect should disconnect from the server
     see for details: https://github.com/andymccurdy/redis-py/issues/368
     """
     # this assumes the Redis server being tested against doesn't have
     # 9999 databases ;)
     bad_connection = aredis.StrictRedis(db=9999, loop=event_loop)
     # an error should be raised on connect
     with pytest.raises(RedisError):
         await bad_connection.info()
     pool = bad_connection.connection_pool
     # Exactly one connection is back in the pool, with its private
     # reader/writer handles still set.
     assert len(pool._available_connections) == 1
     assert pool._available_connections[0]._writer
     assert pool._available_connections[0]._reader
Exemple #3
0
    async def start(self, *args, **kwargs) -> None:
        """Connect to PostgreSQL and Redis, load extensions, then start the bot.

        Raises:
            ConnectionError: If either the PostgreSQL or the Redis
                connection cannot be established.
        """
        try:
            log.debug('[PSQL] Attempting connection.')
            # max_inactive_connection_lifetime=0 disables idle-connection reaping.
            db = await asyncpg.create_pool(**self.config.postgresql,
                                           max_inactive_connection_lifetime=0)
        except Exception as e:
            # Lazy %-args: the message is only built if the record is emitted.
            log.critical('[PSQL] Error while connecting.\n%s\n', e)
            print(
                f'\n[POSTGRESQL] An error occurred while connecting to PostgreSQL: {e}'
            )
            raise ConnectionError
        else:
            log.info('[PSQL] Successful connection.')
            print('\n[POSTGRESQL] Connected to the PostgreSQL database.')
            self.db = db

        try:
            log.debug('[REDIS] Attempting connection.')
            redis = aredis.StrictRedis(**self.config.redis)
            # aredis connects lazily; issue a command to force a real connection.
            await redis.set('connected', 0)
        except (aredis.ConnectionError, aredis.ResponseError):
            log.critical('[REDIS] Error while connecting.')
            print('[REDIS] An error occurred while connecting to Redis.\n')
            raise ConnectionError
        else:
            log.info('[REDIS] Successful connection.')
            print('[REDIS] Connected to Redis.\n')
            self.redis = redis

        # Load every configured extension, reporting (but not propagating)
        # the standard discord.py extension failures.
        for extension in self.config.extensions:
            try:
                self.load_extension(extension)
                log.info('[EXTENSIONS] Loaded - %s', extension)
                print(f'[EXTENSIONS] Loaded - {extension}')
            except commands.ExtensionNotFound:
                log.warning('[EXTENSIONS] Extension not found - %s', extension)
                print(f'[EXTENSIONS] Extension not found - {extension}')
            except commands.NoEntryPointError:
                log.warning('[EXTENSIONS] No entry point - %s', extension)
                print(f'[EXTENSIONS] No entry point - {extension}')
            except commands.ExtensionFailed as error:
                log.warning('[EXTENSIONS] Failed - %s - Reason: %s',
                            extension, error)
                print(f'[EXTENSIONS] Failed - {extension} - Reason: {error}')

        self.add_check(self.command_check)
        await super().start(*args, **kwargs)
 async def test_connection_idle_check(self, event_loop):
     # Reap connections idle for more than 0.2s, checking every 0.1s.
     rs = aredis.StrictRedis(host='127.0.0.1',
                             port=6379,
                             db=0,
                             max_idle_time=0.2,
                             idle_check_interval=0.1)
     await rs.info()
     # One connection created and returned to the pool by INFO.
     assert len(rs.connection_pool._available_connections) == 1
     assert len(rs.connection_pool._in_use_connections) == 0
     conn = rs.connection_pool._available_connections[0]
     last_active_at = conn.last_active_at
     # Sleep past max_idle_time so the idle checker reaps the connection.
     await asyncio.sleep(0.3)
     assert len(rs.connection_pool._available_connections) == 0
     assert len(rs.connection_pool._in_use_connections) == 0
     # The reaped connection was never used again and is fully torn down.
     assert last_active_at == conn.last_active_at
     assert conn._writer is None and conn._reader is None
Exemple #5
0
 def from_spider(cls, spider, name, data_format=None, data_filter=None):
     """Build a queue bound to *spider*'s Redis configuration.

     NOTE(review): takes ``cls`` as first argument — presumably decorated
     with ``@classmethod`` above this view; confirm.
     """
     queue = cls(name=name,
                 data_format=data_format,
                 data_filter=data_filter)
     redis_option = spider.config.get('REDIS')
     redis = aredis.StrictRedis(**redis_option)
     queue.redis = redis
     key_name = name
     # Namespace the queue key per spider so spiders don't collide.
     queue.key = "queue_for_%s_%s" % (spider.name, key_name)
     if data_filter:
         # Optional dedup: a Bloom filter backed by a Redis bit array.
         bloom_filter = BloomFilter(20000, 0.1, array_cls=False)
         array = RedisBitArray(
             redis, "bloom_filter_for_%s_%s" % (spider.name, key_name))
         bloom_filter.set_array(array)
         queue.bloom_filter = bloom_filter
     return queue
 async def test_busy_loading_from_pipeline_immediate_command(
         self, event_loop):
     """
     BusyLoadingErrors should raise from Pipelines that execute a
     command immediately, like WATCH does.
     """
     client = aredis.StrictRedis(loop=event_loop)
     pipe = await client.pipeline()
     # DEBUG ERROR forces the server to reply with a -LOADING error.
     with pytest.raises(BusyLoadingError):
         await pipe.immediate_execute_command('DEBUG', 'ERROR',
                                              'LOADING fake message')
     pool = client.connection_pool
     # Unlike the buffered-pipeline case, the connection is returned to
     # the pool disconnected (no reader/writer attached).
     assert not pipe.connection
     assert len(pool._available_connections) == 1
     assert not pool._available_connections[0]._writer
     assert not pool._available_connections[0]._reader
Exemple #7
0
    def init_engine(self,
                    *,
                    host: str = None,
                    port: int = None,
                    dbname: int = None,
                    passwd: str = "",
                    pool_size: int = None):
        """
        Non-blocking (asyncio) Redis helper: build the connection pool and
        client, falling back to instance defaults for any argument not given.
        Args:
            host: redis host
            port: redis port
            dbname: database name
            passwd: redis password
            pool_size: redis pool size
        Returns:

        """
        host = host or self.host
        port = port or self.port
        dbname = dbname or self.dbname
        passwd = passwd or self.passwd
        pool_size = pool_size or self.pool_size

        # Keep None as-is (no auth); coerce anything else to str.
        passwd = passwd if passwd is None else str(passwd)
        # Responses are decoded (decode_responses=True), so callers do not
        # need to decode them again.
        self.pool = aredis.ConnectionPool(host=host,
                                          port=port,
                                          db=dbname,
                                          password=passwd,
                                          decode_responses=True,
                                          max_connections=pool_size)
        self.redis_db = aredis.StrictRedis(connection_pool=self.pool,
                                           decode_responses=True)

        @atexit.register
        def close_connection():
            """
            Release all connections held by the Redis connection pool at
            interpreter exit.
            Args:

            Returns:

            """
            self.redis_db = None
            if self.pool:
                self.pool.disconnect()
def test_fake_conn_pool_disable_instance(loop):
    """The client must keep working when its pool lacks `connection_kwargs`."""
    client = aredis.StrictRedis(host=DB_SETTINGS['host'],
                                port=DB_SETTINGS['port'],
                                db=0)

    # Get a real connection

    conn = client.connection_pool.get_connection('GET')

    # Replace the original connection pool with one that doesn't
    # have the `connection_kwargs` attribute.

    fake_pool = FakeConnectionPool(conn)
    client.connection_pool = fake_pool
    assert not hasattr(client.connection_pool, 'connection_kwargs')

    # Commands must still succeed against the degraded pool.
    loop.run_until_complete(exercise_redis(client))
Exemple #9
0
        async def open_connection(app_, loop):
            """
            Build the Redis connection pool and client on app startup.

            NOTE(review): ``host``, ``port``, ``dbname``, ``passwd``,
            ``pool_size`` and ``self`` are free variables from the
            enclosing scope (outside this view) — confirm there.
            Args:

            Returns:

            """
            # Responses are decoded (decode_responses=True), so callers do
            # not need to decode them again.
            self.pool = aredis.ConnectionPool(host=host,
                                              port=port,
                                              db=dbname,
                                              password=passwd,
                                              decode_responses=True,
                                              max_connections=pool_size)
            self.redis_db = aredis.StrictRedis(connection_pool=self.pool,
                                               decode_responses=True)
Exemple #10
0
    def __init__(self, bot):
        """Set up the NWN cog: load settings, wire chat relay, start pub/sub."""
        self.bot = bot

        self.settings = fileIO('data/nwn/settings.json', 'load')

        # Chat relay is optional: only listen for messages when a chat
        # channel ID is configured.
        if (self.settings["DISCORD_CHAT_CHANNEL_ID"] is None
                or self.settings["DISCORD_CHAT_CHANNEL_ID"] == ""):
            print(
                "NWN -> NOTICE: Chat Channel ID not set, disabling chat functionality!"
            )
        else:
            self.bot.add_listener(self.check_chat_messages, "on_message")

        self.redisConn = aredis.StrictRedis(
            host=self.settings["REDIS_HOSTNAME"],
            port=self.settings["REDIS_PORT"])
        self.redisSubscribe = self.redisConn.pubsub()
        # Run the pub/sub reader as a background task on the event loop.
        self.redisSubFuture = asyncio.ensure_future(
            self.sub_reader(self.redisSubscribe))
Exemple #11
0
 def __init__(
     self,
     host: str = redis_host,
     port: int = redis_port,
     db: str = redis_db,
     max_idle_time: int = 30,
     idle_check_interval: float = 0.1,
 ):
     """Create a pooled aredis client with the given connection settings.

     NOTE(review): ``db`` is annotated ``str`` but Redis database
     selectors are ints — presumably the module-level ``redis_db``
     default drives the real type; confirm.
     """
     self.db = db
     self.max_idle_time: int = max_idle_time
     self.idle_check_interval: float = idle_check_interval
     self.host: str = host
     self.port: int = port
     self.verified: bool = False
     # Cap the pool at 20 concurrent connections.
     self.pool = aredis.ConnectionPool(host=self.host,
                                       port=self.port,
                                       db=self.db,
                                       max_connections=20)
     self.redis = aredis.StrictRedis(connection_pool=self.pool)
Exemple #12
0
def mock_cluster_resp_nodes(event_loop):
    """Fixture: client whose next reply is a canned CLUSTER NODES payload."""
    r = aredis.StrictRedis(loop=event_loop)
    # Raw bulk-string reply as the server would send it (bytes, one node
    # per line); kept verbatim so parser tests see realistic input.
    response = (b'c8253bae761cb1ecb2b61857d85dfe455a0fec8b 172.17.0.7:7006 '
                b'slave aa90da731f673a99617dfe930306549a09f83a6b 0 '
                b'1447836263059 5 connected\n'
                b'9bd595fe4821a0e8d6b99d70faa660638a7612b3 172.17.0.7:7008 '
                b'master - 0 1447836264065 0 connected\n'
                b'aa90da731f673a99617dfe930306549a09f83a6b 172.17.0.7:7003 '
                b'myself,master - 0 0 2 connected 5461-10922\n'
                b'1df047e5a594f945d82fc140be97a1452bcbf93e 172.17.0.7:7007 '
                b'slave 19efe5a631f3296fdf21a5441680f893e8cc96ec 0 '
                b'1447836262556 3 connected\n'
                b'4ad9a12e63e8f0207025eeba2354bcf4c85e5b22 172.17.0.7:7005 '
                b'master - 0 1447836262555 7 connected 0-5460\n'
                b'19efe5a631f3296fdf21a5441680f893e8cc96ec 172.17.0.7:7004 '
                b'master - 0 1447836263562 3 connected 10923-16383\n'
                b'fbb23ed8cfa23f17eaf27ff7d0c410492a1093d6 172.17.0.7:7002 '
                b'master,fail - 1447829446956 1447829444948 1 disconnected\n')
    return _gen_cluster_mock_resp(r, response, loop=event_loop)
Exemple #13
0
    async def start(self, *args, **kwargs) -> None:
        """Connect to PostgreSQL and Redis, load extensions, then start the bot."""
        try:
            db = await asyncpg.create_pool(**self.config.postgresql)
        except Exception as e:
            print(f'\n[POSTGRESQL] An error occurred while connecting to PostgreSQL: {e}')
        else:
            print('\n[POSTGRESQL] Connected to the PostgreSQL database.')
            self.db = db

        try:
            redis = aredis.StrictRedis(**self.config.redis)
        except aredis.ConnectionError:
            print('[REDIS] An error occurred while connecting to Redis.\n')
        else:
            print('[REDIS] Connected to Redis.\n')
            self.redis = redis

        for extension in self.config.extensions:
            try:
                self.load_extension(extension)
                print(f'[EXTENSIONS] Loaded - {extension}')
            except commands.ExtensionNotFound:
                print(f'[EXTENSIONS] Extension not found - {extension}')
            except commands.NoEntryPointError:
                print(f'[EXTENSIONS] No entry point - {extension}')
            except commands.ExtensionFailed as error:
                print(f'[EXTENSIONS] Failed - {extension} - Reason: {error}')

        # Command names that may not be invoked from direct messages.
        # FIX: the original literal was missing a comma after 'member',
        # which implicitly concatenated it with 'prefix add' into the
        # single bogus entry 'memberprefix add'.
        self.commands_not_allowed_dms = {
            'join', 'play', 'leave', 'skip', 'pause', 'unpause', 'seek', 'volume', 'now_playing', 'queue', 'shuffle', 'clear', 'reverse', 'loop', 'remove', 'move',
            'musicinfo',

            'tag', 'tag raw', 'tag create', 'tag edit', 'tag claim', 'tag alias', 'tag transfer', 'prefix delete', 'tag search', 'tag list', 'tag all', 'tag info',
            'icon', 'server', 'channels', 'member',

            'prefix add', 'prefix delete', 'prefix clear', 'config colour set', 'config colour clear'
        }

        self.add_check(self.can_run_commands)

        await super().start(*args, **kwargs)
Exemple #14
0
def mock_cluster_resp_slots(event_loop):
    """Fixture: client whose next reply is a canned CLUSTER SLOTS payload."""
    r = aredis.StrictRedis(loop=event_loop)
    # Each entry: [start_slot, end_slot, master [ip, port, id], replica ...].
    response = ([
        [
            0, 5460,
            [b'172.17.0.2', 7000, b'90406a8afa09afb6b4aa614edc32b5d1c0eb22aa'],
            [b'172.17.0.2', 7003, b'0c8f3cd0baf30357fc2f6e871f68f7d423aac931']
        ],
        [
            10923, 16383,
            [b'172.17.0.2', 7002, b'cc8417fdb2fef950092d8e310f521d8296293e96'],
            [b'172.17.0.2', 7005, b'da700b467f4931b4241024a74bb858695304012b']
        ],
        [
            5461, 10922,
            [b'172.17.0.2', 7001, b'cab54cba256f159c1400ee80e29c37f256f46580'],
            [b'172.17.0.2', 7004, b'f0674d2b02c7c0432c9f2bf0108255aaf20179be']
        ]
    ])
    return _gen_cluster_mock_resp(r, response, loop=event_loop)
Exemple #15
0
async def clear_redis_cache(db, config):
    """Flush every key in the given Redis database.

    Arguments:
        db {int} -- Redis database ID
        config {dict} -- Redis configuration parameters

    Raises:
        RuntimeError: Raised if clearing the cache produces an error.

    Returns:
        {bool} -- True if cache was cleared.
    """
    # Imported lazily so the module does not require aredis at import time.
    import aredis

    try:
        connection = aredis.StrictRedis(db=db, **config)
        await connection.flushdb()
    except Exception as e:
        # `from None` hides the original traceback from the caller.
        raise RuntimeError(f"Error clearing cache: {str(e)}") from None
    return True
Exemple #16
0
async def process_pr_events(pr_events):
    """
    PR Events Stream Processor
    Manages Pull Request Monitoring from Open to Close

    FIX: the Redis client is now created once, outside the event loop;
    the original re-instantiated aredis.StrictRedis for every event.
    (aredis clients are lazy, so this does not change connection timing.)
    """
    client = aredis.StrictRedis(host='localhost', port=6379)
    async for pr_event in pr_events:
        # Persist the latest raw event for this PR id.
        await client.set(pr_event.payload.pull_request.id, pr_event.dumps())
        if pr_event.type == "PullRequestEvent":
            ### produce to closed pr topic
            if pr_event.payload.action == 'closed':
                await client.set(pr_event.payload.pull_request.id, pr_event.dumps())
                await pr_closed_topic.send(value=pr_event.payload.pull_request.id)
            ### store open time
            elif pr_event.payload.action == 'opened':
                await client.set(str(pr_event.payload.pull_request.id) + 'opentime', pr_event.created_at)
            ### all pr activity incrementer
            else:
                await client.incr(str(pr_event.payload.pull_request.id) + 'events')
        ### review incrementer
        elif pr_event.type == "PullRequestReviewEvent":
            await client.incr(str(pr_event.payload.pull_request.id) + 'reviews')
Exemple #17
0
 async def acquire(self):
     """
     Take a connection from the pool.

     Returns:
         asyncredis wrapper around the acquired client, or None when no
         client could be created.

     Raises:
         ConnectionError: If the pool is closed or missing.
         ProviderError: For any other acquisition failure.
     """
     db = None
     self._connection = None
     # Take a connection from the pool.
     try:
         if not self._pool:
             # NOTE(review): when _pool is falsy this dereferences it and
             # raises AttributeError (surfaced below as ProviderError);
             # `await self.connect()` was probably intended — confirm.
             await self._pool.connect()
         self._connection = aredis.StrictRedis(connection_pool=self._pool)
     except (aredis.exceptions.ConnectionError,
             aredis.exceptions.RedisError) as err:
         raise ConnectionError(
             "Redis Pool is closed or does not exist: {}".format(str(err)))
     except Exception as err:
         # FIX: removed the unreachable `return False` that followed this
         # raise in the original.
         raise ProviderError("Redis Pool Acquire Error: {}".format(
             str(err)))
     if self._connection:
         db = asyncredis(connection=self._connection, pool=self)
     return db
Exemple #18
0
async def updates_generator(request, *endpoints):
    """Yield server-sent-event strings for channel updates until the client disconnects."""
    pubsub = aredis.StrictRedis().pubsub()
    for endpoint in endpoints:
        await pubsub.subscribe(f"sunflower:channel:{endpoint}:updates")
    while True:
        # Stop streaming as soon as the HTTP client goes away.
        client_disconnected = await request.is_disconnected()
        if client_disconnected:
            print(datetime.now(), "Disconnected")
            break
        message = await pubsub.get_message()
        if message is None:
            continue
        redis_data = message.get("data")
        if redis_data == str(NotifyChangeStatus.UNCHANGED.value).encode():
            # ":" line — presumably an SSE keep-alive comment; confirm.
            yield ":\n\n"
            continue
        if redis_data != str(NotifyChangeStatus.UPDATED.value).encode():
            continue
        # Channel name format: "sunflower:channel:<endpoint>:updates".
        redis_channel = message.get("channel").decode()
        channel_endpoint = redis_channel.split(":")[2]
        data_to_send = {"channel": channel_endpoint, "status": "updated"}
        yield f'data: {json.dumps(data_to_send)}\n\n'
Exemple #19
0
 def __init__(
     self,
     proxy_url=None,
     grab_hk=False,
     redis_host="127.0.0.1",
     redis_port=6379,
     redis_password="",
     redis_db=0,
     max_keep=float('inf')
 ):
     """
     proxy_url : a proxy is required when crawling Hong Kong proxy sources
     grab_hk : whether to crawl Hong Kong proxies; defaults to False
     """
     self.name = "engin"
     self.rdm = aredis.StrictRedis(
         host=redis_host, port=redis_port, password=redis_password, db=redis_db
     )
     # Upper bound on how many proxies to keep (unbounded by default).
     self.max_keep = max_keep
     self.grab_hk = grab_hk
     self.proxy_url = proxy_url
     self.sites = self.load_default_sites()
     # Limit concurrent crawling tasks to 10.
     self.sem = asyncio.Semaphore(10)
Exemple #20
0
async def setup_io():
    """
    Set up all IO used by the scheduler.

    :return A tuple of awaitable tasks
    """
    s3 = boto3.client(
        's3',
        settings.AWS_DEFAULT_REGION,
        config=botocore.client.Config(max_pool_connections=settings.MAX_TASKS))
    # One underlying Kafka producer, wrapped for three separate topics.
    producer = Producer({'bootstrap.servers': settings.KAFKA_HOSTS})
    metadata_producer = AsyncProducer(producer, 'image_metadata_updates')
    retry_producer = AsyncProducer(producer, 'inbound_images')
    link_rot_producer = AsyncProducer(producer, 'link_rot')
    redis_client = aredis.StrictRedis(host=settings.REDIS_HOST)
    connector = aiohttp.TCPConnector(ssl=False)
    # HTTP session whose request rate is throttled via Redis counters.
    aiosession = RateLimitedClientSession(
        aioclient=aiohttp.ClientSession(connector=connector),
        redis=redis_client)
    stats = StatsManager(redis_client)
    # Bind all IO into the image-processing callable used per task.
    image_processor = partial(process_image,
                              session=aiosession,
                              persister=partial(save_thumbnail_s3,
                                                s3_client=s3),
                              stats=stats,
                              metadata_producer=metadata_producer,
                              retry_producer=retry_producer,
                              rot_producer=link_rot_producer)
    consumer_settings = {
        'bootstrap.servers': settings.KAFKA_HOSTS,
        'group.id': 'image_handlers',
        'auto.offset.reset': 'earliest'
    }
    scheduler = CrawlScheduler(consumer_settings, redis_client,
                               image_processor)
    # Awaitables for the caller to schedule concurrently.
    return (metadata_producer.listen(), retry_producer.listen(),
            link_rot_producer.listen(), scheduler.schedule_loop())
Exemple #21
0
async def process_pr_closed(closed_pr_ids):
    """
    PR Closed Events Consumer
    Looks up PR Keys for Closed PR
    Moves Values from Redis to Postgres

    FIX: the Redis client is created once, outside the loop; the original
    re-instantiated aredis.StrictRedis for every closed PR id (aredis
    clients are lazy, so behavior is unchanged).
    """
    client = aredis.StrictRedis(host='localhost', port=6379)
    async for pr_id in closed_pr_ids:
        ### Redis reads + processing
        opentime = await client.get(str(pr_id) + 'opentime')
        if opentime is not None:
            opentime = parse(opentime)
        num_events = await client.get(str(pr_id) + 'events')
        if num_events:
            num_events = int(num_events)
        num_review = await client.get(str(pr_id) + 'reviews')
        if num_review:
            num_review = int(num_review)
        close_event = await client.get(pr_id)
        close_event = json.loads(close_event)
        closetime = parse(close_event['created_at'])
        ### postgres writes
        async with create_engine(user='******', database='gitdb', host=secrets.POSTGRES_BROKER) as engine:
            async with engine.acquire() as conn:
                await conn.execute(
                    "insert into pull_requests (id,num,repo,pr_diff_url,created_at,closed_at,additions,changed_files,commits,deletions,merged,num_reviews_requested,num_review_comments) values (%s, %s, %s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s)",
                    (pr_id, 0,
                     close_event['repo']['name'],
                     close_event['payload']['pull_request']['diff_url'],
                     opentime, closetime,
                     close_event['payload']['pull_request']['additions'],
                     close_event['payload']['pull_request']['changed_files'],
                     close_event['payload']['pull_request']['commits'],
                     close_event['payload']['pull_request']['deletions'],
                     close_event['payload']['pull_request']['merged'],
                     num_review, num_events))
Exemple #22
0
 async def connect(self, **kwargs):
     """
     __init async db initialization

     Builds the connection pool from the DSN, then creates the client and
     fetches server properties via INFO. Sets _connected/_initialized_on
     on success; raises ProviderError on any failure.
     """
     self.logger.debug("Asyncio Redis Pool: Connecting to {}".format(
         self._dsn))
     try:
         self._pool = aredis.ConnectionPool.from_url(
             self._dsn,
             connection_class=aredis.Connection,
             max_connections=self._max_queries,
             connect_timeout=self._timeout,
             decode_responses=True,
             retry_on_timeout=True,
             loop=self._loop,
             **kwargs,
         )
     except (aredis.exceptions.ConnectionError,
             aredis.exceptions.RedisError) as err:
         raise ProviderError("Connection error to Redis: {}".format(
             str(err)))
     except Exception as err:
         raise ProviderError("Unable to connect to Redis: {}".format(
             str(err)))
     # is connected
     if self._pool:
         try:
             # create the connection and get the properties:
             self._connection = aredis.StrictRedis(
                 connection_pool=self._pool)
             # INFO doubles as a connectivity check here.
             self.properties = await self._connection.info()
         except Exception as err:
             raise ProviderError("Unable to connect to Redis: {}".format(
                 str(err)))
         self._connected = True
         self._initialized_on = time.time()
Exemple #23
0
async def setup_io():
    """
    Set up all IO used by the scheduler.

    Returns a tuple of awaitables: the metadata producer listener and the
    scheduler loop.
    """
    kafka_client = kafka_connect()
    s3 = boto3.client(
        's3',
        settings.AWS_DEFAULT_REGION,
        config=botocore.client.Config(max_pool_connections=settings.MAX_TASKS))
    metadata_updates = kafka_client.topics['image_metadata_updates'] \
        .get_producer(use_rdkafka=True)
    producer = MetadataProducer(producer=metadata_updates)
    redis_client = aredis.StrictRedis(host=settings.REDIS_HOST)
    # HTTP session throttled via Redis-backed rate limiting.
    aiosession = RateLimitedClientSession(aioclient=aiohttp.ClientSession(),
                                          redis=redis_client)
    stats = StatsManager(redis_client)
    # Bind all IO into the per-image processing callable.
    image_processor = partial(process_image,
                              session=aiosession,
                              persister=partial(save_thumbnail_s3,
                                                s3_client=s3),
                              stats=stats,
                              metadata_producer=producer)
    scheduler = CrawlScheduler(kafka_client, redis_client, image_processor)
    return producer.listen(), scheduler.schedule_loop()
Exemple #24
0
async def check_redis(db, config):
    """Ensure Redis is running before starting server.

    Arguments:
        db {int} -- Redis database ID
        config {dict} -- Redis configuration parameters

    Raises:
        RuntimeError: Raised if Redis is not running.

    Returns:
        {bool} -- True if redis is running.
    """
    # Imported lazily so the module does not require aredis at import time.
    import aredis

    instance = aredis.StrictRedis(db=db, **config)
    host, port = config["host"], config["port"]
    try:
        # Any round-trip command proves the server is reachable.
        await instance.echo("hyperglass test")
    except Exception:
        raise RuntimeError(
            f"Redis isn't running at: {host}:{port}") from None
    return True
Exemple #25
0
    def __init__(self, spider):
        """Default engine configuration for *spider*.

        Sets logging, Redis, queueing, middleware and pipeline defaults,
        then attempts to create an aredis client, falling back to ``None``
        when construction fails.
        """
        self.spider = spider
        self.response_filter_code = [404]

        self.log_level = logging.DEBUG
        self.log_format = LOG_FORMAT
        self.log_datefmt = LOG_DATEFMT

        self.redis_host = 'localhost'
        self.redis_port = 6379

        self.request_queue = PriorityQueue()
        self.request_filter = set()
        self.request_filter_key = '{}:{}'.format(spider.name, 'filter')
        self._request_batch_size = 10
        self._request_timeout = 30
        # Attributes copied from request objects when (de)serializing.
        self.request_keys = [
            'url', 'method', 'data', 'json', 'headers', 'cookies',
            'allow_redirects'
        ]

        self.request_middlewares = [spider.request_finger]
        self.response_middlewares = [spider.response_filter]
        self.item_pipelines = [spider.pipeline]

        self.user_agent_list = USER_AGENT_LIST
        self.aiohttp_clientsession = None
        self.clear_filter = True
        self.redis_msg = False

        try:
            self.redis_client = aredis.StrictRedis(host=self.redis_host,
                                                   port=self.redis_port)
        except Exception:
            # FIX: was a bare `except:`, which also swallowed SystemExit
            # and KeyboardInterrupt; narrowed to Exception.
            self.redis_client = None
            self.redis_msg = False
Exemple #26
0
    def __init__(self, config_file, *args, **kwargs):
        """Initialize the qtbot: load API keys, sessions, Redis, and cogs."""
        self.config_file = config_file
        self.description = 'qtbot is a big qt written in python3 and love.'
        # NOTE(review): `('league')` is a plain str, not a 1-tuple —
        # a trailing comma may have been intended; confirm against usage.
        self.do_not_load = ('league')

        with open(self.config_file) as f:
            self.api_keys = json.load(f)

        self.token = self.api_keys['discord']

        super().__init__(command_prefix=self.get_prefix,
                         description=self.description,
                         pm_help=None,
                         case_insensitive=True,
                         *args,
                         **kwargs)

        self.aio_session = aiohttp.ClientSession(loop=self.loop)
        # self.rune_client = lolrune.AioRuneClient()
        self.redis_client = aredis.StrictRedis(host='localhost',
                                               decode_responses=True)
        # Cog module names discovered from the cogs/ directory.
        self.startup_extensions = [x.stem for x in Path('cogs').glob('*.py')]
        self.loop.run_until_complete(self.create_db_pool())
        self.loop.run_until_complete(self.load_all_prefixes())
Exemple #27
0
def getRedis():
    """Return a new aredis.StrictRedis client built from module constants."""
    connection_kwargs = {
        'host': REDIS_HOST,
        'port': REDIS_PORT,
        'password': REDIS_PASSWORD,
        'db': REDIS_DB,
    }
    return aredis.StrictRedis(**connection_kwargs)
Exemple #28
0
env = Env()
# Read .env into os.environ
if os.path.exists(".env"):
    env.read_env()
# Stack-specific env file overrides/extends the defaults.
if os.path.exists(".env.ceylon"):
    env.read_env(".env.ceylon", recurse=False)
else:
    print("path not exits", os.path.abspath(".env.ceylon"))
# Initialize Redis ENV variables
# NOTE(review): `stack_name` is defined outside this view — confirm.
stack_name_prefix = f"{stack_name}_" if stack_name != "" else ""
redis_host = os.environ.get(f'{stack_name_prefix}REDIS_HOST', '127.0.0.1')
redis_port = os.environ.get(f'{stack_name_prefix}REDIS_PORT', '6379')
redis_db = os.environ.get(f'{stack_name_prefix}REDIS_DB', '0')

# Module-level async Redis client shared by the code below.
client = aredis.StrictRedis(host=redis_host,
                            port=int(redis_port),
                            db=int(redis_db))

class SysLogger(object):
    def __init__(self, name="Agent"):
        self.name = name
        self.terminal = sys.stdout
        self.r = redis.Redis(host=redis_host, port=redis_port, db=redis_db)
        self.channel_name = f"{self.name}sys_log"
        self.terminal.write(f"Channel Name ::: {self.channel_name}")

    def write(self, message):
        self.terminal.write(message)
        self.r.publish(
            self.channel_name,
Exemple #29
0
def mock_cluster_resp_int(event_loop):
    """Fixture: client whose next reply is the integer response ``2``."""
    client = aredis.StrictRedis(loop=event_loop)
    return _gen_cluster_mock_resp(client, b'2', loop=event_loop)
Exemple #30
0
def r(event_loop):
    """Fixture returning a StrictRedis client bound to the test event loop."""
    client = aredis.StrictRedis(loop=event_loop)
    return client