def __init__(self, dsn: str = "redis://127.0.0.1:6379/0",
             prefix: str = "aiotasks", loop=None):
    super().__init__(loop=loop, prefix=prefix)

    _, password, host, port, db = parse_dsn(dsn, default_port=6379, default_db=0)
    db = int(db)

    self._redis_pub = self._loop_subscribers.run_until_complete(
        aioredis.create_redis(address=(host, port),
                              db=db,
                              password=password,
                              loop=self._loop_subscribers))
    self._redis_sub = self._loop_subscribers.run_until_complete(
        aioredis.create_redis(address=(host, port),
                              db=db,
                              password=password,
                              loop=self._loop_subscribers))
def __init__(self, dsn: str = "redis://127.0.0.1:6379/0",
             prefix: str = "aiotasks", concurrency: int = 5, loop=None):
    super().__init__(loop=loop, prefix=prefix, concurrency=concurrency)

    _, password, host, port, db = parse_dsn(dsn, default_port=6379, default_db=0)
    db = int(db)

    self._redis_consumer = self._loop_delay.run_until_complete(
        aioredis.create_redis(address=(host, port),
                              db=db,
                              password=password,
                              loop=self._loop_delay))
    self._redis_poller = self._loop_delay.run_until_complete(
        aioredis.create_redis(address=(host, port),
                              db=db,
                              password=password,
                              loop=self._loop_delay))
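# Note: the two constructors above depend on a parse_dsn() helper that is not
# shown in this collection. The following is only a minimal sketch of what such
# a helper could look like, assuming it returns a
# (scheme, password, host, port, db) tuple for DSNs of the form
# redis://[:password@]host[:port][/db]; the real helper may differ.
from urllib.parse import urlparse


def parse_dsn(dsn, default_port=6379, default_db=0):
    """Split a Redis DSN into (scheme, password, host, port, db)."""
    parsed = urlparse(dsn)
    host = parsed.hostname or "127.0.0.1"
    port = parsed.port or default_port
    # The path component carries the database number, e.g. "/0".
    path = parsed.path.lstrip("/")
    db = int(path) if path else default_db
    return parsed.scheme, parsed.password, host, port, db


# Example: parse_dsn("redis://:secret@localhost:6380/2")
# returns ("redis", "secret", "localhost", 6380, 2)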
def test_repr(self):
    redis = yield from create_redis(
        ('localhost', self.redis_port), db=1, loop=self.loop)
    self.assertEqual(repr(redis), '<Redis <RedisConnection [db:1]>>')

    redis = yield from create_redis(
        ('localhost', self.redis_port), db=0, loop=self.loop)
    self.assertEqual(repr(redis), '<Redis <RedisConnection [db:0]>>')
def test_migrate(self):
    yield from self.add('my-key', 123)

    conn2 = yield from create_redis(('localhost', 6380), db=2, loop=self.loop)
    yield from conn2.delete('my-key')
    self.assertTrue((yield from self.redis.exists('my-key')))
    self.assertFalse((yield from conn2.exists('my-key')))

    ok = yield from self.redis.migrate('localhost', 6380, 'my-key', 2, 1000)
    self.assertTrue(ok)
    self.assertFalse((yield from self.redis.exists('my-key')))
    self.assertTrue((yield from conn2.exists('my-key')))

    with self.assertRaisesRegex(TypeError, "host .* str"):
        yield from self.redis.migrate(None, 1234, 'key', 1, 23)
    with self.assertRaisesRegex(TypeError, "args .* None"):
        yield from self.redis.migrate('host', '1234', None, 1, 123)
    with self.assertRaisesRegex(TypeError, "dest_db .* int"):
        yield from self.redis.migrate('host', 123, 'key', 1.0, 123)
    with self.assertRaisesRegex(TypeError, "timeout .* int"):
        yield from self.redis.migrate('host', '1234', 'key', 2, None)
    with self.assertRaisesRegex(ValueError, "Got empty host"):
        yield from self.redis.migrate('', '123', 'key', 1, 123)
    with self.assertRaisesRegex(ValueError, "dest_db .* greater equal 0"):
        yield from self.redis.migrate('host', 6379, 'key', -1, 1000)
    with self.assertRaisesRegex(ValueError, "timeout .* greater equal 0"):
        yield from self.redis.migrate('host', 6379, 'key', 1, -1000)
async def connect_to_db():
    logger.info("Connect to Redis Server at %s:%d on database %d %s password",
                REDIS_HOST, REDIS_PORT, REDIS_DB,
                "without" if REDIS_PASSWORD is None else "with")
    redis_coro = create_redis((REDIS_HOST, REDIS_PORT), db=REDIS_DB,
                              password=REDIS_PASSWORD, loop=shared.app.loop)

    logger.info("Connect to PostgreSQL Server at %s:%d on database %s with user %s %s password",
                POSTGRES_HOST, POSTGRES_PORT, POSTGRES_DB, POSTGRES_USER,
                "without" if POSTGRES_PASSWORD is None else "and")
    pg_coro = pg_connect(host=POSTGRES_HOST, port=POSTGRES_PORT,
                         user=POSTGRES_USER, password=POSTGRES_PASSWORD,
                         database=POSTGRES_DB, loop=shared.app.loop)

    shared.redis, shared.postgres = await asyncio.gather(redis_coro, pg_coro,
                                                         loop=shared.app.loop)
    logger.info("Connected")
def run(loop):
    client = yield aioredis.create_redis(('localhost', 6379), loop=loop)
    tasks = []
    fs = []
    s = Sync()
    for i in range(20):
        task_id = produce(i, client)
        tasks.append(task_id)
        print('Sent %s' % i)
        fs.append(s.run(client, i, task_id))
        print('Synced %s' % i)
        if i % 1000 == 0:
            yield tornado.gen.sleep(0.01)
    print('All tasks sent')
    for f in fs:
        yield f
    yield tornado.gen.sleep(60)
    client.close()
    yield client.wait_closed()
def __init__(self, *args, **kwargs):
    super().__init__(*args, **kwargs, formatter=DogbotHelpFormatter())

    # configuration dict
    self.cfg = kwargs.get('cfg', {})
    # aiohttp session used for fetching data
    self.session = aiohttp.ClientSession(loop=self.loop)
    # boot time (for uptime)
    self.boot_time = datetime.datetime.utcnow()

    # aioredis connection
    redis_coroutine = aioredis.create_redis(
        (self.cfg['db']['redis'], 6379), loop=self.loop)
    self.redis = self.loop.run_until_complete(redis_coroutine)

    # asyncpg
    pg = self.cfg['db']['postgres']
    self.database = pg['database']
    self.pgpool = self.loop.run_until_complete(asyncpg.create_pool(**pg))

    # load core extensions
    self._exts_to_load = []
    self.load_extensions('dog/core/ext', 'Core recursive load')
def get_value(digest):
    connection = yield from aioredis.create_redis(
        (config.get("redis")["host"], config.get("redis")["port"]),
        encoding='utf-8')
    value = yield from connection.get(digest)
    connection.close()
    return value
def setup_pubsub(self):
    redis = yield from aioredis.create_redis(
        (settings.REDIS_HOST, settings.REDIS_PORT)
    )
    if self.role == 'stores':
        bind_addr = settings.SUBSCRIBER_ENDPOINTS[self.role]
    else:
        bind_addr = 'tcp://{host}:*'.format(host=settings.INTERNAL_HOST)
    self.subscriber = yield from aiozmq.rpc.serve_pubsub(
        self.handler, subscribe='', bind=bind_addr, log_exceptions=True)
    subscriber_addr = list(self.subscriber.transport.bindings())[0]

    self.publisher = yield from aiozmq.rpc.connect_pubsub()
    if self.role == 'storeclient':
        self.publisher.transport.connect(
            settings.SUBSCRIBER_ENDPOINTS['stores'])

    _key = 'SUBSCRIBER_REGISTERED_{}'.format(subscriber_addr)
    ret = 0
    yield from redis.set(_key, ret)
    while ret != b'1':
        yield from self.publisher.publish(
            'register_sub'
        ).register_sub(
            subscriber_addr, _key
        )
        ret = yield from redis.get(_key)
        yield from asyncio.sleep(0.01)

    self.lock.release()
    redis.close()
def go():
    redis = yield from aioredis.create_redis(
        ('localhost', 6379), loop=loop)

    bloom1 = BloomFilter(redis, 1000, 0.001, 'bloom:1')
    bloom2 = BloomFilter(redis, 1000, 0.001, 'bloom:2')

    yield from bloom1.add('tornado')
    yield from bloom1.add('python')
    yield from bloom2.add('asyncio')
    yield from bloom2.add('python')

    # intersection
    inter_bloom = yield from bloom1.intersection(bloom2)
    in_bloom1 = yield from inter_bloom.contains('python')
    print(in_bloom1)  # True
    in_bloom2 = yield from inter_bloom.contains('asyncio')
    print(in_bloom2)  # False

    # union
    union_bloom = yield from bloom1.union(bloom2)
    in_bloom1 = yield from union_bloom.contains('python')
    print(in_bloom1)  # True
    in_bloom2 = yield from union_bloom.contains('asyncio')
    print(in_bloom2)  # True

    redis.close()
def init_with_loop(self, loop):
    self.redis = loop.run_until_complete(
        aioredis.create_redis((options.redis_host, options.redis_port),
                              loop=loop))
    self.cache = RedisCacheBackend(self.redis)

    es_hosts = [x.strip() for x in options.es_hosts.split(',')]
    self.es = AsyncElasticsearch(hosts=es_hosts, loop=loop)
def pubsub():
    sub = yield from aioredis.create_redis(
        ('localhost', 6379))

    ch1, ch2 = yield from sub.subscribe('channel:1', 'channel:2')
    assert isinstance(ch1, aioredis.Channel)
    assert isinstance(ch2, aioredis.Channel)

    @asyncio.coroutine
    def async_reader(channel):
        while (yield from channel.wait_message()):
            msg = yield from channel.get(encoding='utf-8')
            # ... process message ...
            print("message in {}: {}".format(channel.name, msg))

    tsk1 = asyncio.ensure_future(async_reader(ch1))

    # Or alternatively:

    @asyncio.coroutine
    def async_reader2(channel):
        while True:
            msg = yield from channel.get(encoding='utf-8')
            if msg is None:
                break
            # ... process message ...
            print("message in {}: {}".format(channel.name, msg))

    tsk2 = asyncio.ensure_future(async_reader2(ch2))

    # Publish messages and terminate
    pub = yield from aioredis.create_redis(
        ('localhost', 6379))
    while True:
        channels = yield from pub.pubsub_channels()
        if len(channels) == 2:
            break

    for msg in ("Hello", ",", "world!"):
        for ch in ('channel:1', 'channel:2'):
            yield from pub.publish(ch, msg)
    pub.close()
    sub.close()

    yield from asyncio.sleep(0)
    yield from pub.wait_closed()
    yield from sub.wait_closed()
    yield from asyncio.gather(tsk1, tsk2)
def pubsub():
    sub = yield from aioredis.create_redis(
        ('localhost', 6379))

    ch1, ch2 = yield from sub.subscribe('channel:1', 'channel:2')
    assert isinstance(ch1, aioredis.Channel)
    assert isinstance(ch2, aioredis.Channel)

    @asyncio.coroutine
    def async_reader(channel):
        while (yield from channel.wait_message()):
            msg = yield from channel.get(encoding='utf-8')
            # ... process message ...
            print("message in {}: {}".format(channel.name, msg))

    tsk1 = asyncio.ensure_future(async_reader(ch1))

    # Or alternatively:

    @asyncio.coroutine
    def async_reader2(channel):
        while True:
            msg = yield from channel.get(encoding='utf-8')
            if msg is None:
                break
            # ... process message ...
            print("message in {}: {}".format(channel.name, msg))

    tsk2 = asyncio.ensure_future(async_reader2(ch2))

    # Publish messages and terminate
    pub = yield from aioredis.create_redis(
        ('localhost', 6379))
    while True:
        channels = yield from pub.pubsub_channels('channel:*')
        if len(channels) == 2:
            break

    for msg in ("Hello", ",", "world!"):
        for ch in ('channel:1', 'channel:2'):
            yield from pub.publish(ch, msg)
    pub.close()
    sub.close()

    yield from asyncio.sleep(0)
    yield from pub.wait_closed()
    yield from sub.wait_closed()
    yield from asyncio.gather(tsk1, tsk2)
def connectRedis(self):
    self.objAioRedis = yield from aioredis.create_redis(
        address=self.setAddress,
        loop=self.objAioLoopObj,
        db=self.iDb,
        password=self.strPwd)
    res = yield from self.objAioRedis.subscribe(self.strSubName)
    self.objSubChanel = res[0]
def go():
    pub = yield from aioredis.create_redis(
        ('localhost', 6379))
    sub = yield from aioredis.create_redis(
        ('localhost', 6379))
    res = yield from sub.subscribe('chan:1')
    ch1 = res[0]

    tsk = asyncio.ensure_future(reader(ch1))

    res = yield from pub.publish_json('chan:1', ["Hello", "world"])
    assert res == 1

    yield from sub.unsubscribe('chan:1')
    yield from tsk
    sub.close()
    pub.close()
def go():
    redis = yield from aioredis.create_redis(('localhost', 6379))
    yield from redis.set('my-key', 'value')
    val = yield from redis.get('my-key')
    print(val)
    # optionally closing underlying connection
    redis.close()
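# Most snippets in this collection are pre-async/await coroutines written
# against the old yield-from aioredis API. A minimal sketch of how an example
# such as go() above is typically driven, assuming it is decorated with
# @asyncio.coroutine in its original module:
import asyncio

loop = asyncio.get_event_loop()
loop.run_until_complete(go())
loop.close()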
def run(loop):
    client = yield aioredis.create_redis(('localhost', 6379), loop=loop)
    while True:
        try:
            yield consume(client)
        except KeyboardInterrupt:
            client.close()
            yield client.wait_closed()
            break
def publish(topic, name):
    import aioredis
    client = yield from aioredis.create_redis(('localhost', 6379))
    if __debug__:
        logger.debug('connected to redis server')
    while True:
        values = yield from topic.get()
        for v in values:
            client.publish_json(name, v._asdict())
def go():
    redis = yield from aioredis.create_redis(('localhost', 6379), loop=loop)
    bloom = BloomFilter(redis, 100000, 0.0001)
    yield from bloom.add('python')
    yield from bloom.add('asyncio')
    result = yield from bloom.contains('tornado')
    print(result)
    redis.close()
def subscribe(self, channel_name):
    """
    Create an async Redis client and subscribe to the given PUB/SUB channel.
    Listen to the messages and launch the publish handler.

    :param channel_name: string representing the Redis PUB/SUB channel name
    :return:
    """
    try:
        self._redis = yield aioredis.create_redis(
            (ConfigParser.get('wx.redis.config', 'host'),
             ConfigParser.get('wx.redis.config', 'port')))
        self._redis_pub = yield aioredis.create_redis(
            (ConfigParser.get('wx.redis.config', 'host'),
             ConfigParser.get('wx.redis.config', 'port')))
    except aioredis.MultiExecError:
        print('Failed to connect to Redis Server at {}:{}'.format(
            ConfigParser.get('wx.redis.config', 'host'),
            ConfigParser.get('wx.redis.config', 'port')))
    else:
        channels = yield self._redis.subscribe(channel_name)
        print('Subscribed to "{}" Redis channel'.format(channel_name))
        self._channel = channels[0]
        yield self.listen_redis()
def redis_async(request, event_loop):
    conn = event_loop.run_until_complete(
        aioredis.create_redis(('localhost', 6379), encoding="utf-8", db=10))

    def redis_async_cleanup():
        conn.close()

    request.addfinalizer(redis_async_cleanup)
    return conn
def aioredis_connection():
    if sys.version_info <= (3, 5):
        pytest.skip()
    import aioredis
    connection_coroutine = aioredis.create_redis(('localhost', 6379))
    return connection_coroutine, ring.aioredis
def go():
    redis = yield from aioredis.create_redis(
        ('localhost', 6379))
    yield from redis.set('my-key', 'value')
    val = yield from redis.get('my-key')
    print(val)
    # optionally closing underlying connection
    redis.close()
def init_connections():
    redis_map = dict()
    for p in redis_set:
        r = cls.loads(p)
        redis_map[p] = yield from aioredis.create_redis(
            (r["host"], r["port"]), db=r["db"], password=r.get("password"))
    cls.attach(redis_map, objects)
    redis_completed.send(cls)
def get_async_redis():
    """ initialize an asynchronous redis connection """
    global ASYNCREDIS
    if ASYNCREDIS is None or ASYNCREDIS.closed:  # pragma: no branch
        address = REDIS_PUBSUB["address"]
        db = REDIS_PUBSUB["db"]
        password = REDIS_PUBSUB["password"]
        ASYNCREDIS = yield from aioredis.create_redis(address, db=db, password=password)
    return ASYNCREDIS
def __init__(self, is_coap):
    sub = loop.run_until_complete(
        aioredis.create_redis('redis://redis:6379'))
    if is_coap:
        [ch] = loop.run_until_complete(sub.subscribe('ch:1'))
        loop.create_task(self.sub_command(ch))
    else:
        [ch] = loop.run_until_complete(sub.subscribe('ch:2'))
        loop.create_task(self.sub_status(ch))
def go():
    redis = yield from aioredis.create_redis(
        ('localhost', 6379), loop=loop)
    bloom = BloomFilter(redis, 100000, 0.0001)
    yield from bloom.add('python')
    yield from bloom.add('asyncio')
    result = yield from bloom.contains('tornado')
    print(result)
    redis.close()
def go():
    redis = yield from create_redis(('localhost', 6379))
    queue = Queue('my_async_queue', connection=redis)
    job = yield from queue.enqueue(
        http_client.fetch_page, 'https://www.python.org')
    yield from asyncio.sleep(5)
    result = yield from job.result
    assert '</html>' in result, 'Given content is not a html page'
    print('Well done, Turner!')
    redis.close()
def init_with_loop(self, loop):
    self.redis = loop.run_until_complete(
        aioredis.create_redis(
            (options.redis_host, options.redis_port),
            loop=loop
        )
    )
    self.cache = RedisCacheBackend(self.redis)

    es_hosts = [x.strip() for x in options.es_hosts.split(',')]
    self.es = AsyncElasticsearch(hosts=es_hosts, loop=loop)
def test_release_bad_connection(self):
    pool = yield from create_pool(
        ('localhost', self.redis_port), loop=self.loop)
    yield from pool.acquire()
    other_conn = yield from create_redis(
        ('localhost', self.redis_port), loop=self.loop)
    with self.assertRaises(AssertionError):
        pool.release(other_conn)
def init(loop, app, handler):
    for route in route_map:
        app.router.add_route(*route)
    app.logger.info('Create redis connection')
    app.redis = yield from aioredis.create_redis(('localhost', 6379), loop=loop)
    srv = yield from loop.create_server(handler, '127.0.0.1', 8080)
    return srv
def is_token_in_cache(token):
    """Check token is in cache. """
    conn = yield from aioredis.create_redis(('localhost', 6379), encoding='utf-8')
    try:
        print('Checking token: {}'.format(token))
        res = yield from conn.sismember('access_tokens', token)
        return res
    finally:
        conn.close()
def go():
    redis = yield from aioredis.create_redis(('localhost', 6379))
    yield from redis.mset('key:1', 'value1', 'key:2', 'value2')
    cur = b'0'  # set initial cursor to 0
    while cur:
        cur, keys = yield from redis.scan(cur, match='key:*')
        print("Iteration results:", keys)
    redis.close()
    yield from redis.wait_closed()
def main(host, port):
    logging.info('Got params connection host {0}, port {1}'.format(host, port))
    loop = asyncio.get_event_loop()
    while True:
        title = loop.run_until_complete(get_current_song(host, port))
        if title:
            redis = loop.run_until_complete(create_redis(('localhost', 6379)))
            loop.run_until_complete(redis.publish('CHANNEL', json.dumps(title)))
            loop.close()
            return False
def go():
    redis = yield from aioredis.create_redis(
        ('localhost', 6379))
    yield from redis.delete('foo', 'bar')
    tr = redis.multi_exec()
    fut1 = tr.incr('foo')
    fut2 = tr.incr('bar')
    res = yield from tr.execute()
    res2 = yield from asyncio.gather(fut1, fut2)
    print(res)
    assert res == res2
def sanic_server(loop, app, test_server):
    app.redis = loop.run_until_complete(
        aioredis.create_redis("redis://{}:{}".format(app.config.DB_HOST,
                                                     app.config.DB_PORT),
                              db=TEST_DB, encoding="utf-8"))
    logging.debug("Test server configured with Redis instance: {}".format(
        app.redis))
    loop.run_until_complete(app.redis.flushdb())
    controller.set_db(app.redis)
    return loop.run_until_complete(test_server(app))
def init_with_loop(self, loop, config):
    redis_db = 0
    try:
        redis_db = int(config.get('redis.db'))
    except ValueError:
        print("redis.db must be an integer. Using default value (0)")

    self.redis = loop.run_until_complete(
        aioredis.create_redis(
            (config.get('redis.host') or 'localhost',
             config.get('redis.port') or 6379),
            db=redis_db,
            loop=loop))
def subscribe(self, channel_name):
    """
    Create async redis client and subscribe to the given PUB/SUB channel.
    Listen to the messages and launch publish handler.

    :param channel_name: string representing Redis PUB/SUB channel name
    """
    self._redis = yield aioredis.create_redis(('localhost', 6379))
    channels = yield self._redis.subscribe(channel_name)
    print('Subscribed to "{}" Redis channel.'.format(channel_name))
    self._channel = channels[0]
    yield self.listen_redis()
def call(self, name, *args, **kwargs):
    loop = asyncio.get_event_loop()
    self.client = yield from aioredis.create_redis(("localhost", 6379), loop=loop)
    d = {"fn": name, "uuid": str(uuid.uuid4()), "args": args, "kwargs": kwargs}
    yield from self.client.rpush("fn_%s" % name, json.dumps(d))
    ret = yield from self.client.blpop("fn_result_%s" % d["uuid"], 0)
    ret_val = ret[1].decode("utf-8")
    result = json.loads(ret_val)
    if "error" in result:
        raise Exception(result["error"])
    print("result", result.get("result"))
    return result["result"]
def go():
    redis = yield from aioredis.create_redis(("localhost", 6379))
    yield from redis.delete("foo", "bar")
    tr = redis.multi_exec()
    fut1 = tr.incr("foo")
    fut2 = tr.incr("bar")
    res = yield from tr.execute()
    res2 = yield from asyncio.gather(fut1, fut2)
    print(res)
    assert res == res2
    redis.close()
    yield from redis.wait_closed()
def use_connection(redis=None, **kwargs):
    """Clears the stack and uses the given connection.

    Protects against mixed use of use_connection() and stacked connection
    contexts.
    """
    assert len(_connection_stack) <= 1, \
        'You should not mix Connection contexts with use_connection()'
    release_local(_connection_stack)
    if redis is None:
        redis = yield from create_redis(**kwargs)
    push_connection(redis)
def go():
    redis = yield from aioredis.create_redis(
        ('localhost', 6379))
    yield from redis.delete('foo', 'bar')
    tr = redis.multi_exec()
    fut1 = tr.incr('foo')
    fut2 = tr.incr('bar')
    res = yield from tr.execute()
    res2 = yield from asyncio.gather(fut1, fut2)
    print(res)
    assert res == res2
    redis.close()
    yield from redis.wait_closed()
def __init__(self, io_loop: asyncio.AbstractEventLoop = None):
    super().__init__()
    self.io_loop = io_loop or asyncio.get_event_loop()
    self.sub_client = self.io_loop.run_until_complete(
        aioredis.create_redis((config.get('REDIS', 'host', fallback='localhost'),
                               config.getint('REDIS', 'port', fallback=6379)),
                              db=config.getint('REDIS', 'db', fallback=1)))
    self.redis_client = redis.StrictRedis(db=config.getint('REDIS', 'db', fallback=1),
                                          decode_responses=True)
    self.initialized = False
    self.sub_tasks = list()
    self.sub_channels = list()
    self.channel_router = dict()
    self._register_channel()
def register_sub(self, subscriber_addr, key):
    yield from self._lock.acquire()
    try:
        connections = self._dispatcher.publisher.transport.connections()
        if subscriber_addr in connections:
            # already registered; the lock is released in the finally block
            return
        self._dispatcher.publisher.transport.connect(subscriber_addr)
        redis = yield from aioredis.create_redis(
            (settings.REDIS_HOST, settings.REDIS_PORT))
        yield from redis.set(key, 1)
        redis.close()
    finally:
        self._lock.release()
def main():
    redis = yield from aioredis.create_redis(('localhost', 6379))

    @asyncio.coroutine
    def transaction():
        tr = redis.multi_exec()
        future1 = tr.set('foo', '123')
        future2 = tr.set('bar', '321')
        result = yield from tr.execute()
        assert result == (yield from asyncio.gather(future1, future2))
        return result

    yield from transaction()

    redis.close()
    yield from redis.wait_closed()
def _get_connection(self):
    """Create a new Redis connection.

    For details see the create_redis function in the aioredis module.
    This function is a coroutine.
    """
    connection = yield from aioredis.create_redis(
        self.address,
        db=self.db,
        password=self.password,
        ssl=self.ssl,
        encoding=self.encoding,
        loop=self.loop
    )
    return connection
def __call__(self, request):
    yield from self.validation_part(request)

    ws = web.WebSocketResponse()
    ws.start(request)

    pub = yield from aioredis.create_redis(redis_address)
    sub = yield from aioredis.create_redis(redis_address)

    channel = "thread_{0}_messages".format(self.thread_id)
    sub_channel_list = yield from sub.subscribe(channel)
    zero_sub_channel = sub_channel_list[0]

    # ---------------------------------------------------------------------
    print("Connection opened")
    try:
        # Kick off both coroutines in parallel,
        # and then block until both are completed.
        yield from asyncio.gather(
            self.handle_ws_part(ws, pub, channel),
            self.handle_redis_part(ws, zero_sub_channel)
        )
    except Exception:
        print("")
        print("except Exception:")
        print("--------------------------------")
        traceback.print_exc()
        print("--------------------------------")
    finally:
        sub.close()
        pub.close()
        print("Connection closed")
    return ws
def fake_aioredis(request: FixtureRequest, loop: asyncio.BaseEventLoop):
    r: aioredis.Redis = loop.run_until_complete(
        aioredis.create_redis('redis://localhost:6379', db=2, encoding='utf-8'))
    loop.run_until_complete(r.flushdb())

    redis_objects = filter(
        lambda i: i != db.RedisObject and issubclass(i, db.RedisObject),
        filter(lambda i: type(i) == type, map(lambda i: getattr(db, i), dir(db))))
    for redis_object in redis_objects:
        redis_object.register_db(r)

    yield r

    loop.run_until_complete(r.flushdb())
    r._pool_or_conn.close()
    loop.run_until_complete(r._pool_or_conn.wait_closed())
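# A hypothetical usage sketch for the fake_aioredis fixture above; the test
# name and key are illustrative only, and it assumes the same pytest `loop`
# fixture that fake_aioredis itself requests.
def test_redis_roundtrip(fake_aioredis, loop):
    # write and read back a value through the fixture-provided connection
    loop.run_until_complete(fake_aioredis.set('some-key', 'value'))
    assert loop.run_until_complete(fake_aioredis.get('some-key')) == 'value'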
def register_sub(self, subscriber_addr, key):
    yield from self._lock.acquire()
    try:
        connections = self._dispatcher.publisher.transport.connections()
        if subscriber_addr in connections:
            # already registered; the lock is released in the finally block
            return
        self._dispatcher.publisher.transport.connect(subscriber_addr)
        redis = yield from aioredis.create_redis(
            (settings.REDIS_HOST, settings.REDIS_PORT)
        )
        yield from redis.set(key, 1)
        redis.close()
    finally:
        self._lock.release()
def main():
    redis = yield from aioredis.create_redis(
        ('localhost', 6379))

    @asyncio.coroutine
    def transaction():
        tr = redis.multi_exec()
        future1 = tr.set('foo', '123')
        future2 = tr.set('bar', '321')
        result = yield from tr.execute()
        assert result == (yield from asyncio.gather(future1, future2))
        return result

    yield from transaction()

    redis.close()
    yield from redis.wait_closed()
def publish():
    pub = yield from aioredis.create_redis(('localhost', 6379))

    while not tsk.done():
        # wait for clients to subscribe
        while True:
            subs = yield from pub.pubsub_numsub('channel:1')
            if subs[b'channel:1'] == 1:
                break
            yield from asyncio.sleep(0, loop=loop)
        # publish some messages
        for msg in ['one', 'two', 'three']:
            yield from pub.publish('channel:1', msg)
        # send stop word
        yield from pub.publish('channel:1', STOPWORD)
    pub.close()
    yield from pub.wait_closed()
def run(loop):
    client = yield aioredis.create_redis(('localhost', 6379), loop=loop)
    fs = []
    for i in range(50000):
        task_id = produce(i, client)
        fs.append(sync(client, i, task_id))
        if i % 1000 == 0:
            yield tornado.gen.sleep(0.01)
    print('All tasks sent')
    yield fs
    client.close()
    yield client.wait_closed()
def publish():
    pub = yield from aioredis.create_redis(
        ('localhost', 6379))

    while not tsk.done():
        # wait for clients to subscribe
        while True:
            subs = yield from pub.pubsub_numsub('channel:1')
            if subs[b'channel:1'] == 1:
                break
            yield from asyncio.sleep(0, loop=loop)
        # publish some messages
        for msg in ['one', 'two', 'three']:
            yield from pub.publish('channel:1', msg)
        # send stop word
        yield from pub.publish('channel:1', STOPWORD)
    pub.close()
    yield from pub.wait_closed()
def __init__(self, io_loop: asyncio.AbstractEventLoop = None):
    super().__init__()
    self.io_loop = io_loop or asyncio.get_event_loop()
    self.sub_client = self.io_loop.run_until_complete(
        aioredis.create_redis(
            (config.get('REDIS', 'host', fallback='localhost'),
             config.getint('REDIS', 'port', fallback=6379)),
            db=config.getint('REDIS', 'db', fallback=1)))
    self.redis_client = redis.StrictRedis(db=config.getint('REDIS', 'db', fallback=1),
                                          decode_responses=True)
    self.initialized = False
    self.sub_tasks = list()
    self.sub_channels = list()
    self.channel_router = dict()
    self._register_channel()
def test_script_kill(self):
    script = "while (1) do redis.call('TIME') end"

    other_redis = yield from create_redis(
        ('localhost', self.redis_port), loop=self.loop)

    yield from self.add('key1', 'value')

    fut = other_redis.eval(script, keys=['non-existent-key'], args=[10])
    yield from asyncio.sleep(0, loop=self.loop)
    resp = yield from self.redis.script_kill()
    self.assertTrue(resp)
    with self.assertRaises(ReplyError):
        yield from fut
    with self.assertRaises(ReplyError):
        yield from self.redis.script_kill()
def main():
    loop = asyncio.get_event_loop()

    redis = loop.run_until_complete(
        aioredis.create_redis("/work/target/redis.sock", loop=loop))
    # redis = loop.run_until_complete(
    #     aioredis.create_redis(("127.0.0.1", 3001), loop=loop))

    @asyncio.coroutine
    def dispatch(loop, req):
        n = yield from redis.incr("hello-world-counter")
        req.reply(
            [200, u"OK"],
            {u"Content-Type": b"text/html"},
            "Hello page opened {} times".format(n).encode('utf-8'))

    sock = http.Http('0.0.0.0', 3000)
    aio.start(dispatch, loop=loop)
    loop.run_forever()