async def test_redis_sentinel_failure(create_pool, cancel_remaining_task, mocker):
    settings = RedisSettings()
    settings.host = [('localhost', 6379), ('localhost', 6379)]
    settings.sentinel = True
    with pytest.raises(ResponseError, match='unknown command `SENTINEL`'):
        await create_pool(settings)

async def test_redis_sentinel_failure():
    settings = RedisSettings()
    settings.host = [('localhost', 6379), ('localhost', 6379)]
    settings.sentinel = True
    try:
        pool = await create_pool(settings)
        await pool.ping('ping')
    except Exception as e:
        assert 'unknown command `SENTINEL`' in str(e)

async def test_redis_sentinel_failure(create_pool):
    """
    FIXME: this is currently causing 3 "Task was destroyed but it is pending!" warnings
    """
    settings = RedisSettings()
    settings.host = [('localhost', 6379), ('localhost', 6379)]
    settings.sentinel = True
    try:
        pool = await create_pool(settings)
        await pool.ping('ping')
    except Exception as e:
        assert 'unknown command `SENTINEL`' in str(e)

def parse_redis_settings(cls, v):
    if v is None:
        return
    if RedisSettings.__module__ != 'arq.connections':
        raise RuntimeError(f'arq must be installed to use redis, redis_settings set to {v!r}')
    return RedisSettings.from_dsn(v)

def test_settings_changed():
    settings = RedisSettings(password='123')
    assert settings.password == '123'
    assert (
        "<RedisSettings address=('localhost', 6379) database=0 password=123 ssl=None conn_timeout=1 conn_retries=5 "
        "conn_retry_delay=1 sentinel=False sentinel_master=mymaster>"
    ) == str(settings)

def test_settings_changed(): settings = RedisSettings(port=123) assert settings.port == 123 assert ( "RedisSettings(host='localhost', port=123, database=0, password=None, ssl=None, conn_timeout=1, " "conn_retries=5, conn_retry_delay=1, sentinel=False, sentinel_master='mymaster')" ) == str(settings)
async def main():
    redis = await create_pool(RedisSettings())

    # no id, random id will be generated
    job1 = await redis.enqueue_job('the_task')
    print(job1)
    """
    > <arq job 99edfef86ccf4145b2f64ee160fa3297>
    """

    # random id again, again the job will be enqueued and a job will be returned
    job2 = await redis.enqueue_job('the_task')
    print(job2)
    """
    > <arq job 7d2163c056e54b62a4d8404921094f05>
    """

    # custom job id, job will be enqueued
    job3 = await redis.enqueue_job('the_task', _job_id='foobar')
    print(job3)
    """
    > <arq job foobar>
    """

    # same custom job id, job will not be enqueued and enqueue_job will return None
    job4 = await redis.enqueue_job('the_task', _job_id='foobar')
    print(job4)
    """
    > None
    """

async def main():
    redis = await create_pool(
        RedisSettings(),
        job_serializer=msgpack.packb,
        job_deserializer=lambda b: msgpack.unpackb(b, raw=False),
    )
    await redis.enqueue_job('the_task')

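A worker can only decode these jobs if it is configured with the same serializer pair. A minimal sketch of the matching worker settings, assuming a hypothetical `the_task` coroutine (the serializer attributes mirror the ones passed to create_pool above):

import msgpack


async def the_task(ctx):
    # hypothetical task body
    ...


class WorkerSettings:
    functions = [the_task]
    # must mirror the job_serializer / job_deserializer passed to create_pool
    job_serializer = msgpack.packb
    job_deserializer = lambda b: msgpack.unpackb(b, raw=False)
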
async def startup():
    global task_queue
    task_queue = await arq.create_pool(
        RedisSettings(host="redis"),
        job_serializer=lambda x: serialize(x).to_buffer().to_pybytes(),
        job_deserializer=deserialize,
    )

def test_settings_changed():
    settings = RedisSettings(port=123)
    assert settings.port == 123
    assert (
        '<RedisSettings host=localhost port=123 database=0 password=None ssl=None conn_timeout=1 conn_retries=5 '
        'conn_retry_delay=1 sentinel=False sentinel_master=mymaster>'
    ) == str(settings)

def __init__(self): Borg.__init__(self) database_user = get_docker_secret("seraphsix_pg_db_user", default="seraphsix") database_password = get_docker_secret("seraphsix_pg_db_pass") database_host = get_docker_secret("seraphsix_pg_db_host", default="localhost") database_port = get_docker_secret("seraphsix_pg_db_port", default="5432") database_name = get_docker_secret("seraphsix_pg_db_name", default="seraphsix") self.database_conns = get_docker_secret("seraphsix_pg_db_conns", default=DB_MAX_CONNECTIONS, cast_to=int) database_auth = f"{database_user}:{database_password}" self.database_url = f"postgres://{database_auth}@{database_host}:{database_port}/{database_name}" redis_password = get_docker_secret("seraphsix_redis_pass") redis_host = get_docker_secret("seraphsix_redis_host", default="localhost") redis_port = get_docker_secret("seraphsix_redis_port", default="6379") self.redis_url = f"redis://:{redis_password}@{redis_host}:{redis_port}" self.arq_redis = RedisSettings.from_dsn(f"{self.redis_url}/1") self.destiny = DestinyConfig() self.the100 = The100Config() self.twitter = TwitterConfig() self.discord_api_key = get_docker_secret("discord_api_key") self.home_server = get_docker_secret("home_server", cast_to=int) self.log_channel = get_docker_secret("home_server_log_channel", cast_to=int) self.reg_channel = get_docker_secret("home_server_reg_channel", cast_to=int) self.enable_activity_tracking = get_docker_secret( "enable_activity_tracking", cast_to=bool) self.flask_app_key = (os.environb[b"FLASK_APP_KEY"].decode( "unicode-escape").encode("latin-1")) self.activity_cutoff = get_docker_secret("activity_cutoff") if self.activity_cutoff: self.activity_cutoff = datetime.strptime( self.activity_cutoff, "%Y-%m-%d").astimezone(tz=pytz.utc) self.root_log_level = get_docker_secret("root_log_level", default=ROOT_LOG_LEVEL, cast_to=str) bucket_kwargs = { "redis_pool": ConnectionPool.from_url(self.redis_url), "bucket_name": "ratelimit", } destiny_api_rate = RequestRate(20, Duration.SECOND) self.destiny_api_limiter = Limiter(destiny_api_rate, bucket_class=RedisBucket, bucket_kwargs=bucket_kwargs)
class AgentSettings:
    functions = [invoke_automation]
    queue_name = config.settings.agent_name
    job_timeout = config.settings.job_timeout
    max_jobs = config.settings.max_jobs
    redis_settings = RedisSettings(
        host=config.settings.redis_host,
        port=config.settings.redis_port,
        password=config.settings.redis_password,
    )

class WorkerSettings:
    functions = [hello_world]
    on_startup = startup
    on_shutdown = shutdown
    ctx = {"config": app_config}
    max_jobs = 4
    redis_settings = RedisSettings(host=app_config.redis_host, database=app_config.redis_db)

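A settings class like this is normally handed to the arq CLI (`arq <module path>.WorkerSettings`), but the worker can also be started programmatically. A short sketch under that assumption; the `myapp.worker` module path in the comment is hypothetical:

from arq.worker import run_worker

if __name__ == '__main__':
    # equivalent to running `arq myapp.worker.WorkerSettings` on the command line
    run_worker(WorkerSettings)
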
async def test_enqueue_job_nondefault_queue(worker):
    """Test initializing arq_redis with a queue name, and the worker using it."""
    arq_redis = await create_pool(RedisSettings(), default_queue_name='test_queue')
    await test_enqueue_job(
        arq_redis,
        lambda functions, **_: worker(functions=functions, arq_redis=arq_redis, queue_name=None),
        queue_name=None,
    )

def parse_redis_settings(cls, v):
    conf = urlparse(v)
    return RedisSettings(
        host=conf.hostname,
        port=conf.port,
        password=conf.password,
        database=int((conf.path or '0').strip('/')),
    )

async def test_remain_keys(arq_redis: ArqRedis, worker, create_pool):
    redis2 = await create_pool(RedisSettings())
    await arq_redis.enqueue_job('foobar', _job_id='testing')
    assert sorted(await redis2.keys('*')) == [b'arq:job:testing', b'arq:queue']
    worker: Worker = worker(functions=[foobar])
    await worker.main()
    assert sorted(await redis2.keys('*')) == [b'arq:queue:health-check', b'arq:result:testing']
    await worker.close()
    assert sorted(await redis2.keys('*')) == [b'arq:result:testing']

class TimingWorkerSettings:
    on_startup = startup
    on_shutdown = shutdown
    queue_name = ArqQueue.timing.value
    redis_settings = RedisSettings(**settings.ARQ)
    cron_jobs = [
        cron(timing_monitor, hour=0, minute=0),
        # cron(timing_, minute={x for x in range(0, 60, 3)})
    ]

def parse_redis_settings(cls, v):
    if v is None:
        return
    if RedisSettings.__module__ != 'arq.connections':
        raise RuntimeError(f'arq must be installed to use redis, redis_settings set to {v!r}')
    conf = urlparse(v)
    return RedisSettings(
        host=conf.hostname,
        port=conf.port,
        password=conf.password,
        database=int((conf.path or '0').strip('/')),
    )

async def connect_redis(self):
    logger.info(
        "Connecting to redis on host "
        f"{RedisConfig().redis_host}:{RedisConfig().redis_port}"
    )
    settings = RedisSettings(
        host=RedisConfig().redis_host, port=RedisConfig().redis_port
    )
    self.redis = await create_pool(settings)
    logger.info(f"Connected to redis server with settings {settings}.")

class WorkerSettings:
    cron_jobs = []
    functions = []
    job_timeout = 10  # seconds
    max_jobs = 10
    max_tries = 2
    on_shutdown = shutdown
    on_startup = startup
    poll_delay = 1  # second
    redis_settings = RedisSettings.from_dsn(settings.redis_dsn)

async def main():
    redis = await create_pool(RedisSettings())

    # deferred by 10 seconds
    await redis.enqueue_job('the_task', _defer_by=10)

    # deferred by 1 minute
    await redis.enqueue_job('the_task', _defer_by=timedelta(minutes=1))

    # deferred until jan 28th 2032, you'll be waiting a long time for this...
    await redis.enqueue_job('the_task', _defer_until=datetime(2032, 1, 28))

class AgentSettings: """ Settings used by arq, for more info see: https://arq-docs.helpmanual.io/ """ functions = [invoke_automation] queue_name = config.settings.agent_name job_timeout = config.settings.job_timeout max_jobs = config.settings.max_jobs redis_settings = RedisSettings(host=config.settings.redis_host, port=config.settings.redis_port, password=config.settings.redis_password)
async def test_redis_success_log(caplog, create_pool):
    caplog.set_level(logging.INFO)
    settings = RedisSettings()
    pool = await create_pool(settings)
    assert 'redis connection successful' not in [r.message for r in caplog.records]
    pool.close()
    await pool.wait_closed()

    pool = await create_pool(settings, retry=1)
    assert 'redis connection successful' in [r.message for r in caplog.records]
    pool.close()
    await pool.wait_closed()

class WorkerSettings: redis_settings = RedisSettings(host="redis") functions = [predict] job_serializer = lambda x: serialize(x).to_buffer().to_pybytes() job_deserializer = deserialize # async def main(): # redis = await arq.create_pool(RedisSettings(host="cookiecutter-redis")) # if __name__ == "__main__": # asyncio.get_event_loop().run_until_complete(main())
async def init() -> None:
    # initialize redis
    await AsyncRedisUtil.init(**settings.REDIS)
    # initialize arq
    arq = await create_pool(RedisSettings(**settings.ARQ))
    # initialize admin_app
    admin_app.init(user_model='Admin', admin_secret=settings.ADMIN_SECRET, tortoise_app='models', site=site)
    for app in [main_app, *sub_apps]:
        app.arq = arq

async def test_redis_log(create_pool):
    redis = await create_pool(RedisSettings())
    await redis.flushall()
    await redis.set(b'a', b'1')
    await redis.set(b'b', b'2')

    log_msgs = []

    def _log(s):
        log_msgs.append(s)

    await log_redis_info(redis, _log)
    assert len(log_msgs) == 1
    assert re.search(r'redis_version=\d\.', log_msgs[0]), log_msgs
    assert log_msgs[0].endswith(' db_keys=2')

def __init__(self):
    Borg.__init__(self)
    database_user = get_docker_secret('seraphsix_pg_db_user', default='seraphsix')
    database_password = get_docker_secret('seraphsix_pg_db_pass')
    database_host = get_docker_secret('seraphsix_pg_db_host', default='localhost')
    database_port = get_docker_secret('seraphsix_pg_db_port', default='5432')
    database_name = get_docker_secret('seraphsix_pg_db_name', default='seraphsix')
    self.database_conns = get_docker_secret('seraphsix_pg_db_conns', default=DB_MAX_CONNECTIONS, cast_to=int)

    database_auth = f"{database_user}:{database_password}"
    self.database_url = f"postgres://{database_auth}@{database_host}:{database_port}/{database_name}"

    redis_password = get_docker_secret('seraphsix_redis_pass')
    redis_host = get_docker_secret('seraphsix_redis_host', default='localhost')
    redis_port = get_docker_secret('seraphsix_redis_port', default='6379')
    self.redis_url = f"redis://:{redis_password}@{redis_host}:{redis_port}"
    self.arq_redis = RedisSettings.from_dsn(f'{self.redis_url}/1')

    self.destiny = DestinyConfig()
    self.the100 = The100Config()
    self.twitter = TwitterConfig()
    self.discord_api_key = get_docker_secret('discord_api_key')
    self.home_server = get_docker_secret('home_server', cast_to=int)
    self.log_channel = get_docker_secret('home_server_log_channel', cast_to=int)
    self.reg_channel = get_docker_secret('home_server_reg_channel', cast_to=int)
    self.enable_activity_tracking = get_docker_secret('enable_activity_tracking', cast_to=bool)
    self.flask_app_key = os.environb[b'FLASK_APP_KEY'].decode('unicode-escape').encode('latin-1')

    self.activity_cutoff = get_docker_secret('activity_cutoff')
    if self.activity_cutoff:
        self.activity_cutoff = datetime.strptime(self.activity_cutoff, '%Y-%m-%d').astimezone(tz=pytz.utc)

    self.root_log_level = get_docker_secret('root_log_level', default=ROOT_LOG_LEVEL, cast_to=str)

    bucket_kwargs = {
        "redis_pool": ConnectionPool.from_url(self.redis_url),
        "bucket_name": "ratelimit"
    }
    destiny_api_rate = RequestRate(20, Duration.SECOND)
    self.destiny_api_limiter = Limiter(destiny_api_rate, bucket_class=RedisBucket, bucket_kwargs=bucket_kwargs)

async def main():
    redis = await create_pool(RedisSettings())
    job = await redis.enqueue_job('the_task')

    # get the job's id
    print(job.job_id)
    """
    > 68362958a244465b9be909db4b7b5ab4 (or whatever)
    """

    # get information about the job, will include results if the job has finished, but
    # doesn't await the job's result
    debug(await job.info())
    """
    > docs/examples/job_results.py:23 main
    JobDef(
        function='the_task',
        args=(),
        kwargs={},
        job_try=None,
        enqueue_time=datetime.datetime(2019, 4, 23, 13, 58, 56, 781000),
        score=1556027936781
    ) (JobDef)
    """

    # get the Job's status
    print(await job.status())
    """
    > JobStatus.queued
    """

    # poll redis for the job result, if the job raised an exception,
    # it will be raised here
    # (You'll need the worker running at the same time to get a result here)
    print(await job.result(timeout=5))
    """

async def main():
    redis = await create_pool(RedisSettings())
    job = await redis.enqueue_job('the_task')

    # get the job's id
    print(job.job_id)
    """
    > 68362958a244465b9be909db4b7b5ab4 (or whatever)
    """

    # get information about the job, will include results if the job has finished, but
    # doesn't await the job's result
    debug(await job.info())
    """
    > docs/examples/job_results.py:23 main
    {
        'enqueue_time': datetime.datetime(2019, 3, 3, 12, 32, 19, 975000),
        'function': 'the_task',
        'args': (),
        'kwargs': {},
        'score': 1551616339975,
    } (dict) len=5
    """

    # get the Job's status
    print(await job.status())
    """
    > JobStatus.queued
    """

    # poll redis for the job result, if the job raised an exception,
    # it will be raised here
    # (You'll need the worker running at the same time to get a result here)
    print(await job.result(timeout=5))
    """

async def main():
    redis = await create_pool(RedisSettings())
    for url in ('https://facebook.com', 'https://microsoft.com', 'https://github.com'):
        await redis.enqueue_job('download_content', url)

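The string 'download_content' refers to a coroutine registered on the worker side. A rough sketch of what that end could look like; the aiohttp session handling and the function body are assumptions, only the arq plumbing (ctx as first argument, functions / on_startup / on_shutdown on WorkerSettings) is prescribed:

from aiohttp import ClientSession


async def startup(ctx):
    # the worker context dict is shared with every job invocation
    ctx['session'] = ClientSession()


async def shutdown(ctx):
    await ctx['session'].close()


async def download_content(ctx, url):
    async with ctx['session'].get(url) as response:
        content = await response.text()
    return len(content)


class WorkerSettings:
    functions = [download_content]
    on_startup = startup
    on_shutdown = shutdown
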