def get_filter_redis():
    """Build and return a StrictRedis client bound to the filter database.

    Connection parameters (password/host/port/filter_db) come from the
    project-level ``get_redis_args()`` config helper.
    """
    conf = get_redis_args()
    uri = "redis://:{}@{}:{}/{}".format(
        conf.get('password'),
        conf.get('host'),
        conf.get('port'),
        conf.get('filter_db'),
    )
    # Route the client through an explicit connection pool built from the URI.
    connection_pool = ConnectionPool.from_url(uri)
    return StrictRedis(connection_pool=connection_pool)
#!/usr/bin/env python # -*- coding: utf-8 -*- # Created by Charles on 19-3-27 # Function: import redis from config import get_redis_args redis_info = get_redis_args() pool_broker = redis.ConnectionPool(host=redis_info.get('host', '127.0.0.1'), port=redis_info.get('port', 6379), password=redis_info.get('password'), db=redis_info.get('broker', 5), decode_responses=True) pool_backend = redis.ConnectionPool(host=redis_info.get('host'), port=redis_info.get('port'), password=redis_info.get('password'), db=redis_info.get('backend', 6), decode_responses=True) pool_cache = redis.ConnectionPool(host=redis_info.get('host'), port=redis_info.get('port'), password=redis_info.get('password'), db=redis_info.get('cache', 1), decode_responses=True) class RedisOpt: broker_db = redis.Redis(connection_pool=pool_broker) backend_db = redis.Redis(connection_pool=pool_backend) cache_db = redis.Redis(connection_pool=pool_cache) @classmethod
from celery import Celery
from kombu import Queue, Exchange
from config import get_redis_args
import os

redis_args = get_redis_args()

# Redis URIs for the task broker and the result backend.  The leading ':'
# means "no username", password only.
broker_uri = "redis://:{}@{}:{}/{}".format(redis_args.get('password'),
                                           redis_args.get('host'),
                                           redis_args.get('port'),
                                           redis_args.get('broker_db'))
backend_uri = "redis://:{}@{}:{}/{}".format(redis_args.get('password'),
                                            redis_args.get('host'),
                                            redis_args.get('port'),
                                            redis_args.get('backend_db'))

# Task modules the worker imports on start-up.
tasks = ["tasks.brand", "tasks.area"]

# FIX: backend_uri was computed but never passed to Celery, so task results
# were silently discarded; wire it up as the result backend.
app = Celery("test_celery", broker=broker_uri, backend=backend_uri,
             include=tasks)

# <project-root>/logs/worker.log.
# FIX: build the path entirely with os.path.join instead of concatenating
# '/logs' onto the dirname (portable, no doubled separators).
worker_log_path = os.path.join(
    os.path.dirname(os.path.dirname(__file__)), 'logs', 'worker.log')

app.conf.update(
    CELERY_TIMEZONE='Asia/Shanghai',
    CELERY_ENABLE_UTC=True,
    CELERY_LOG_FILE=worker_log_path,
    CELERY_ACCEPT_CONTENT=['json'],
    CELERY_RESULT_SERIALIZER='json',
    CELERY_TASK_SERIALIZER='json',
    # FIX: the celery setting is CELERY_QUEUES (plural); the misspelled
    # CELERY_QUEUE was silently ignored, so these queue/route definitions
    # never took effect.
    CELERY_QUEUES=(
        Queue("area", exchange=Exchange("exchange1", type="direct"),
              routing_key="route1"),
        Queue("queue2", exchange=Exchange("exchange2", type="direct"),
              routing_key="route2"),
    )
)
import json
import socket
import datetime

import redis
from redis.sentinel import Sentinel

from logger import crawler
from config import (get_redis_args, get_share_host_count, get_running_mode,
                    get_cookie_expire_time)

# Crawler run mode and how many hosts share one cookie pool.
MODE = get_running_mode()
SHARE_HOST_COUNT = get_share_host_count()
REDIS_ARGS = get_redis_args()

password = REDIS_ARGS.get('password', '')
# Logical redis database numbers for each kind of crawler data.
cookies_db = REDIS_ARGS.get('cookies', 1)
urls_db = REDIS_ARGS.get('urls', 2)
broker_db = REDIS_ARGS.get('broker', 5)
backend_db = REDIS_ARGS.get('backend', 6)
id_name_db = REDIS_ARGS.get('id_name', 8)

cookie_expire_time = get_cookie_expire_time()
# Config value is in hours; converted to seconds here.
# NOTE(review): 'expire_time' has no default — a missing key raises
# TypeError (None * 60); presumably the config always provides it.
data_expire_time = REDIS_ARGS.get('expire_time') * 60 * 60

# Optional redis-sentinel (HA) settings; falsy means plain single-node redis.
sentinel_args = REDIS_ARGS.get('sentinel', '')
if sentinel_args:
    # default socket timeout is 2 secs
    master_name = REDIS_ARGS.get('master')
    socket_timeout = int(REDIS_ARGS.get('socket_timeout', 2))
    # NOTE(review): this Sentinel(...) call is truncated in this view of
    # the file; its remaining arguments are not visible here.
    sentinel = Sentinel([(args['host'], args['port']) for args in sentinel_args],
import redis
from redis.sentinel import Sentinel

from logger import crawler
from config import (
    get_redis_args,
    get_share_host_count,
    get_running_mode,
    get_cookie_expire_time
)

# Crawler run mode and how many hosts share one cookie pool.
MODE = get_running_mode()
SHARE_HOST_COUNT = get_share_host_count()
REDIS_ARGS = get_redis_args()

password = REDIS_ARGS.get('password', '')
# Logical redis database numbers for each kind of crawler data.
cookies_db = REDIS_ARGS.get('cookies', 1)
urls_db = REDIS_ARGS.get('urls', 2)
broker_db = REDIS_ARGS.get('broker', 5)
backend_db = REDIS_ARGS.get('backend', 6)
id_name_db = REDIS_ARGS.get('id_name', 8)

cookie_expire_time = get_cookie_expire_time()
# Config value is in hours; converted to seconds here.
# NOTE(review): 'expire_time' has no default — a missing key raises
# TypeError (None * 60); presumably the config always provides it.
data_expire_time = REDIS_ARGS.get('expire_time') * 60 * 60

# Optional redis-sentinel (HA) settings; falsy means plain single-node redis.
sentinel_args = REDIS_ARGS.get('sentinel', '')
if sentinel_args:
    # default socket timeout is 2 secs
    master_name = REDIS_ARGS.get('master')
    socket_timeout = int(REDIS_ARGS.get('socket_timeout', 2))
    # NOTE(review): this block is truncated in this view of the file —
    # the sentinel client construction that follows is not visible here.