Code example #1
File: log_utils.py  Project: hysds/hysds
def set_redis_revoked_task_pool():
    """Set redis connection pool for worker status."""

    global REVOKED_TASK_POOL
    if REVOKED_TASK_POOL is None:
        REVOKED_TASK_POOL = BlockingConnectionPool.from_url(
            app.conf.REDIS_JOB_STATUS_URL)
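Examples #1-#3 (and #5-#7 and #16 below) all follow the same lazy-initialization pattern: a module-level pool is created once per process and shared afterwards. A minimal sketch of how such a pool is typically consumed; the get_revoked_task_client helper and the StrictRedis wiring are illustrative, not code from the project:

from redis import StrictRedis

def get_revoked_task_client():
    # Make sure the module-level pool exists, then hand out a client
    # that borrows connections from it instead of opening its own.
    set_redis_revoked_task_pool()
    return StrictRedis(connection_pool=REVOKED_TASK_POOL)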
Code example #2
File: log_utils.py  Project: fgreg/hysds
def set_redis_worker_status_pool():
    """Set redis connection pool for worker status."""

    global WORKER_STATUS_POOL
    if WORKER_STATUS_POOL is None:
        WORKER_STATUS_POOL = BlockingConnectionPool.from_url(
            app.conf.REDIS_JOB_STATUS_URL)
Code example #3
File: log_utils.py  Project: fgreg/hysds
def set_redis_job_info_pool():
    """Set redis connection pool for job info metrics."""

    global JOB_INFO_POOL
    if JOB_INFO_POOL is None:
        JOB_INFO_POOL = BlockingConnectionPool.from_url(
            app.conf.REDIS_JOB_INFO_URL)
Code example #4
def create_app(config='app.configs.DevConfig', redis=None):
    app = Flask(__name__)
    app.config.from_object(config)

    for i in namespaces:
        api.add_namespace(i)

    api_bp = Blueprint('api_blueprint', __name__)
    api.init_app(api_bp)
    db.init_app(app)
    cors.init_app(app)
    jwt.init_app(app)
    mail.init_app(app)
    migrate.init_app(app, db)
    init_celery(app)

    redis = redis or Redis(connection_pool=BlockingConnectionPool.from_url(
        app.config['REDIS_URL'], decode_responses=True))
    refresh_tokens.init_app(redis, ttl=app.config['JWT_REFRESH_TOKEN_EXPIRES'])
    email_confirm_tokens.init_app(redis,
                                  ttl=app.config['JWT_ACCESS_TOKEN_EXPIRES'])
    change_password_tokens.init_app(redis,
                                    ttl=app.config['JWT_ACCESS_TOKEN_EXPIRES'])
    access_tokens_blacklist.init_app(
        redis, ttl=app.config['JWT_ACCESS_TOKEN_EXPIRES'])

    app.register_blueprint(api_bp, url_prefix=app.config['API_PREFIX'])

    register_jwt(jwt)

    for c in commands:
        app.cli.add_command(c)

    return app
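A minimal sketch of driving this application factory, assuming the default DevConfig defines REDIS_URL and the JWT expiry settings used above; the host and port here are illustrative:

if __name__ == '__main__':
    # create_app() builds its own Redis client from REDIS_URL when no
    # `redis` argument is supplied.
    app = create_app()
    app.run(host='127.0.0.1', port=5000)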
Code example #5
File: log_utils.py  Project: fgreg/hysds
def set_redis_event_status_pool():
    """Set redis connection pool for event status."""

    global EVENT_STATUS_POOL
    if EVENT_STATUS_POOL is None:
        EVENT_STATUS_POOL = BlockingConnectionPool.from_url(
            app.conf.REDIS_JOB_STATUS_URL)
Code example #6
File: log_utils.py  Project: fgreg/hysds
def set_redis_job_status_pool():
    """Set redis connection pool for job status."""

    global JOB_STATUS_POOL
    if JOB_STATUS_POOL is None:
        JOB_STATUS_POOL = BlockingConnectionPool.from_url(
            app.conf.REDIS_JOB_STATUS_URL)
Code example #7
File: log_utils.py  Project: hysds/hysds
def set_redis_socket_pool():
    """Set redis connection pool via Unix socket file."""

    global SOCKET_POOL
    if SOCKET_POOL is None:
        SOCKET_POOL = BlockingConnectionPool.from_url(
            app.conf.REDIS_UNIX_DOMAIN_SOCKET)
Code example #8
def main():
    import gevent.monkey

    gevent.monkey.patch_all()
    from gevent.queue import LifoQueue

    # These imports are inside the __main__ block
    # to make sure that we only import from rq_gevent_worker
    # (which has the side effect of applying gevent monkey patches)
    # in the worker process. This way other processes can import the
    # redis connection without that side effect.
    import os
    from redis import BlockingConnectionPool, StrictRedis
    from rq import Queue, Connection
    from dallinger.heroku.rq_gevent_worker import GeventWorker as Worker

    from dallinger.config import initialize_experiment_package

    initialize_experiment_package(os.getcwd())

    import logging

    logging.basicConfig(format="%(asctime)s %(message)s", level=logging.DEBUG)
    redis_url = os.getenv("REDIS_URL", "redis://localhost:6379")
    # Specify queue class for improved performance with gevent.
    # see http://carsonip.me/posts/10x-faster-python-gevent-redis-connection-pool/
    redis_pool = BlockingConnectionPool.from_url(redis_url,
                                                 queue_class=LifoQueue)
    redis_conn = StrictRedis(connection_pool=redis_pool)

    # `listen` is a module-level list of queue names defined elsewhere
    # in this file.
    with Connection(redis_conn):
        worker = Worker(list(map(Queue, listen)))
        worker.work()
Code example #9
    def __init__(self, cfg):
        self.simhash_size = cfg['simhash']['size']
        self.simhash_expire = cfg['simhash']['expire_after']

        headers = {
            'User-Agent': 'wayback-discover-diff',
            'Accept-Encoding': 'gzip,deflate',
            'Connection': 'keep-alive'
        }
        cdx_auth_token = cfg.get('cdx_auth_token')
        if cdx_auth_token:
            # Add the auth cookie without discarding the default headers.
            headers['cookie'] = 'cdx_auth_token=%s' % cdx_auth_token

        self.http = urllib3.HTTPConnectionPool('web.archive.org',
                                               maxsize=50,
                                               retries=urllib3.Retry(
                                                   3, redirect=2),
                                               headers=headers)
        self.redis_db = StrictRedis(
            connection_pool=BlockingConnectionPool.from_url(
                cfg['redis_uri'],
                max_connections=50,
                timeout=cfg.get('redis_timeout', 10),
                decode_responses=True))
        self.tpool = ThreadPoolExecutor(max_workers=cfg['threads'])
        self.snapshots_number = cfg['snapshots']['number_per_year']
        # Initialize logger
        self._log = logging.getLogger('wayback_discover_diff.worker')
Code example #10
    def __init__(self,
                 redis_uri: str = "redis://localhost:6379/1",
                 max_connections=20):
        self.__connection_pool = BlockingConnectionPool.from_url(
            redis_uri, max_connections=max_connections)
        self.__encoder = WormholePickleEncoder()
        self.__closed = False
        self.__send_rate = -1
        self.__receive_rate = -1
        self.stats_enabled = True
Code example #11
File: backends.py  Project: mete0r/pyramid_walrus
def LedisFactory(settings):
    from ledis import BlockingConnectionPool
    from walrus.tusks.ledisdb import WalrusLedis

    default = get_settings('pyramid_walrus.backend.ledis.', settings, {
        'url': 'ledis://@localhost:6380/0',
        'max_connections': '10',
    })
    connection_url = default['url']
    max_connection = default['max_connections']
    max_connection = int(max_connection) if max_connection else None
    connection_pool = BlockingConnectionPool.from_url(
        url=connection_url,
        max_connections=max_connection)
    logger.debug('connection_pool: %r', connection_pool)

    def get_database_for_request(request):
        return WalrusLedis(connection_pool=connection_pool)
    return get_database_for_request
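A hedged sketch of wiring this factory into a Pyramid application; the settings keys mirror the 'pyramid_walrus.backend.ledis.' prefix read above, and the request-method name `walrus` is an assumption:

from pyramid.config import Configurator

settings = {
    'pyramid_walrus.backend.ledis.url': 'ledis://@localhost:6380/0',
    'pyramid_walrus.backend.ledis.max_connections': '10',
}
config = Configurator(settings=settings)
# Each request gets a WalrusLedis handle backed by the shared pool;
# the attribute name `walrus` is assumed, not from the project.
config.add_request_method(LedisFactory(settings), 'walrus', reify=True)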
Code example #12
def make_redis_region(app, prefix):
    expiration_time = app.config.setdefault('REDICA_DEFAULT_EXPIRE', 3600)
    key_mangler = functools.partial(_md5_key_mangler, prefix)
    redica_cache_url = app.config.get('REDICA_CACHE_URL')
    cfg = {
        'backend': 'extended_redis_backend',
        'expiration_time': expiration_time,
        'arguments': {
            'redis_expiration_time': expiration_time + 30,
            'key_mangler': key_mangler,
        }
    }
    if app.config.get('REDICA_CACHE_POOL_BLOCKING', True):
        cfg['arguments']['connection_pool'] = BlockingConnectionPool.from_url(
            redica_cache_url)
    else:
        cfg['arguments']['url'] = redica_cache_url

    return dict(default=make_region().configure(**cfg))
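The returned dogpile.cache region can then back a cache decorator or explicit get/set calls. A minimal usage sketch; expensive_lookup and load_user_from_db are hypothetical:

regions = make_redis_region(app, 'myapp')
cache = regions['default']

@cache.cache_on_arguments()
def expensive_lookup(user_id):
    # The result is cached in Redis for REDICA_DEFAULT_EXPIRE seconds.
    return load_user_from_db(user_id)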
Code example #13
def main():
    import gevent.monkey

    gevent.monkey.patch_all()
    from gevent.queue import LifoQueue

    # These imports are inside the __main__ block
    # to make sure that we only import from rq_gevent_worker
    # (which has the side effect of applying gevent monkey patches)
    # in the worker process. This way other processes can import the
    # redis connection without that side effect.
    import logging
    import os
    from redis import BlockingConnectionPool, StrictRedis
    from rq import Queue, Connection
    from six.moves.urllib.parse import urlparse
    from dallinger.heroku.rq_gevent_worker import GeventWorker as Worker
    from dallinger.config import initialize_experiment_package

    initialize_experiment_package(os.getcwd())

    logging.basicConfig(format="%(asctime)s %(message)s", level=logging.DEBUG)
    redis_url = os.getenv("REDIS_URL", "redis://localhost:6379")
    # Specify queue class for improved performance with gevent.
    # see http://carsonip.me/posts/10x-faster-python-gevent-redis-connection-pool/

    connection_args = {
        "url": redis_url,
        "queue_class": LifoQueue,
    }
    # Since we are generally running on Heroku, and configuring SSL certificates
    # is challenging, we disable cert requirements on secure connections.
    if urlparse(redis_url).scheme == "rediss":
        connection_args["ssl_cert_reqs"] = None
    redis_pool = BlockingConnectionPool.from_url(**connection_args)
    redis_conn = StrictRedis(connection_pool=redis_pool)

    # `listen` is a module-level list of queue names defined elsewhere
    # in this file.
    with Connection(redis_conn):
        worker = Worker(list(map(Queue, listen)))
        worker.work()
Code example #14
File: __init__.py  Project: niteoweb/pyramid_redis
def includeme(config):
    """Factory for Redis pyramid client."""

    dsn = config.registry.settings.get('redis.dsn')
    max_connections = config.registry.settings.get('redis.max_connections', 4)
    redis_client = None

    is_type = lambda name: dsn.startswith(name + '://')
    if any(is_type(t) for t in ['redis', 'rediss', 'unix']):
        from redis import StrictRedis

        redis_client = StrictRedis.from_url(dsn)
    elif any(is_type(t + '+blocking') for t in ['redis', 'rediss', 'unix']):
        from redis import BlockingConnectionPool
        from redis import StrictRedis

        # Strip the +blocking from the DSN; the underlying client
        # does not support this scheme.
        dsn = dsn.replace('+blocking:', ':')
        pool = BlockingConnectionPool.from_url(dsn,
                                               max_connections=max_connections)
        redis_client = StrictRedis(connection_pool=pool)
    elif is_type('fakeredis'):
        import fakeredis

        server = fakeredis.FakeServer()
        redis_client = fakeredis.FakeStrictRedis(server=server)

        config.registry.settings["fakeredis_server"] = server
    else:
        logger.error(
            'Redis could not be initialized, DSN %s is not supported!', dsn)

    # Create a request method that'll get the Redis client in each request.
    config.add_request_method(
        lambda request: redis_client,
        name='redis',
        reify=True,
    )
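A hedged sketch of enabling this in a Pyramid application; the include path 'pyramid_redis' is inferred from the project name, and the DSN is illustrative:

from pyramid.config import Configurator

settings = {
    'redis.dsn': 'redis+blocking://localhost:6379/0',
    'redis.max_connections': 4,
}
config = Configurator(settings=settings)
config.include('pyramid_redis')
# In a view, the reified request method gives a shared client:
#     request.redis.set('greeting', 'hello')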
Code example #15
    logging.config.dictConfig(logconf)

# Init Celery app
CELERY = Celery(config_source=CFG['celery'])
CELERY.register_task(Discover(CFG))

# Init Flask app
from . import web
APP = web.get_app(CFG)

# Initialize CORS support
cors = CFG.get('cors')
if cors:
    CORS(APP, origins=cors)

# Initialize Celery and Redis
APP.celery = CELERY
APP.redis_db = StrictRedis(
    connection_pool=BlockingConnectionPool.from_url(
        CFG['redis_uri'], max_connections=50,
        timeout=CFG.get('redis_timeout', 10),
        decode_responses=True))

# ensure the instance folder exists
try:
    os.makedirs(APP.instance_path)
except OSError:
    pass
Code example #16
File: log_utils.py  Project: hysds/hysds
def set_redis_payload_hash_pool():
    """Set redis connection pool for payload hash status."""
    global PAYLOAD_HASH_POOL
    if PAYLOAD_HASH_POOL is None:
        PAYLOAD_HASH_POOL = BlockingConnectionPool.from_url(
            app.conf.REDIS_JOB_STATUS_URL)
Code example #17
File: log_instance_stats.py  Project: hysds/hysds
def set_redis_pool(redis_url):
    """Set redis connection pool."""

    global POOL
    if POOL is None:
        POOL = BlockingConnectionPool.from_url(redis_url)
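Common to all of these examples: unlike the default ConnectionPool, a BlockingConnectionPool makes a caller wait up to `timeout` seconds for a free connection when `max_connections` is exhausted, instead of raising ConnectionError immediately. A minimal sketch with illustrative limits:

from redis import BlockingConnectionPool, StrictRedis

pool = BlockingConnectionPool.from_url(
    'redis://localhost:6379/0',
    max_connections=2,  # deliberately small to show the blocking behavior
    timeout=5)          # seconds to wait for a free connection
client = StrictRedis(connection_pool=pool)
client.ping()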