    def __init__(self, old_redis_url, new_redis_url, dry_run=True,
                 per_recording_list=False, s3_import=False, s3_root=None):
        self.old_redis = StrictRedis.from_url(old_redis_url, decode_responses=True)
        self.dry_run = dry_run
        self.per_recording_list = per_recording_list
        self.s3_import = s3_import

        if s3_import:
            assert(s3_root)
            import boto3
            self.s3_root = s3_root
            self.s3 = boto3.client('s3')
        else:
            self.s3_root = None
            self.s3 = None

        if self.dry_run:
            import redis
            redis.StrictRedis = fakeredis.FakeStrictRedis
            self.redis = fakeredis.FakeStrictRedis.from_url(new_redis_url, decode_responses=True)
        else:
            self.redis = StrictRedis.from_url(new_redis_url, decode_responses=True)

        print('Redis initialized')

        self.cli = CLIUserManager(new_redis_url)
def render_task(dburl, docpath, slug):
    """Render a document."""
    oldcwd = os.getcwd()
    try:
        os.chdir(os.path.join(docpath, slug))
    except OSError:
        db = StrictRedis.from_url(dburl)
        job = get_current_job(db)
        job.meta.update({'out': 'Document not found.', 'return': 127, 'status': False})
        job.save()
        return 127

    db = StrictRedis.from_url(dburl)
    job = get_current_job(db)
    job.meta.update({'out': '', 'milestone': 0, 'total': 1, 'return': None,
                     'status': None})
    job.save()

    p = subprocess.Popen(('lualatex', '--halt-on-error', slug + '.tex'),
                         stdout=subprocess.PIPE)

    out = []

    while p.poll() is None:
        nl = p.stdout.readline()
        out.append(nl)
        job.meta.update({'out': ''.join(out), 'return': None,
                         'status': None})
        job.save()

    out = ''.join(out)
    job.meta.update({'out': out, 'return': p.returncode,
                     'status': p.returncode == 0})
    job.save()
    os.chdir(oldcwd)
    return p.returncode
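The task above reads and saves its progress through RQ's get_current_job, so it is meant to run inside an RQ worker. A minimal enqueueing sketch (not part of the original listing; the Redis URL, document path, and slug are placeholders):

from redis import StrictRedis
from rq import Queue

dburl = 'redis://localhost:6379/0'  # placeholder URL
q = Queue('default', connection=StrictRedis.from_url(dburl))
q.enqueue(render_task, dburl, '/path/to/documents', 'my-doc')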
Example #3
def init_redis(config):
    """ Init redis from config, with fallback to localhost
    """
    try:
        rc = StrictRedis.from_url(config['redis_url'])
        rc.ping()
    except Exception:
        rc = StrictRedis.from_url('redis://localhost/')
        rc.ping()

    return rc
Example #4
def redis(redis_dsn):
    if redis_dsn:
        r = StrictRedis.from_url(redis_dsn)
    else:
        r = FakeRedis()
    r.flushdb()
    return r
Example #5
def get_multi_karma(bot, number, reverse):
    redis = StrictRedis.from_url(bot.config['System']['redis_url'])
    all_karma = redis.hgetall(bot.config['System']['redis_prefix'] + "karma")
    all_karma = [(item[0].decode('utf-8'), int(item[1])) for item in all_karma.items()]
    sorted_karma = sorted(all_karma, key=operator.itemgetter(1), reverse=reverse)

    return sorted_karma[:number]
Example #6
def continuous_migration(skip_files=None):
    """Task to continuously migrate what is pushed up by Legacy."""
    if skip_files is None:
        skip_files = current_app.config.get(
            'RECORDS_MIGRATION_SKIP_FILES',
            False,
        )
    redis_url = current_app.config.get('CACHE_REDIS_URL')
    r = StrictRedis.from_url(redis_url)
    lock = Lock(r, 'continuous_migration', expire=120, auto_renewal=True)
    if lock.acquire(blocking=False):
        try:
            while r.llen('legacy_records'):
                raw_record = r.lrange('legacy_records', 0, 0)
                if raw_record:
                    migrate_and_insert_record(
                        zlib.decompress(raw_record[0]),
                        skip_files=skip_files,
                    )
                    db.session.commit()
                r.lpop('legacy_records')
        finally:
            lock.release()
    else:
        LOGGER.info("Continuous_migration already executed. Skipping.")
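The consumer above drains the 'legacy_records' list, which is expected to hold zlib-compressed records; the push_to_redis test helper later in this listing shows the producer side. A minimal producer sketch along the same lines, with a placeholder URL and payload:

import zlib
from redis import StrictRedis

r = StrictRedis.from_url('redis://localhost:6379/0')  # placeholder URL
r.rpush('legacy_records', zlib.compress(b'<record>...</record>'))  # placeholder payload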
Example #7
def decrement(bot, term):
    redis = StrictRedis.from_url(bot.config['System']['redis_url'])
    if redis.hexists(bot.config['System']['redis_prefix'] + "karma", term):
        oldvalue = int(redis.hget(bot.config['System']['redis_prefix'] + "karma", term))
        redis.hset(bot.config['System']['redis_prefix'] + "karma", term, oldvalue - 1)
    else:
        redis.hset(bot.config['System']['redis_prefix'] + "karma", term, -1)
def redis_setup(api):
    redis_url = api.config.get('CACHE_REDIS_URL')
    r = StrictRedis.from_url(redis_url)

    yield r

    r.delete('orcidputcodes:0000-0002-2169-2152:4328')
Example #9
def configure_extensions(app):
    redis = StrictRedis.from_url(app.config['REDIS_URL'])
    # redis log
    redis_log = RedisLog(
        connection=redis,
        ttl=app.config['REDIS_LOG_TTL'],
        prefix=app.config['REDIS_LOG_PREFIX'],
    )
    app.extensions.update({
        'redis': redis,
        'redis_log': redis_log,
    })
    # rq
    for q in app.config['QUEUES']:
        queue = Queue(
            name=q, default_timeout=app.config[
                'RQ_JOB_TIMEOUT_{}'.format(q.upper())],
            connection=redis,
        )
        app.extensions['rq_{}'.format(q)] = queue

    # rq dashboard
    RQDashboard(app, url_prefix='/_rq')

    # api endpoints
    api = Api(prefix='/api')
    api.add_resource(build.Build, '/build/')
    api.add_resource(build.Logs, '/build/<build_id>/logs/')
    api.init_app(app)
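After configure_extensions runs, each queue is reachable through app.extensions['rq_<name>']. A usage sketch, assuming the app above was configured with a 'default' entry in QUEUES (the queue name and the enqueued function are placeholders):

def greet(name):  # placeholder task, not part of the original listing
    return 'Hello, {}!'.format(name)

queue = app.extensions['rq_default']  # assumes 'default' appears in app.config['QUEUES']
job = queue.enqueue(greet, 'world')
print(job.get_id())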
Example #10
def continuous_migration():
    """Task to continuously migrate what is pushed up by Legacy."""
    indexer = RecordIndexer()
    redis_url = current_app.config.get('CACHE_REDIS_URL')
    r = StrictRedis.from_url(redis_url)

    try:
        while r.llen('legacy_records'):
            raw_record = r.lpop('legacy_records')
            if raw_record:
                # FIXME use migrate_and_insert_record(raw_record)
                # The record might be None, in case a parallel
                # continuous_migration task has already consumed the queue.
                raw_record = zlib.decompress(raw_record)
                record = marc_create_record(raw_record, keep_singletons=False)
                recid = int(record['001'][0])
                prod_record = InspireProdRecords(recid=recid)
                prod_record.marcxml = raw_record
                json_record = create_record(record)
                with db.session.begin_nested():
                    try:
                        record = record_upsert(json_record)
                    except ValidationError as e:
                        # Invalid record, will not get indexed
                        errors = "ValidationError: Record {0}: {1}".format(
                            recid, e
                        )
                        prod_record.valid = False
                        prod_record.errors = errors
                        db.session.merge(prod_record)
                        continue
                indexer.index_by_id(record.id)
    finally:
        db.session.commit()
        db.session.close()
Example #11
def continuous_migration():
    """Task to continuously migrate what is pushed up by Legacy."""
    from redis import StrictRedis
    redis_url = current_app.config.get('CACHE_REDIS_URL')
    r = StrictRedis.from_url(redis_url)

    try:
        while r.llen('legacy_records'):
            raw_record = r.lpop('legacy_records')
            if raw_record:
                # The record might be None, in case a parallel
                # continuous_migration task has already consumed the queue.
                raw_record = zlib.decompress(raw_record)
                record = marc_create_record(raw_record, keep_singletons=False)
                recid = int(record['001'][0])
                prod_record = InspireProdRecords(recid=recid)
                prod_record.marcxml = raw_record
                try:
                    with db.session.begin_nested():
                        errors, dummy = create_record(
                            record, force=True, validation=True
                        )
                        logger.info("Successfully migrated record {}".format(recid))
                        prod_record.successful = True
                        prod_record.valid = not errors
                        prod_record.errors = errors
                        db.session.merge(prod_record)
                except Exception as err:
                    logger.error("Error when migrating record {}".format(recid))
                    logger.exception(err)
                    prod_record.successful = False
                    db.session.merge(prod_record)
    finally:
        db.session.commit()
        db.session.close()
Example #12
def init(configfile='config.yaml', redis_url=None):
    logging.basicConfig(format='%(asctime)s: [%(levelname)s]: %(message)s',
                        level=logging.DEBUG)
    logging.debug('')

    # set boto log to error
    boto_log = logging.getLogger('boto')
    if boto_log:
        boto_log.setLevel(logging.ERROR)

    config = load_yaml_config(configfile)

    if not redis_url:
        redis_url = expandvars(config['redis_url'])

    redis_obj = StrictRedis.from_url(redis_url)

    config['redis_warc_resolver'] = DynRedisResolver(redis_obj,
                                                     remote_target=config['remote_target'],
                                                     proxy_target=config['proxy_target'])


    bottle_app = default_app()

    final_app, cork = init_cork(bottle_app, redis_obj, config)

    webrec = WebRec(config, cork, redis_obj)
    bottle_app.install(webrec)

    pywb_dispatch = PywbDispatcher(bottle_app)

    init_routes(webrec)
    pywb_dispatch.init_routes()

    return final_app
def redis_setup(app):
    redis_url = current_app.config.get('CACHE_REDIS_URL')
    r = StrictRedis.from_url(redis_url)

    yield r

    r.delete('legacy_orcid_tokens')
    def __init__(self, *args, **kwargs):
        logger.info('backend scheduler using %s',
                    current_app.conf.CELERY_REDIS_SCHEDULER_URL)

        self.update_interval = current_app.conf.get('UPDATE_INTERVAL') or datetime.timedelta(
                seconds=10)

        # how long we should hold on to the redis lock in seconds
        if 'CELERY_REDIS_SCHEDULER_LOCK_TTL' in current_app.conf:
            lock_ttl = current_app.conf.CELERY_REDIS_SCHEDULER_LOCK_TTL
        else:
            lock_ttl = 30

        if lock_ttl < self.update_interval.seconds:
            lock_ttl = self.update_interval.seconds * 2
        self.lock_ttl = lock_ttl

        self._dirty = set()  # keeping modified entries by name for sync later on
        self._schedule = {}  # keeping dynamic schedule from redis DB here
        # self.data is used for statically configured schedule
        self.schedule_url = current_app.conf.CELERY_REDIS_SCHEDULER_URL
        self.rdb = StrictRedis.from_url(self.schedule_url)
        self._last_updated = None
        self._lock_acquired = False
        self._lock = self.rdb.lock('celery:beat:task_lock', timeout=self.lock_ttl)
        self._lock_acquired = self._lock.acquire(blocking=False)
        self.Entry.scheduler = self

        # This will launch setup_schedule if not lazy
        super(RedisScheduler, self).__init__(*args, **kwargs)
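A hedged configuration sketch for the scheduler above; the setting names are exactly those read in the constructor, while the values are placeholders:

import datetime

CELERY_REDIS_SCHEDULER_URL = 'redis://localhost:6379/1'  # placeholder URL
CELERY_REDIS_SCHEDULER_LOCK_TTL = 30                     # seconds the beat lock is held
UPDATE_INTERVAL = datetime.timedelta(seconds=10)         # how often the schedule is refreshed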
Example #15
def get_redis_from_config(settings):
    """Returns a StrictRedis instance from a dictionary of settings."""
    if settings.get('REDIS_URL') is not None:
        return StrictRedis.from_url(settings['REDIS_URL'])

    kwargs = {
        'host': settings.get('REDIS_HOST', 'localhost'),
        'port': settings.get('REDIS_PORT', 6379),
        'db': settings.get('REDIS_DB', 0),
        'password': settings.get('REDIS_PASSWORD', None),
    }

    use_ssl = settings.get('REDIS_SSL', False)
    if use_ssl:
        # If SSL is required, we need to depend on redis-py being 2.10 at
        # least
        def safeint(x):
            try:
                return int(x)
            except ValueError:
                return 0

        version_info = tuple(safeint(x) for x in redis.__version__.split('.'))
        if not version_info >= (2, 10):
            raise RuntimeError('Using SSL requires a redis-py version >= 2.10')
        kwargs['ssl'] = use_ssl
    return StrictRedis(**kwargs)
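A usage sketch for get_redis_from_config, assuming a plain dictionary of settings (all values are placeholders):

settings = {'REDIS_HOST': 'localhost', 'REDIS_PORT': 6379, 'REDIS_DB': 0}
conn = get_redis_from_config(settings)
conn.ping()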
Example #16
    def redis(self):
        redis = getattr(flask.g, 'redis_client', None)
        if redis is None:
            url = app.config.get('CACHE_REDIS_URL')
            redis = StrictRedis.from_url(url)
            flask.g.redis_client = redis
        return redis
Example #17
def message_hook(bot, channel, sender, message):
    listen_on = bot.config['Slack']['listen'].split()
    if channel in listen_on and not sender.startswith("["):
        redis = StrictRedis.from_url(bot.config['System']['redis_url'])
        redis_key = bot.config['System']['redis_prefix'] + "slack-avatar-" + sender

        endpoint = bot.config['Slack']['webhook']
        chanstr = channel.replace("#","")
        target_channel = bot.config['Slack'][chanstr+"_target"]

        message = message.replace("\x01", "")
        # convert "[nick]" style mentions into "@nick"
        message = re.sub(r'\[([^@\s\]]+)\]', r'@\1', message)

        payload = {
            'text': message,
            'username': sender,
            'channel': target_channel
        }

        if redis.exists(redis_key):
            payload['icon_url'] = redis.get(redis_key).decode("utf-8")

        postit = requests.post(
            endpoint,
            data=json.dumps(payload)
        )
Example #18
    def prepare_session(self):
        from redis import StrictRedis
        url = self.config.get('SESSION_REDIS', None)
        if url is None:
            return
        redis = StrictRedis.from_url(url)
        self.session_interface = RedisSessionInterface(redis=redis)
Example #19
def unsubscribe(bot, channel, sender, args):
    topic = args[0]
    redis = StrictRedis.from_url(bot.config['System']['redis_url'])
    if redis.sismember(bot.config['System']['redis_prefix'] + "lists", topic) and redis.sismember(bot.config['System']['redis_prefix'] + "lists:%s:subscribers" % topic, sender):
        redis.srem(bot.config['System']['redis_prefix'] + "lists:%s:subscribers" % topic, sender)
        bot.message(channel, "%s: You have now been unsubscribed from the topic '%s'" % (sender, topic))
    else:
        bot.message(channel, "%s: Either that topic does not exist, or you are not a subscriber, and thus cannot unsubscribe from it!" % sender)
Example #20
def subscribe(bot, channel, sender, args):
    topic = args[0]
    redis = StrictRedis.from_url(bot.config['System']['redis_url'])
    if redis.sismember(bot.config['System']['redis_prefix'] + "lists", topic):
        redis.sadd(bot.config['System']['redis_prefix'] + "lists:%s:subscribers" % topic, sender)
        bot.message(channel, "%s: You are now subscribed to the topic '%s'" % (sender, topic))
    else:
        bot.message(channel, "%s: You cannot subscribe to a topic that doesn't exist!" % sender)
Example #21
def transfer_topic(bot, channel, sender, args):
    topic = args[0]
    redis = StrictRedis.from_url(bot.config['System']['redis_url'])
    if sender == redis.hget(bot.config['System']['redis_prefix'] + "lists:%s" % topic, "owner").decode('utf-8') and args[1]:
        redis.hset(bot.config['System']['redis_prefix'] + "lists:%s" % topic, "owner", args[1])
        bot.message(channel, "%s: You have now transferred ownership of the topic '%s' to %s" % (sender, topic, args[1]))
    else:
        bot.message(channel, "%s: You are not the owner of the topic '%s' and cannot transfer ownership of it." % (sender, topic))
Example #22
def disallow_sender(bot, channel, sender, args):
    topic = args[0]
    redis = StrictRedis.from_url(bot.config['System']['redis_url'])
    if sender == redis.hget(bot.config['System']['redis_prefix'] + "lists:%s" % topic, "owner").decode('utf-8'):
        redis.srem(bot.config['System']['redis_prefix'] + "lists:%s:senders" % topic, args[1])
        bot.message(channel, "%s: You have now removed the ability to broadcast to the topic '%s' from %s" % (sender, topic, args[1]))
    else:
        bot.message(channel, "%s: You are not the owner of the topic '%s' and cannot add people to the senders list" % (sender, topic))
Example #23
def react(bot, channel, sender, args):
    redis = StrictRedis.from_url(bot.config['System']['redis_url'])
    term = "_".join(args).lower()
    try:
        url = redis.srandmember(bot.config['System']['redis_prefix'] + "reactions:" + term).decode('utf-8')
        bot.message(channel, url)
    except TypeError:
        pass
Example #24
def includeme(config):  # pragma: no cover
    redis_url = config.registry.settings["redis.url"]
    redis_conn = StrictRedis.from_url(redis_url)

    def redis_conn_factory(context, request):
        return redis_conn

    config.register_service_factory(redis_conn_factory, name="redis")
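Because the connection is registered through pyramid_services under the name "redis", views can look it up from the request. A minimal sketch, assuming pyramid_services is set up as in the snippet above; the view itself is hypothetical:

def redis_ping_view(request):  # hypothetical Pyramid view
    redis_conn = request.find_service(name="redis")
    redis_conn.ping()
    return {"redis": "ok"}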
Example #25
def add_reaction(bot, channel, sender, args):
    """ Adds a reaction for a term - usage: $addreaction http://your.url.here your description here """
    redis = StrictRedis.from_url(bot.config['System']['redis_url'])
    if len(args) == 1:
        bot.message(channel, "You did not specify a description")
        return
    url = args[0]
    term = "_".join(args[1:]).lower()
    redis.sadd(bot.config['System']['redis_prefix'] + "reactions:" + term, url)
Example #26
def karma_command(bot, channel, sender, args):
    redis = StrictRedis.from_url(bot.config['System']['redis_url'])
    term = " ".join(args).lower() if args else sender.lower()
    try:
        amount = int(redis.hget(bot.config['System']['redis_prefix'] + "karma", term))
    except TypeError:
        amount = "no"
    bot.message(channel, "%s has %s karma" % (term, amount))
Example #27
def get_node_redis_conn():
    """
    获取node节点分配系统redis连接
    :return:
    """
    mc = StrictRedis.from_url(app.config['REDIS_URL'],
                              socket_connect_timeout=1,
                              socket_timeout=6)
    return mc
Example #28
def karma_command(bot, channel, sender, args):
    redis = StrictRedis.from_url(bot.config['System']['redis_url'])
    term = " ".join(args).lower() if args else sender
    try:
        amount = int(
            redis.hget(bot.config['System']['redis_prefix'] + "karma", term))
    except TypeError:
        amount = "no"
    bot.message(channel, "%s has %s karma" % (term, amount))
def push_to_redis(record_file):
    record = pkg_resources.resource_string(
        __name__, os.path.join('fixtures', record_file))

    redis_url = current_app.config.get('CACHE_REDIS_URL')
    r = StrictRedis.from_url(redis_url)
    r.rpush('legacy_records', zlib.compress(record))

    return record
Example #30
    def __init__(self, *args, **kwargs):
        app = kwargs['app']
        self.key = app.conf.get("CELERY_REDIS_SCHEDULER_KEY",
                                "celery:beat:order_tasks")
        self.schedule_url = app.conf.get("CELERY_REDIS_SCHEDULER_URL",
                                         "redis://localhost:6379")
        self.rdb = StrictRedis.from_url(self.schedule_url)
        Scheduler.__init__(self, *args, **kwargs)
        app.add_task = partial(self.add, self)
Example #31
def single_enqueue(fun_dotted,
                   json_str,
                   queue_name='default',
                   timeout=10000,
                   print_only=False,
                   at_front=False,
                   **kwargs):
    q = Queue(queue_name, connection=StrictRedis.from_url(REDIS_URL_RQ))
    doQ(q, fun_dotted, json_str, timeout, print_only, at_front)
    def __init__(self, *args, **kwargs):
        super(RedisTemplateResponse, self).__init__(*args, **kwargs)
        self.logger = logging.getLogger(__name__)
        self.connection = StrictRedis.from_url(settings.REDIS_URL,
                                               socket_timeout=self.socket_timeout)

        # Ping the server to make sure we have a valid connection.
        # This will raise a ConnectionError if the redis URL is invalid.
        self.connection.ping()
Example #34
    def __init__(self, cfg):
        self.simhash_size = cfg['simhash']['size']
        self.simhash_expire = cfg['simhash']['expire_after']
        self.http = urllib3.PoolManager(retries=urllib3.Retry(3, redirect=1))
        self.redis_db = StrictRedis.from_url(cfg['redis_uri'], decode_responses=True)
        self.thread_number = cfg['threads']
        self.snapshots_number = cfg['snapshots']['number_per_year']
        # Initialize logger
        self._log = logging.getLogger(__name__)
Example #35
def react(bot, channel, sender, args):
    redis = StrictRedis.from_url(bot.config['System']['redis_url'])
    term = "_".join(args).lower()
    try:
        url = redis.srandmember(bot.config['System']['redis_prefix'] +
                                "reactions:" + term).decode('utf-8')
        bot.message(channel, url)
    except TypeError:
        pass
Example #36
def message_hook(bot, channel, sender, message):
    if "butt" in message.lower() and sender not in ['buttbot']:
        redis = StrictRedis.from_url(bot.config['System']['redis_url'])

        if redis.hexists(bot.config['System']['redis_prefix'] + "buttmaster", sender):
            current = int(redis.hget(bot.config['System']['redis_prefix'] + "buttmaster", sender))
            redis.hset(bot.config['System']['redis_prefix'] + "buttmaster", sender, current + 1)
        else:
            redis.hset(bot.config['System']['redis_prefix'] + "buttmaster", sender, 1)
Example #37
def list_topics(bot, channel, sender, args):
    redis = StrictRedis.from_url(bot.config['System']['redis_url'])
    subscribed_topics = []
    topics = redis.smembers(bot.config['System']['redis_prefix'] + "lists")
    for topic in topics:
        topic = topic.decode('utf-8')
        if redis.sismember(bot.config['System']['redis_prefix'] + "lists:%s:subscribers" % topic, sender):
            subscribed_topics.append(topic)
    bot.message(channel, "%s: You are subscribed to the following topics: %s" % (sender, ", ".join(subscribed_topics)))
Example #38
def decrement(bot, term):
    redis = StrictRedis.from_url(bot.config['System']['redis_url'])
    if redis.hexists(bot.config['System']['redis_prefix'] + "karma", term):
        oldvalue = int(
            redis.hget(bot.config['System']['redis_prefix'] + "karma", term))
        redis.hset(bot.config['System']['redis_prefix'] + "karma", term,
                   oldvalue - 1)
    else:
        redis.hset(bot.config['System']['redis_prefix'] + "karma", term, -1)
Example #39
def setup_db(d):
    mongo_url = d['mongo_url']
    db = mongo_url.split('/')[3]
    env.MONGO = pymongo.MongoClient(mongo_url)[db]
    env.REDIS = StrictRedis.from_url(d['redis_url'])

    for collection, fields in d['mongo_indexes'].items():
        for field in fields:
            env.MONGO[collection].ensure_index(field)
Example #40
def mk_tms():
    s, d = StrictRedis.from_url(SRC), StrictRedis.from_url(DST)

    for suite in ['dev'] + [
            'evosuite-branch.{0}'.format(i) for i in xrange(0, 10)
    ] + ['randoop.{0}'.format(i + 1) for i in xrange(0, 10)]:
        for project, version in iter_versions():
            key = ':'.join(
                [PRE_SRC, 'test-methods', project,
                 str(version), suite])
            print key
            tm_list = s.lrange(key, 0, -1)
            idxes = tn_i_s(d, tm_list, suite)
            dst_key = ':'.join(['tms', project, str(version), suite])
            assert (len(idxes) == len(tm_list))
            for chunk in chunks(idxes, 100):
                if len(chunk) == 0:
                    continue
                d.rpush(dst_key, *chunk)
Example #41
    def configure(cls, config):
        super(RedisCache, cls).configure(config)
        settings = config.get_settings()
        try:
            from redis import StrictRedis
        except ImportError:  # pragma: no cover
            raise ImportError("You must 'pip install redis' before using "
                              "redis as the database")
        db_url = settings.get('db.url')
        cls.db = StrictRedis.from_url(db_url)
def init_config_sync(uri: str,
                     db: Optional[int] = None,
                     *,
                     default_config: Optional[str] = None,
                     keys: Optional[FrozenSet[str]] = None):
    global redis
    if uri:
        redis = StrictRedis.from_url(uri, db)
    if default_config:
        load_default_config_sync(default_config, keys)
    def __init__(self):
        redis_url = os.getenv('REDIS_URL', 'localhost')
        if redis_url == 'localhost':
            self.r = StrictRedis(db=1)
        else:
            self.r = StrictRedis.from_url(redis_url)
        self.is_collecting = True
        self.lock_mod = threading.Lock()
        self.lock_len = threading.Lock()
        self.lock_tmp = threading.Lock()
Example #44
    def init_app(self, app):
        """
        Init Redis Cache and add config
        :param app: application
        """
        if "REDIS_URL" not in app.config:
            raise RuntimeError('Missing "REDIS_URL" configuration')

        self._url = app.config["REDIS_URL"]
        self.conn = StrictRedis.from_url(self._url)
Example #45
def get_redis_singleton():
    global _singleton
    if not _singleton:
        logger.debug('Redis connection pool initializing')
        _singleton = StrictRedis.from_url(os.getenv(
            'REDIS_CONNECTION_STRING', DEFAULT_REDIS_CONNECTION_STRING),
                                          decode_responses=True)
        _singleton.ping()
        logger.info('Redis connection pool initialized')
    return _singleton
Example #46
def main(options):
    r = StrictRedis.from_url(REDIS_URL_OUT)
    for project, v in iter_versions(restrict_project=options.restrict_project,
                                    restrict_version=options.restrict_version):
        reasons = []
        for qm in ['line', 'mutant', 'mutant-line']:
            key = mk_key('out', [qm, 'file', '0', 'B.F', project, v]) + ':info'
            info = json.loads(r.get(key))
            reasons.append(info[1])
        print '{project}:{v}'.format(**locals()), ' '.join(reasons)
Example #47
def _check_redis(uri):
    client = StrictRedis.from_url(uri)
    client.connection_pool.connection_kwargs['socket_timeout'] = 3
    try:
        client.ping()
    except RedisError as exc:
        _warn('Invalid redis URI: ' + str(exc))
        return False
    else:
        return True
Example #48
def main():
    redis = StrictRedis.from_url("redis://localhost:6379/0", socket_timeout=10)
    cli = Client(redis, prefix='rpc_example')

    if sys.argv[1] == 'get':
        print(json.dumps(cli.call('get', k=sys.argv[2])))
    elif sys.argv[1] == 'set':
        print(cli.call('set', k=sys.argv[2], v=json.loads(sys.argv[3])))
    else:
        print(USAGE)
def clear_redis(users: List[User] = None):
    conn = StrictRedis.from_url(settings.REDIS_URL)
    if users:
        to_delete = []
        for u in users:
            to_delete.extend(conn.keys(rf'{u.id}*'))
        for k in to_delete:
            conn.delete(k)
    else:
        conn.flushdb()
    def __init__(self, refresh_cache=False):
        self.cache_key = "cases"
        self.cache = StrictRedis.from_url(REDIS_URL, db=REDIS_DB)
        self.urls = {
            "cases": BASE_SPREADSHEET_URL + CASES_SPREADSHEET_GID,
            "stories": BASE_SPREADSHEET_URL + STORIES_SPREADSHEET_GID,
        }

        if refresh_cache:
            self.reload_from_google_spreadsheet()
Example #51
    def __init__(self,
                 redis_url='redis://',
                 redis_key_prefix='ratelimit',
                 bucket_size=50,
                 bucket_period=30):
        self.redis = Redis.from_url(redis_url)
        self.script = self.redis.register_script(LUA_SCRIPT)
        self.redis_key_prefix = redis_key_prefix
        self.bucket_size = bucket_size
        self.bucket_period = bucket_period
Example #52
def remove_data_nodes(self, redis_url: str, name: str):
    """
    task for remove current node data by name
    """
    try:
        conn = StrictRedis.from_url(redis_url)
        data = Dict(key='data', redis=conn)
        del data[name]
    except ConnectionError:
        self.retry(countdown=5)
Example #53
    def configure(cls, settings):
        kwargs = super(RedisCache, cls).configure(settings)
        try:
            from redis import StrictRedis
        except ImportError:  # pragma: no cover
            raise ImportError("You must 'pip install redis' before using "
                              "redis as the database")
        db_url = settings.get('db.url')
        kwargs['db'] = StrictRedis.from_url(db_url, decode_responses=True)
        return kwargs
Example #54
def check_redis(*args, **kwargs):
    """Checks if configured Redis instance is pingable."""
    try:
        r = StrictRedis.from_url(current_app.config['CACHE_REDIS_URL'])
        t1 = time.time()
        res = r.ping()
        t2 = time.time()
        return 'redis', res, {'time': t2 - t1}
    except (ConnectionError, ValueError) as e:
        return 'redis', False, {'error': str(e)}
    def redis(self):
        """
        Redis storage abstraction layer. Returns a singleton with get, delete and set methods.
        """
        if getattr(self.request.registry, 'transaction_redis', None) is None:
            redis_url = self.request.registry.settings.get('epfl.transaction.url')
            if not redis_url:
                raise Exception('Transaction redis url not set!')
            self.request.registry.transaction_redis = StrictRedis.from_url(redis_url)
        return self.request.registry.transaction_redis
Example #56
def karma_command(bot, channel, sender, args):
    """ Shows Karma for yourself, or optionally another user. Usage: {bot.trigger}karma [username] """
    redis = StrictRedis.from_url(bot.config['System']['redis_url'])
    term = " ".join(args).lower() if args else sender.lower()
    try:
        amount = int(
            redis.hget(bot.config['System']['redis_prefix'] + "karma", term))
    except TypeError:
        amount = "no"
    bot.message(channel, "{} has {} karma".format(term, amount))
Example #57
def create_app():
    app = Vibora(router_strategy=RouterStrategy.CLONE)
    # app.configure_static_files()
    app.add_blueprint(bp_api)
    app.logger = logger

    job_queue = Queue(connection=StrictRedis.from_url(app_config.redis_conn))
    app.components.add(job_queue)

    return app
Example #58
def get_multi_karma(bot, number, reverse):
    redis = StrictRedis.from_url(bot.config['System']['redis_url'])
    all_karma = redis.hgetall(bot.config['System']['redis_prefix'] + "karma")
    all_karma = [(item[0].decode('utf-8'), int(item[1]))
                 for item in all_karma.items()]
    sorted_karma = sorted(all_karma,
                          key=operator.itemgetter(1),
                          reverse=reverse)

    return sorted_karma[:number]
Example #59
def init_app(app):
    from redis import StrictRedis
    from flask_oauthlib.contrib.cache import Cache

    # register zerqu_cache
    Cache(app, config_prefix='ZERQU')

    # register zerqu_redis
    client = StrictRedis.from_url(app.config['ZERQU_REDIS_URI'])
    app.extensions['zerqu_redis'] = client
Example #60
    def __init__(self,
                 redis_url='redis://redis:6379/0',
                 task_key='task-pool',
                 testing=False):
        if testing:
            # TODO: Core does this too and it's not the prettiest...
            self.redis = Mock()
        else:
            self.redis = StrictRedis.from_url(redis_url)
        self.task_key = task_key