Example #1
async def sendlock(m: Matcher,
                   r: Redis,
                   key: str,
                   value,
                   func=lambda v: str(v)):
    await m.send(func(value))
    r.set(key, value)
Example #2
    def post(self, request):
        form = RefreshTokenForm.create_from_request(request)

        if not form.is_valid():
            raise ValidationException(request, form)

        try:
            claims = JWTFactory.decode(form.cleaned_data['refresh'])
        except JoseError as e:
            raise ProblemDetailException(request,
                                         _('Invalid token.'),
                                         status=HTTPStatus.UNAUTHORIZED,
                                         previous=e)

        redis = Redis(host=settings.REDIS_HOST,
                      port=settings.REDIS_PORT,
                      db=settings.REDIS_DATABASE)

        if not redis.exists(f"refresh_token:{claims['jti']}"):
            raise UnauthorizedException(request)

        access_token = JWTFactory(claims['sub']).access()

        return SingleResponse(request, {'access_token': access_token},
                              status=HTTPStatus.OK)
Example #3
class RedisPublisher(RedisNode):

    def start(self):
        conf = parse_uri(self._uri)
        self._channel = conf.pop('channel', self._name)
        self._method = conf.pop('method', 'queue')
        self._client = Redis(**conf)

    def stop(self):
        self._client.connection_pool.disconnect()

    def send(self, msg):
        """
        Send a message
        :param  msg: serializable string
        """
        self._lock.acquire()
        if self._method == 'pubsub':
            ret = self._client.publish(self._channel, msg)
            if ret < 1:
                self._log.error('No subscriber received this message')
        else:
            ret = self._client.rpush(self._channel, msg)
        self._lock.release()
        return ret
Example #4
class RedisSafetyStore(object):
    def __init__(self):
        self.redisClient = Redis(connection_pool=BlockingConnectionPool())

    def isSafe(self, url):
        # if the urlstring is in the store, then it is not safe.

        try:
            notSafe = self.redisClient.get(url)
        except:
            print("Redis read error")
            raise

        if notSafe:
            return False
        else:
            return True

    def addUnsafeResources(self, jsonContent):
        for resource in jsonContent["resources"]:
            try:
                self.redisClient.set(resource, 1)
            except:
                print("Redis write error")
                raise

        return True
Example #5
 def GET(self, key):
     r = Redis(connection_pool=redis_pool)
     url = r.hget(URL_HASH_NAME, key)
     if url:
         r.hincrby(COUNT_HASH_NAME, key)
         raise web.seeother(url)
     else:
         raise notfound()
Example #6
    def GET(self):
        r = Redis(connection_pool=redis_pool)

        count = 0
        for key in r.hgetall(URL_HASH_NAME):
            count += 1

        return render_template('index.html', count=count, section_class='index', user_id=None)
Example #7
 def __init__(self, client=None):
     if not client:
         self.client = Redis(host='localhost',
                             port=6379,
                             db=13,
                             password=None)
     else:
         self.client = client
Example #8
    def __init__(self, *args, **kwargs):
        """
        Creates a new RedisBloom client.
        """
        Redis.__init__(self, *args, **kwargs)

        # Set the module commands' callbacks
        MODULE_CALLBACKS = {
            self.BF_RESERVE: bool_ok,
            #self.BF_ADD: spaceHolder,
            #self.BF_MADD: spaceHolder,
            #self.BF_INSERT: spaceHolder,
            #self.BF_EXISTS: spaceHolder,
            #self.BF_MEXISTS: spaceHolder,
            #self.BF_SCANDUMP: spaceHolder,
            #self.BF_LOADCHUNK: spaceHolder,
            self.BF_INFO: BFInfo,

            self.CF_RESERVE: bool_ok,
            #self.CF_ADD: spaceHolder,
            #self.CF_ADDNX: spaceHolder,
            #self.CF_INSERT: spaceHolder,
            #self.CF_INSERTNX: spaceHolder,
            #self.CF_EXISTS: spaceHolder,
            #self.CF_DEL: spaceHolder,
            #self.CF_COUNT: spaceHolder,
            #self.CF_SCANDUMP: spaceHolder,
            #self.CF_LOADCHUNK: spaceHolder,
            self.CF_INFO: CFInfo,


            self.CMS_INITBYDIM: bool_ok,
            self.CMS_INITBYPROB: bool_ok,
            #self.CMS_INCRBY: spaceHolder,
            #self.CMS_QUERY: spaceHolder,
            self.CMS_MERGE: bool_ok,
            self.CMS_INFO: CMSInfo,

            self.TOPK_RESERVE: bool_ok,
            self.TOPK_ADD: parseToList,
            #self.TOPK_QUERY: spaceHolder,
            #self.TOPK_COUNT: spaceHolder,
            self.TOPK_LIST: parseToList,
            self.TOPK_INFO: TopKInfo,

            self.TDIGEST_CREATE: bool_ok,
            # self.TDIGEST_RESET: bool_ok,
            # self.TDIGEST_ADD: spaceHolder,
            # self.TDIGEST_MERGE: spaceHolder,
            # self.TDIGEST_CDF: spaceHolder,
            # self.TDIGEST_QUANTILE: spaceHolder,
            # self.TDIGEST_MIN: spaceHolder,
            # self.TDIGEST_MAX: spaceHolder,
            self.TDIGEST_INFO: TDigestInfo,
        }
        for k, v in six.iteritems(MODULE_CALLBACKS):
            self.set_response_callback(k, v)
Example #9
    def connect(self):
        conf = parse_uri(self._uri)
        self._channel = conf.pop('channel', self._name)
        self._method = conf.pop('method', 'pubsub')
        self._client = Redis(**conf)

        if self._method == 'pubsub':
            self._pubsub = self._client.pubsub()
            self._pubsub.subscribe(self._channel)
Example #10
def new_watcher(addr: str, option: WatcherOptions) -> Casbin_Watcher:
    option.addr = addr
    option.init_config()

    w = Watcher()
    w.sub_client = Redis().client()
    w.pub_client = Redis().client()
    w.ctx = None
    w.close = None
    return w
Example #11
 def connection(self):
     """Get Redis connection from the pool."""
     if not self._connection:
         self._connection = Redis(connection_pool=_connection_pool)
         try:
             self._connection.ping()
         except exceptions.RedisError:
             self._connection = None
             raise
     return self._connection
Example #12
 def __init__(self, addr):
     if not addr:
         raise ValueError('Invalid redis address')
     if addr.startswith('unix://'):
         cargs = {'unix_socket_path':addr.replace('unix://', '')}
     elif addr.startswith('tcp://'):
         h = addr.replace('tcp://', '').split(':')
         cargs = {'host': h[0]}
         if len(h) == 2:
             cargs['port'] = int(h[1])
     else:
         raise ValueError('Invalid redis address')
     Redis.__init__(self, **cargs)
Example #13
 def __init__(self, addr, db=0):
     if not addr:
         raise ValueError('Invalid redis address')
     if addr.startswith('unix://'):
         cargs = {'unix_socket_path':addr.replace('unix://', '')}
     elif addr.startswith('tcp://'):
         h = addr.replace('tcp://', '').split(':')
         cargs = {'host': h[0]}
         if len(h) == 2:
             cargs['port'] = int(h[1])
     else:
         raise ValueError('Invalid redis address')
     Redis.__init__(self, **cargs)
Example #14
def show_task_lists(r: Redis) -> None:
    todo = r.lrange(name=TODO_TASK_LIST, start=0, end=-1)
    done = r.lrange(name=DONE_TASK_LIST, start=0, end=-1)
    string = 'To-do list:\n'
    for i in todo:
        string += f'{i}\n'
    string += '\nDone list:\n'
    for i in done:
        string += f'{i}\n'
    with open('proactive_stats/stats.txt', 'w+') as f:
        f.write(string)
Example #15
def delete():

    r = Redis()

    # # iterate a list in batches of size n
    # def batcher(iterable, n):
    #     args = [iter(iterable)] * n
    #     return zip_longest(*args)

    # # in batches of 500 delete keys matching user:*
    # for keybatch in batcher(r.scan_iter('user:*'),500):
    #     print("keybatch", keybatch)
    #     r.delete(*keybatch)
    for key in r.scan_iter("*"):
        # delete the key
        r.delete(key)
Example #16
    def init_redis_client(cls):

        # Guard against races between threads: even after acquiring the lock, the clients may already be initialized
        if cls.redis_objs:
            return

        cls.instance_name = conf.get('sentinel', "")
        cls.socket_timeout = conf.get("socket_timeout", 5)
        cls.connect_timeout = conf.get("connect_timeout", 0.1)

        if not cls.instance_name:
            # single-instance mode
            host = conf["host"]
            cls.redis_objs[cls.MODE_READ] = Redis(
                host=host.split(':')[0],
                port=host.split(':')[1],
                socket_timeout=cls.socket_timeout,
                socket_connect_timeout=cls.connect_timeout,
                retry_on_timeout=1
            )

            cls.redis_objs[cls.MODE_WRITE] = cls.redis_objs[cls.MODE_READ]

        else:
            # sentinel mode
            sentinel = Sentinel(
                cls.parse_config(),
                socket_timeout=cls.socket_timeout,
                socket_connect_timeout=cls.connect_timeout,
                retry_on_timeout=1
            )

            cls.redis_objs[cls.MODE_READ] = sentinel.slave_for(cls.instance_name)
            cls.redis_objs[cls.MODE_WRITE] = sentinel.master_for(cls.instance_name)
Example #17
    def __init__(self, client=None, *args, **kwargs):
        self._client = client
        if not self._client:
            self._client = Redis(*args, **kwargs)

        self._bind_atoms()
        self._bind_multi()
Example #18
def setup_rq_connection():
    redis_conn = get_current_connection()
    if redis_conn is None:
        opts = OPTIONS.get('connection')
        logger.debug('Establishing Redis connection to DB %(db)s at %(host)s:%(port)s' % opts)
        redis_conn = Redis(**opts)
        push_connection(redis_conn)
Example #19
def from_url(url, db=None, **kwargs):
    """Returns an active Redis client generated from the given database URL.

    Will attempt to extract the database id from the path url fragment, if
    none is provided.
    """
    return Redis.from_url(url, db, **kwargs)
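A minimal usage sketch of the wrapper above, assuming a redis-py version whose Redis.from_url accepts db as a second argument and a local server at the default port; the URL and key are illustrative:
# Hypothetical usage: with db omitted, the database index ("2") is taken
# from the URL path, as the docstring describes.
client = from_url("redis://localhost:6379/2")
client.set("example:key", "value")
print(client.get("example:key"))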
Example #20
def from_url(url, db=None, **kwargs):
    """Returns an active Redis client generated from the given database URL.

    Will attempt to extract the database id from the path url fragment, if
    none is provided.
    """
    return Redis.from_url(url, db, **kwargs)
Example #21
class RedisSubscriber(RedisNode):

    def __init__(self, config, handler):
        RedisNode.__init__(self, config)
        self._handler = handler

    def connect(self):
        conf = parse_uri(self._uri)
        self._channel = conf.pop('channel', self._name)
        self._method = conf.pop('method', 'pubsub')
        self._client = Redis(**conf)

        if self._method == 'pubsub':
            self._pubsub = self._client.pubsub()
            self._pubsub.subscribe(self._channel)

    def start(self):
        self.connect()
        self.loop()

    def loop(self):
        while True:
            self._log.debug('in subscriber loop')
            raw_request = self.recv()
            self._handler(raw_request)

    def recv(self):
        """
        Return a message as a string from the receiving queue.

        """
        self._log.debug('waiting in recv()')
        self._lock.acquire()
        if self._method == 'pubsub':
            msg = next(self._pubsub.listen())['data']
        else:
            msg = self._client.blpop(self._channel)[1]

        self._log.debug('redisclient: %s' % self._name)
        self._log.debug('recv -> %s' % msg)
        self._lock.release()
        return msg

    def close(self):
        if self._method == 'pubsub':
            self._pubsub.unsubscribe(self._channel)
        self._client.connection_pool.disconnect()
Example #22
 def _build_redis(host: str,
                  port: int=6379,
                  db: int=0,
                  password: str=None) -> Redis:
     return Redis(host=host,
                  port=port,
                  db=db,
                  password=password)
Example #23
    def init_config(self, option: WatcherOptions):
        if option.optional_update_callback:
            self.set_update_callback(option.optional_update_callback)
        else:
            raise WatcherError("Casbin Redis Watcher callback not "
                               "set when an update was received")
        if option.sub_client:
            self.sub_client = option.sub_client
        else:
            # TODO
            self.sub_client = Redis().client()

        if option.pub_client:
            self.pub_client = option.pub_client
        else:
            # TODO
            self.pub_client = Redis().client()
Example #24
    def GET(self):
        r = Redis(connection_pool=redis_pool)
        user_id = check_token(r)

        all_keys = r.hgetall(URL_HASH_NAME)
        url_list = []

        for key in all_keys:
            url_list.append(
                (
                    all_keys[key].replace('http://', '').replace('https://', ''),
                    key, r.hget(COUNT_HASH_NAME, key) or 0,
                    r.hget(LOG_HASH_NAME, key) or ''
                ),
            )

        return render_template('list.html', user_id=user_id, list=url_list, is_all=True)
Example #25
class RedisSubscriber(RedisNode):
    def __init__(self, config, handler):
        RedisNode.__init__(self, config)
        self._handler = handler

    def connect(self):
        conf = parse_uri(self._uri)
        self._channel = conf.pop('channel', self._name)
        self._method = conf.pop('method', 'pubsub')
        self._client = Redis(**conf)

        if self._method == 'pubsub':
            self._pubsub = self._client.pubsub()
            self._pubsub.subscribe(self._channel)

    def start(self):
        self.connect()
        self.loop()

    def loop(self):
        while True:
            self._log.debug('in subscriber loop')
            raw_request = self.recv()
            self._handler(raw_request)

    def recv(self):
        """
        Return a message as a string from the receiving queue.

        """
        self._log.debug('waiting in recv()')
        self._lock.acquire()
        if self._method == 'pubsub':
            msg = next(self._pubsub.listen())['data']
        else:
            msg = self._client.blpop(self._channel)[1]

        self._log.debug('redisclient: %s' % self._name)
        self._log.debug('recv -> %s' % msg)
        self._lock.release()
        return msg

    def close(self):
        if self._method == 'pubsub':
            self._pubsub.unsubscribe(self._channel)
        self._client.connection_pool.disconnect()
Example #26
 def uget(self, name):
     """
     Return the value at key ``name`` or None, decoding it if not None.
     """
     value = Redis.get(self, name)
     if value:
         return value.decode(self.encoding)
     return value
Example #27
 def __init__(self, config):
     self._redis = Redis(host=config.get('redis','host'), 
                         port=int(config.get('redis','port')),
                         db=int(config.get('redis','db')))
     self._delta_secs = int(eval(config.get('timeseries',
                                            'delta_secs')))
     self._expiration_delay_secs = int(eval(config.get('timeseries',
                                                       'expiration_delay_secs')))
Example #28
    def __init__(self, client=None, *args, **kwargs):
        self._client = client
        if not self._client:
            kwargs.setdefault('decode_responses', True)
            self._client = Redis(*args, **kwargs)

        self._bind_atoms()
        self._bind_multi()
Example #29
    def _transfer_slots(redis_conn_from: Redis, redis_id_from: str, redis_conn_to: Redis, redis_id_to: str, slots: list):
        """
        Documentation from http://redis.io/commands/cluster-setslot
         1. Set the destination node slot to importing state using CLUSTER SETSLOT <slot> IMPORTING <source-node-id>.
         2. Set the source node slot to migrating state using CLUSTER SETSLOT <slot> MIGRATING <destination-node-id>.
         3. Get keys from the source node with CLUSTER GETKEYSINSLOT command and move them into the destination node
            using the MIGRATE command.
         4. Use CLUSTER SETSLOT <slot> NODE <destination-node-id> in the source or destination.
        """
        print('Transferring %d slots from %s to %s...' % (len(slots), redis_id_from, redis_id_to))
        dest_host = redis_conn_to.connection_pool.connection_kwargs['host']
        dest_port = redis_conn_to.connection_pool.connection_kwargs['port']

        pipeline_to = redis_conn_to.pipeline()
        pipeline_from = redis_conn_from.pipeline()
        for slot in slots:
            # 1, 2
            pipeline_to.execute_command('CLUSTER SETSLOT', slot, 'IMPORTING', redis_id_from)
            pipeline_from.execute_command('CLUSTER SETSLOT', slot, 'MIGRATING', redis_id_to)
        pipeline_to.execute()
        pipeline_from.execute()

        for slot in slots:
            # 3
            keys = redis_conn_from.execute_command('CLUSTER GETKEYSINSLOT', slot, 1000000)
            if len(keys) > 0:
                redis_conn_from.execute_command('MIGRATE', dest_host, dest_port, "", 0, 180000, 'KEYS', *keys)
            # 4
            redis_conn_to.execute_command('CLUSTER SETSLOT', slot, 'NODE', redis_id_to)
Example #30
    def POST(self):
        r = Redis(connection_pool=redis_pool)

        user_id = check_token(r)

        form = url_form()

        if not form.validates():
            return render_template('add.html',
                form=form,
                user_id=user_id,
                is_add=True,
            )

        url = form['url'].value
        token = hashlib.sha1()
        token.update(url.replace('http(s)?://', '').strip())
        key = token.hexdigest()[:6]
        print(key + url)
        if not r.hget(URL_HASH_NAME, key):
            r.hset(URL_HASH_NAME, key, url)
            r.hset(COUNT_HASH_NAME, key, 0)
            if user_id[1]:
                r.hset(LOG_HASH_NAME, key, r.hget(TOKEN_HASH_NAME, web.input().token))

        if user_id[1] == '':
            raise web.seeother('/%s/+' % key)
        else:
            raise web.seeother('/%s/+?token=%s' % (key, user_id[1]))
Example #31
def queue_cmd(conn: Redis,
              job_queue: str,
              cmd: str,
              email: Optional[str] = None) -> str:
    """Given a command CMD; (optional) EMAIL; and a redis connection CONN, queue
it in Redis with an initial status of 'queued'.  The following status codes
are supported:

    queued:  Unprocessed; Still in the queue
    running: Still running
    success: Successful completion
    error:   Erroneous completion

Returns the name of the specific redis hash for the specific task.

    """
    if not conn.ping():
        raise RedisConnectionError
    unique_id = ("cmd::"
                 f"{datetime.now().strftime('%Y-%m-%d%H-%M%S-%M%S-')}"
                 f"{str(uuid4())}")
    conn.rpush(job_queue, unique_id)
    for key, value in {"cmd": cmd, "result": "", "status": "queued"}.items():
        conn.hset(name=unique_id, key=key, value=value)
    if email:
        conn.hset(name=unique_id, key="email", value=email)
    return unique_id
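A hedged usage sketch of queue_cmd, assuming a local Redis at the default port; the queue name and command string are illustrative, and a worker process would later update the status field:
# Hypothetical caller of the helper above.
redis_conn = Redis()
task_id = queue_cmd(redis_conn, "GN2::job-queue", "compute-report --id 42",
                    email="user@example.com")
print(redis_conn.hgetall(task_id))  # cmd/result/status/email fields, status == "queued"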
Example #32
    def connect(self):
        conf = parse_uri(self._uri)
        self._channel = conf.pop('channel', self._name)
        self._method = conf.pop('method', 'pubsub')
        self._client = Redis(**conf)

        if self._method == 'pubsub':
            self._pubsub = self._client.pubsub()
            self._pubsub.subscribe(self._channel)
Example #33
    def GET(self, key):
        r = Redis(connection_pool=redis_pool)

        user_id = check_token(r)

        url = r.hget(URL_HASH_NAME, key)
        if url:
            count = r.hget(COUNT_HASH_NAME, key)

            return render_template('details.html',
                user_id=user_id,
                key=key,
                url=url,
                count=count,
                section_class='index'
            )
        else:
            raise notfound()
Example #34
 def __init__(self, app, config):
     if 'host' not in config:
         raise ConfigurationError("Redis extension needs 'host' configured")
     if 'password' not in config:
         raise ConfigurationError(
             "Postmark extension needs 'password' configured")
     host = config['host']
     password = config['password']
     redis = Redis(host=host, password=password)
     register_singleton(redis, 'redis')
Example #35
	def testRegisterSensorDataDbmsListener(self):
	
		#check for true when the connection variables are ok
		self.assertEqual(True, self.persistenceUtil.registerSensorDataDbmsListener())
	
		#add an invalid port to the jedisSensor
		self.persistenceUtil.r_sensor = Redis(host = "localhost", port = 6890)
		
		#check for false when connection variables are invalid
		self.assertEqual(False, self.persistenceUtil.registerSensorDataDbmsListener())
Example #36
def _get_current_redis_client() -> Redis:
    global _redis_clients
    client = _redis_clients.current_client

    if client is None:
        assert _connection_url is not None, "please set connection string first"
        client = _redis_clients.current_client = Redis.from_url(
            _connection_url)

    return client
Example #37
class Cache:
    """ Cache class
    """
    def __init__(self):
        """ Store an instance of the Redis client as a private variable
            named _redis
        """
        self._redis = Redis()
        self._redis.flushdb()

    @call_history
    @count_calls
    def store(self, data: Union[str, bytes, int, float]) -> str:
        """ Takes a data argument and returns a string
        """
        key = str(uuid4())
        self._redis.set(key, data)
        return key

    def get(self, key: str, fn: Optional[Callable] = None) ->\
            Union[str, bytes, int, float]:
        """ Take a key string argument and an optional Callable argument
            named fn
        """
        if key:
            result = self._redis.get(key)
            if fn:
                return fn(result)
            else:
                return result

    def get_str(self, data: bytes) -> str:
        """ Will automatically parametrize Cache.get with the correct
            conversion function
        """
        return data.decode('utf-8')

    def get_int(self, data: bytes) -> int:
        """ Will automatically parametrize Cache.get with the correct
            conversion function
        """
        byte_order = sys.byteorder
        return int.from_bytes(data, byte_order)
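A brief usage sketch of the Cache class, assuming a local Redis server and that the call_history and count_calls decorators are defined elsewhere; the stored value is illustrative:
# Hypothetical usage: store() returns a generated UUID key, get() reads it back.
cache = Cache()
key = cache.store("hello")
print(cache.get(key))                    # raw bytes, e.g. b"hello"
print(cache.get(key, fn=cache.get_str))  # decoded to "hello"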
Example #38
 def hgetall(self, name):
     """
     Return a Python dict of the hash's name/value pairs, both key and
     value decoded.
     """
     output = {}
     info = Redis.hgetall(self, name)
     for key, value in info.items():
         output[key.decode(self.encoding)] = value.decode(self.encoding)
     return output
Example #39
    def _add_new_nodes(self, cluster_size):
        old_nodes = self.nodes.copy()
        nodes_before = self._get_nodes_primitive()
        self._docker_scale(cluster_size)
        nodes_after = self._get_nodes_primitive()

        new_ips = [':'.join(map(str, x)) for x in set(nodes_after) - set(nodes_before)]
        print(new_ips)
        master_ip_port = old_nodes[0]['ip_port']
        master_ip, master_port = master_ip_port.split(':')
        master_conn = Redis(master_ip, master_port)

        print("Adding nodes to the cluster")
        for ip in new_ips:
            new_ip, new_port = ip.split(':')
            master_conn.execute_command('CLUSTER MEET', new_ip, new_port)

        print("Preventive fix")
        sleep(3)
        fix = subprocess.Popen(['ruby', 'redis-trib.rb', 'fix', master_ip_port], stdin=subprocess.PIPE, stdout=subprocess.DEVNULL)
        fix.communicate(b'yes\n')
        fix.wait()
        sleep(3)

        new_nodes = [x for x in self._get_running_nodes() if x['ip_port'] in new_ips]
        slots_per_node = round(16384 / cluster_size)

        old_redises = {x[0]: Redis(x[0], x[1]) for x in (y['ip_port'].split(':') for y in old_nodes)}
        new_redises = [Redis(x[0], x[1]) for x in (y['ip_port'].split(':') for y in new_nodes)]
        slots_repartition = self._get_slots_repartition(list(old_redises.values())[0])

        for dest_node, dest_redis, i in zip(new_nodes, new_redises, range(len(new_nodes))):
            slots = slots_repartition[i * slots_per_node: (i + 1) * slots_per_node]
            sources_ip = {x[1] for x in slots}
            for source_ip in sources_ip:
                slots_for_source = [x for x in slots if x[1] == source_ip]
                source_redis = old_redises[source_ip]
                self._transfer_slots(source_redis, slots_for_source[0][3],
                                     dest_redis, dest_node['id'],
                                     [x[0] for x in slots_for_source])

        subprocess.check_call(['ruby', 'redis-trib.rb', 'info', master_ip_port])
Example #40
    def redis_client(self) -> Redis:
        """Provide an instance of Redis client."""
        if self._redis_client is None:
            redis_client = Redis(connection_pool=self.redis_conn_pool)

            self._redis_client = redis_client

            self._logger.debug("[%s]: Initialized Redis client: %s",
                               self.__name__, self._redis_client)

        return self._redis_client
Example #41
def main(argv=sys.argv):
    if len(argv) < 2:
        usage(argv)
    config_uri = argv[1]
    options = parse_vars(argv[2:])
    setup_logging(config_uri)
    settings = get_appsettings(config_uri, options=options)
    logging.getLogger("c2corg_api").setLevel(logging.INFO)

    redis_url = "{0}?db={1}".format(settings["redis.url"], settings["redis.db_cache"])
    log.info("Cache Redis: {0}".format(redis_url))

    # we don't really need a connection pool here, but the `from_url`
    # function is convenient
    redis_pool = ConnectionPool.from_url(redis_url, max_connections=1)

    # remove all keys from the database
    r = Redis(connection_pool=redis_pool)
    r.flushdb()
    log.info("Flushed cache")
Example #42
 def flushall(self):
     if self.cluster_size == 1:
         subprocess.check_call([
             'redis-cli', '-h', 'erasuretester_redis-standalone_1',
             'FLUSHALL'
         ])
     elif self.cluster_size >= 2:
         nodes = self._get_running_nodes()
         for redis in (Redis(x[0], x[1])
                       for x in (y['ip_port'].split(':') for y in nodes)):
             redis.flushall()
Example #43
class Jedis():
    def __init__(self):
        pool = ConnectionPool(host=cfg.REDIS_HOST, port=cfg.REDIS_PORT)
        self.client = Redis(connection_pool=pool)

    def page_query(self, redis_key, start, end):
        '''
        Paged query
        :param redis_key: name of the key in redis
        :param start: start index
        :param end: end index
        :return:
        '''
        return self.client.lrange(name=redis_key, start=start, end=end)

    def len(self, redis_key):
        return self.client.llen(name=redis_key)

    def lpush(self, key, val):
        self.client.lpush(key, val)
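A short usage sketch of the Jedis wrapper, assuming cfg.REDIS_HOST and cfg.REDIS_PORT point at a reachable server; the key name is illustrative:
# Hypothetical usage of the paging helpers above.
jedis = Jedis()
jedis.lpush('articles', 'first post')
print(jedis.len('articles'))               # list length
print(jedis.page_query('articles', 0, 9))  # items 0..9 (LRANGE is inclusive)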
Example #44
def queue_cmd(cmd: str, conn: Redis) -> str:
    """Given a command CMD, and a redis connection CONN, queue it in Redis
with an initial status of 'queued'.  The following status codes are
supported:

    queued:  Unprocessed; Still in the queue
    running: Still running
    success: Successful completion
    error:   Erroneous completion

    """
    if not conn.ping():
        raise RedisConnectionError
    unique_id = ("cmd::"
                 f"{datetime.now().strftime('%Y-%m-%d%H-%M%S-%M%S-')}"
                 f"{str(uuid4())}")
    for key, value in {"cmd": cmd, "result": "", "status": "queued"}.items():
        conn.hset(key, value, unique_id)
        conn.rpush("GN2::job-queue", unique_id)
    return unique_id
Example #45
def main(argv=sys.argv):
    if len(argv) < 2:
        usage(argv)
    config_uri = argv[1]
    options = parse_vars(argv[2:])
    setup_logging(config_uri)
    settings = get_appsettings(config_uri, options=options)
    logging.getLogger('c2corg_ui').setLevel(logging.INFO)

    log.info('Cache Redis: {0}'.format(settings['redis.url']))

    # we don't really need a connection pool here, but the `from_url`
    # function is convenient
    redis_pool = ConnectionPool.from_url(
        settings['redis.url'], max_connections=1)

    # remove all keys from the database
    r = Redis(connection_pool=redis_pool)
    r.flushdb()
    log.info('Flushed cache')
Example #46
def main(argv=sys.argv):
    if len(argv) < 2:
        usage(argv)
    config_uri = argv[1]
    options = parse_vars(argv[2:])
    setup_logging(config_uri)
    settings = get_appsettings(config_uri, options=options)
    logging.getLogger('c2corg_ui').setLevel(logging.INFO)

    log.info('Cache Redis: {0}'.format(settings['redis.url']))

    # we don't really need a connection pool here, but the `from_url`
    # function is convenient
    redis_pool = ConnectionPool.from_url(
        settings['redis.url'], max_connections=1)

    # remove all keys from the database
    r = Redis(connection_pool=redis_pool)
    r.flushdb()
    log.info('Flushed cache')
Example #47
 def redis(self):
     """Returns the redis.client object for the given host/port
     Performing lazy initialization if it hasn't already been created
     """
     if not self.__redis:
         redis = Redis( host=self.__host, port=self.__port, password=self.__password, socket_timeout=self.__connection_timeout )
         self.__update_latest( redis )
         #__update_latest can raise an exception, so don't assign to self until it
         #completes successfully
         self.__redis = redis
     return self.__redis
Example #48
    def __init__(self, urls, **settings):
        """
        Args:
            urls: list of connection urls for `redis.client.StrictRedis#from_url`.
            settings: additional settings for redis client.

        Attributes:
            mapping (dict): maintain the connection url & corresponding Redis client mapping.
            sharding: `tornext.sharding.Sharding` instance for dispatching requests.
        """
        self.sharding = Sharding(urls)
        self.mapping = {url: Redis.from_url(url) for url in urls}
Example #49
    def _connect(self) -> None:
        """
        Connect to the Redis server, storing the client in self._kv_storage.
        Logs an error if the connection or ping fails.
        """

        try:
            self._kv_storage = Redis(
                host=self.host,
                port=self.port,
                socket_timeout=self.timeout
            )
            self._kv_storage.ping()
        except Exception:
            logging.error(
                "Got error while connecting to redis with host %s and port %d.",
                self.host,
                self.port
            )
Example #50
class Cache:
    """Redis Class"""
    def __init__(self):
        """constructor"""
        self._redis = Redis()
        self._redis.flushdb()

    @call_history
    @count_calls
    def store(self, data: Union[str, bytes, int, float]) -> str:
        """returns a string"""
        key = str(uuid.uuid4())
        self._redis.set(key, data)
        return key

    def get(self, key: str, fn: Optional[Callable] = None) ->\
            Union[str, bytes, int, float]:
        """get method"""
        if key:
            result = self._redis.get(key)
            return fn(result) if fn else result
Example #51
 def _get_slots_repartition(any_redis_conn: Redis):
     """
     Returns a shuffled list of (slot_number, node_ip, node_port, node_id)
     """
     # List of [10923, 16383, [b'10.0.0.4', 6379, b'f1dc21d0b7a24aaea3b3fcd0ef943a35fa2ebb42']]
     cluster_slots = any_redis_conn.execute_command('CLUSTER SLOTS')
     output = []
     for slot in cluster_slots:
         for i in range(slot[0], slot[1] + 1):
             output.append((i, slot[2][0].decode(), slot[2][1], slot[2][2].decode()))
     random.shuffle(output)
     return output
Example #52
    def lrange(self, name, start, end):
        """
        Return a slice of the list ``name`` between
        position ``start`` and ``end``
        
        ``start`` and ``end`` can be negative numbers just like
        Python slicing notation

        decode the results
        """
        results = Redis.lrange(self, name, start, end)
        return [result.decode(self.encoding) for result in results]
Example #53
class RedisQueue(): 
    def __init__(self):    
        self.redis = Redis(host=settings.REDIS['host'], port=settings.REDIS['port'])
    
    def pop(self, queueName):
        item = self.redis.rpop(queueName)
        try:
            item = json.loads(item)
        except:
            pass
        return item
         
    def push(self, queueName, item):  
        try:
            item = json.dumps(item)
        except:
            pass
        self.redis.lpush(queueName, item)
        
    def isEmpty(self, queueName):
        if self.redis.llen(queueName):
            return False
        else:
            return True
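A hedged usage sketch of RedisQueue, assuming settings.REDIS points at a reachable server; the queue name and payload are illustrative:
# Hypothetical usage: push() JSON-encodes dicts, pop() decodes them back.
q = RedisQueue()
q.push('jobs', {'id': 1, 'action': 'resize'})
if not q.isEmpty('jobs'):
    job = q.pop('jobs')
    print(job['action'])  # -> 'resize'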
Example #54
class TSStore(object):

    def __init__(self, config):
        self._redis = Redis(host=config.get('redis','host'), 
                            port=int(config.get('redis','port')),
                            db=int(config.get('redis','db')))
        self._delta_secs = int(eval(config.get('timeseries',
                                               'delta_secs')))
        self._expiration_delay_secs = int(eval(config.get('timeseries',
                                                          'expiration_delay_secs')))

    def queries_key(self):
        return 'queries'
    @property
    def queries(self):
        return self._redis.smembers(self.queries_key())
    @queries.setter
    def queries(self, values):
        pipe = self._redis.pipeline()
        pipe.delete(self.queries_key())
        for v in values:
            pipe.sadd(self.queries_key(),
                      v)
        return pipe.execute()

    def _interval_key(self, timestamp):
        return int(timestamp) - int(timestamp) % self._delta_secs
    def _ts_key(self, timestamp, query):
        return 'ts:%(query)s:%(timestamp_key)s'%{'query':query,
                                                 'timestamp_key':self._interval_key(timestamp)}
    def _tweet_key(self, t):
        if type(t) == Tweet:
            return 'tweet:%s'%t.id
        return 'tweet:%s'%t
    def _query_key(self, query):
        return 'query:%s:last_tweet_id'%query

    def _store_tweet(self, pipe, tweet):
        tweet_key = self._tweet_key(tweet)
        pipe.set(tweet_key, tweet.serialize())
        pipe.expire(tweet_key, self._expiration_delay_secs)
    def _reference_tweet(self, pipe, timestamp, query, tweet):
        ts_key = self._ts_key(timestamp, query)
        pipe.lpush(ts_key,tweet.id)
        pipe.expire(ts_key,self._expiration_delay_secs)
    def _update_last_query_tweet(self, pipe, query, tweet):
        query_key = self._query_key(query)
        pipe.set(query_key,tweet.id)
    def append(self, query, tweet):
        pipe = self._redis.pipeline()
        timestamp = time.time()
        self._store_tweet(pipe, tweet)
        self._reference_tweet(pipe, timestamp, query, tweet)
        self._update_last_query_tweet(pipe, query, tweet)
        return pipe.execute()

    def retrieve_ts(self, query, timestamp, n_elements=-1):
        ts_key = self._ts_key(timestamp, query)
        return self._redis.lrange(ts_key, 0, n_elements)
    def retrieve_last_tweet_id(self, query):
        query_key = self._query_key(query)
        return self._redis.get(query_key)
    def retrieve_tweet(self, tweet_id):
        tweet_key = self._tweet_key(tweet_id)
        data = self._redis.get(tweet_key)
        return Tweet.deserialize(data).todict()
    def retrieve(self, query, n_periods=30):
        current_timestamp = now = int(time.time())
        start_timestamp = now - self._delta_secs * n_periods
        tweets = []
        while current_timestamp > start_timestamp:
            current_tweet_ids = self.retrieve_ts(query, current_timestamp)
            tweets.append({'timestamp': current_timestamp,
                           'tweets' : [ self.retrieve_tweet(tid) for tid in current_tweet_ids ] })
            current_timestamp -= self._delta_secs 
        return { 'now' : now,
                 'ts' : tweets }
Example #55
 def __init__(self, *args, **kwargs):
     self.encoding = 'utf-8'
     Redis.__init__(self, *args, **kwargs)
Example #56
 def smembers(self, name):
     """
     Return all members of the set ``name``, decoding.
     """
     values = Redis.smembers(self, name)
     return (value.decode(self.encoding) for value in values)
Example #57
# -*- coding: utf-8 -*-
__author__ = 'fjs'



from connection import BlockingConnectionPool
from redis.client import Redis

client = Redis(connection_pool=BlockingConnectionPool(max_connections=2))


client.set("fjs", "fjs")
print client.get("fjs")

client.lpush("nn", 1)
print client.lpop("nn")

Example #58
 def __new__(cls, *args, **kwargs):
     k = str(args)+str(kwargs)
     if k not in cls.__instances:
         cls.__instances[k] = Redis.__new__(cls, *args, **kwargs)
     return cls.__instances[k]
Example #59
 def start(self):
     conf = parse_uri(self._uri)
     self._channel = conf.pop('channel', self._name)
     self._method = conf.pop('method', 'queue')
     self._client = Redis(**conf)
Example #60
class HotClient(object):
    """
    A Redis client wrapper that loads Lua functions and creates
    client methods for calling them.
    """
    _ATOMS_FILE_NAME = "atoms.lua"
    _BIT_FILE_NAME = "bit.lua"
    _MULTI_FILE_NAME = "multi.lua"

    def __init__(self, client=None, *args, **kwargs):
        self._client = client
        if not self._client:
            kwargs.setdefault('decode_responses', True)
            self._client = Redis(*args, **kwargs)

        self._bind_atoms()
        self._bind_multi()

    def _bind_atoms(self):
        with open(self._get_lua_path(self._BIT_FILE_NAME)) as f:
            luabit = f.read()

        requires_luabit = (
            "number_and",
            "number_or",
            "number_xor",
            "number_lshift",
            "number_rshift"
        )

        for name, snippet in self._split_lua_file_into_funcs(
                self._ATOMS_FILE_NAME):
            if name in requires_luabit:
                snippet = luabit + snippet
            self._bind_lua_method(name, snippet)

    def _bind_multi(self):
        for name, snippet in self._split_lua_file_into_funcs("multi.lua"):
            self._bind_private_lua_script(name, snippet)

    @staticmethod
    def _get_lua_path(name):
        """
        Joins the given name with the relative path of the module.
        """
        parts = (os.path.dirname(os.path.abspath(__file__)), "lua", name)
        return os.path.join(*parts)

    def _split_lua_file_into_funcs(self, file_name):
        """
        Returns the name / code snippet pair for each Lua function
        in the file under file_name.
        """
        with open(self._get_lua_path(file_name)) as f:
            for func in f.read().strip().split("function "):
                if func:
                    bits = func.split("\n", 1)
                    name = bits[0].split("(")[0].strip()
                    snippet = bits[1].rsplit("end", 1)[0].strip()
                    yield name, snippet

    def _bind_lua_method(self, name, code):
        """
        Registers the code snippet as a Lua script, and binds the
        script to the client as a method that can be called with
        the same signature as regular client methods, eg with a
        single key arg.
        """
        script = self._client.register_script(code)
        method = lambda key, *a, **k: script(keys=[key], args=a, **k)
        setattr(self, name, method)

    def _bind_private_lua_script(self, name, code):
        """
        Registers the code snippet as a Lua script, and binds the
        script to the client as a private method (eg. some_lua_func becomes
        a _some_lua_func method of HotClient) that can be later wrapped in
        public methods with better argument and error handling.
        """
        script = self._client.register_script(code)
        setattr(self, '_' + name, script)

    def rank_lists_by_length(self, *keys):
        """
        Creates a temporary ZSET with LIST keys as entries and their
        *LLEN* as scores. Uses ZREVRANGE .. WITHSCORES, to return keys and
        lengths sorted from longest to shortest.
        :param keys: keys of the lists you want rank
        :return: :rtype: Ranking :raise ValueError:
        :raise ValueError: when not enough keys are provided
        """
        return Ranking(self._rank_lists_by_length, keys)

    def rank_sets_by_cardinality(self, *keys):
        """
        Creates a temporary ZSET with SET keys as entries and their
        *CARD* as scores. Uses ZREVRANGE .. WITHSCORES, to return keys and
        cardinalities sorted from largest to smallest.
        :param keys: keys of the sets you want to rank
        :return: :rtype: Ranking
        :raise ValueError: when not enough keys are provided
        """
        return Ranking(self._rank_sets_by_cardinality, keys)

    def __getattr__(self, name):
        if name in self.__dict__:
            return super(HotClient, self).__getattribute__(name)
        return self._client.__getattribute__(name)
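A hedged usage sketch of HotClient, assuming a local Redis server and that the bundled Lua files sit next to the module; key names are illustrative, and the exact shape of the returned Ranking depends on the Ranking helper, which is not shown here:
# Hypothetical usage: plain redis-py methods pass through __getattr__,
# while rank_lists_by_length calls the Lua script bound above.
hot = HotClient(host="localhost", port=6379)
hot.rpush("queue:a", 1, 2, 3)
hot.rpush("queue:b", 1)
ranking = hot.rank_lists_by_length("queue:a", "queue:b")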