def sync_get(self, hash_id, identity, *args, **kwargs):
    """Return cached data for (hash_id, identity), filling the cache on a miss.

    :param hash_id: Unique hash key for the data.
    :param identity: Unique integer for the data.
    :param args: Positional args forwarded to ``sync_func``.
    :param kwargs: Keyword args forwarded to ``sync_func``.
    :return: The cached or freshly-fetched data, or None when it is empty.
    """
    redis = StrictRedis(connection_pool=self.redis_pool)
    hash_key = key_generator(self.hash_key, hash_id)
    key = key_generator(self.key, identity)
    try:
        if redis.hexists(hash_key, key):
            data = self.get_func(redis.hget(hash_key, key))
        else:
            # Cache miss: fetch from the source and store for next time.
            data = self.sync_func(identity, *args, **kwargs)
            redis.hset(hash_key, key, self.set_func(data))
        # BUG FIX: the original tested `is not None or != ""`, which is
        # always true (None != "" holds), so empty values leaked through.
        if data is not None and data != "":
            return data
        return None
    except RedisError as re:
        self.log.error("[REDIS] %s", str(re))
        # BUG FIX: the original passed the args tuple as a single
        # positional argument; forward the arguments properly instead.
        data = self.sync_func(identity, *args, **kwargs)
        return data
    finally:
        del redis
def handle_overview(args: dict) -> dict:
    """Return the cached overview entry for the requested 'jasdm' field."""
    if not __Application.serve:
        # Service switched off: report it with an empty payload.
        return {
            'status': 1,
            'message': "service off",
            'service': "off",
            'data': []
        }
    redis = StrictRedis(connection_pool=__Application.redis_pool)
    lock = Lock(redis, "Server-Overview")
    if lock.acquire():
        try:
            # A missing/unknown 'jasdm' is surfaced to the caller as KeyError.
            if 'jasdm' not in args or not redis.hexists(
                    "Overview", args['jasdm']):
                raise KeyError('jasdm')
            payload = json.loads(redis.hget(name="Overview",
                                            key=args['jasdm']))
            return {
                'status': 0,
                'message': "ok",
                'service': "on",
                'data': payload
            }
        finally:
            lock.release()
def handle_empty(args: Dict[str, str]) -> Dict[str, Any]:
    """List classrooms in building 'jxl' that are free during period 'dqjc'."""
    if not __Application.serve:
        return {
            'status': 1,
            'message': "service off",
            'service': "off",
            'data': []
        }
    redis = StrictRedis(connection_pool=__Application.redis_pool)
    lock = Lock(redis, "Server-Empty")
    if lock.acquire():
        try:
            # Validate the request; a bad/missing parameter raises KeyError.
            if 'day' not in args or not args['day'].isdigit() or not (
                    0 <= int(args['day']) <= 6):
                raise KeyError('day')
            if 'dqjc' not in args or not args['dqjc'].isdigit():
                raise KeyError('dqjc')
            if 'jxl' not in args or not redis.hexists(
                    "Empty", f"{args['jxl']}_{args['day']}"):
                raise KeyError('jxl')
            jxl, day, dqjc = args['jxl'], int(args['day']), int(args['dqjc'])
            value = json.loads(
                redis.hget(name="Empty", key=f"{args['jxl']}_{args['day']}"))
            # Keep only rooms whose free span covers the current period.
            classrooms = [
                room for room in value
                if room['jc_ks'] <= dqjc <= room['jc_js']
            ]
            # Re-number the surviving rooms starting from 1.
            for position, room in enumerate(classrooms, start=1):
                room['id'] = position
            return {
                'status': 0,
                'message': "ok",
                'service': "on",
                'data': classrooms
            }
        finally:
            lock.release()
class RedisUtil:
    """Thin wrapper around StrictRedis for the agent's queue, hash and
    sorted-set keys (key names come from ``rediscommon``)."""

    def __init__(self):
        self.redishanndle = StrictRedis(
            host=rediscommon.redis_link_host,
            port=rediscommon.redis_link_port,
            db=rediscommon.redis_link_db,
            password=rediscommon.redis_link_password)

    def queuelpush(self, json_str):
        # Push a payload onto the head of the work queue.
        self.redishanndle.lpush(rediscommon.agent_redis_queue_key, json_str)

    def queuelpop(self):
        # Pop from the head of the work queue (None when empty).
        return self.redishanndle.lpop(rediscommon.agent_redis_queue_key)

    def queuerpush(self, json_str):
        # Push a payload onto the tail of the work queue.
        self.redishanndle.rpush(rediscommon.agent_redis_queue_key, json_str)

    def queuerpop(self):
        # Pop from the tail of the work queue.
        return self.redishanndle.rpop(rediscommon.agent_redis_queue_key)

    def hashset(self, field, value):
        self.redishanndle.hset(rediscommon.agent_redis_hash_key, field, value)

    def hashget(self, field):
        return self.redishanndle.hget(rediscommon.agent_redis_hash_key, field)

    def hashexists(self, field):
        return self.redishanndle.hexists(rediscommon.agent_redis_hash_key,
                                         field)

    def hashdel(self, field):
        return self.redishanndle.hdel(rediscommon.agent_redis_hash_key, field)

    def hashexistsprint(self):
        # Debug helper: dump all fields of the agent hash.
        print(self.redishanndle.hkeys(rediscommon.agent_redis_hash_key))

    # Add one member to the sorted set, or update its score if present.
    def sortedsetZADD(self, score, member):
        adddata = {member: score}
        self.redishanndle.zadd(rediscommon.agent_redis_sortedset_key, adddata)

    # Number of members in the sorted set.
    def sortedsetZCARD(self):
        return self.redishanndle.zcard(rediscommon.agent_redis_sortedset_key)

    # Remove a member from the sorted set.
    def sortedsetZREM(self, member):
        self.redishanndle.zrem(rediscommon.agent_redis_sortedset_key, member)

    # Members in the index range [start, stop], lowest score first,
    # decoded from bytes to str.
    def sortedsetZRANGE(self, start, stop):
        datalist = self.redishanndle.zrange(
            rediscommon.agent_redis_sortedset_key, start, stop)
        return [str(data, 'utf-8') for data in datalist]

    # Rank (0-based index) of a member, ordered by ascending score.
    def sortedsetZRANK(self, member):
        return self.redishanndle.zrank(rediscommon.agent_redis_sortedset_key,
                                       member)

    # Score of a member (None when absent).
    def sortedsetZSCORE(self, member):
        return self.redishanndle.zscore(
            rediscommon.agent_redis_sortedset_key, member)

    # Members in the index range [start, stop], highest score first.
    def sortedsetZREVRANGE(self, start, stop):
        datalist = self.redishanndle.zrevrange(
            rediscommon.agent_redis_sortedset_key, start, stop)
        return [str(data, 'utf-8') for data in datalist]

    # Top `number` members by score, highest first.
    def sortedsetGETTOP(self, number):
        # BUG FIX: the original asked zrevrange for indices
        # (zcard - number, zcard), which selects the *lowest*-scored
        # members of the descending range; the top N are indices
        # 0 .. number - 1.
        datalist = self.redishanndle.zrevrange(
            rediscommon.agent_redis_sortedset_key, 0, number - 1)
        return [str(data, 'utf-8') for data in datalist]

    # Members whose score lies in [min, max], with optional paging.
    def sortedsetZRANGEBYSCORE(self, min, max, start=None, num=None):
        datalist = self.redishanndle.zrangebyscore(
            rediscommon.agent_redis_sortedset_key, min, max, start, num)
        return [str(data, 'utf-8') for data in datalist]

    # Count members whose score lies in [min, max].
    def sortedsetZCOUNT(self, min, max):
        return self.redishanndle.zcount(rediscommon.agent_redis_sortedset_key,
                                        min, max)
class RespectfulRequester:
    """Proxy around the `requests` library that rate-limits calls per
    registered realm, tracking live request records in Redis with a TTL
    equal to the realm's timespan."""

    def __init__(self):
        self.redis = StrictRedis(host=config["redis"]["host"],
                                 port=config["redis"]["port"],
                                 password=config["redis"]["password"],
                                 db=config["redis"]["database"])
        # FIX: this connection probe was duplicated verbatim in the
        # original; a single probe is sufficient to fail fast.
        try:
            self.redis.echo("Testing Connection")
        except ConnectionError:
            raise RequestsRespectfulRedisError(
                "Could not establish a connection to the provided Redis server"
            )

    def __getattr__(self, attr):
        # Expose requests-style verbs (rr.get, rr.post, ...) via the proxies.
        if attr in [
                "delete", "get", "head", "options", "patch", "post", "put"
        ]:
            return getattr(self, "_requests_proxy_%s" % attr)
        else:
            raise AttributeError()

    @property
    def redis_prefix(self):
        # Namespace prefix for every key this class writes.
        return "RespectfulRequester"

    def request(self, request_func, realm=None, realms=None, wait=False):
        """Run `request_func` under the limits of the given realms.

        :param request_func: zero-argument callable wrapping a requests call.
        :param realm: deprecated single-realm form of `realms`.
        :param realms: list of registered realm names to debit.
        :param wait: when True, retry once per second until no realm limits.
        """
        if realm is not None:
            warnings.warn(
                "'realm' kwarg will be removed in favor of providing a 'realms' list starting in 0.3.0",
                DeprecationWarning)
            realms = [realm]
        registered_realms = self.fetch_registered_realms()
        for r in realms:
            if r not in registered_realms:
                # FIX: the message interpolated `realm` (often None)
                # instead of the realm that failed the lookup.
                raise RequestsRespectfulError(
                    "Realm '%s' hasn't been registered" % r)
        if wait:
            while True:
                try:
                    return self._perform_request(request_func, realms=realms)
                except RequestsRespectfulRateLimitedError:
                    pass
                time.sleep(1)
        else:
            return self._perform_request(request_func, realms=realms)

    def fetch_registered_realms(self):
        # Realm names live in a Redis set as bytes; decode to str.
        return list(
            map(lambda k: k.decode("utf-8"),
                self.redis.smembers("%s:REALMS" % self.redis_prefix)))

    def register_realm(self, realm, max_requests, timespan):
        # First registration wins; re-registering does not overwrite limits.
        redis_key = self._realm_redis_key(realm)
        if not self.redis.hexists(redis_key, "max_requests"):
            self.redis.hmset(redis_key, {
                "max_requests": max_requests,
                "timespan": timespan
            })
            self.redis.sadd("%s:REALMS" % self.redis_prefix, realm)
        return True

    def register_realms(self, realm_tuples):
        for realm_tuple in realm_tuples:
            self.register_realm(*realm_tuple)
        return True

    def update_realm(self, realm, **kwargs):
        # Only integer max_requests/timespan values are applied.
        redis_key = self._realm_redis_key(realm)
        updatable_keys = ["max_requests", "timespan"]
        for updatable_key in updatable_keys:
            if updatable_key in kwargs and type(kwargs[updatable_key]) == int:
                self.redis.hset(redis_key, updatable_key,
                                kwargs[updatable_key])
        return True

    def unregister_realm(self, realm):
        # Drop the realm's limits, its set membership and any live records.
        self.redis.delete(self._realm_redis_key(realm))
        self.redis.srem("%s:REALMS" % self.redis_prefix, realm)
        request_keys = self.redis.keys("%s:REQUEST:%s:*" %
                                       (self.redis_prefix, realm))
        # IDIOM FIX: plain loop instead of a side-effect list comprehension.
        for k in request_keys:
            self.redis.delete(k)
        return True

    def unregister_realms(self, realms):
        for realm in realms:
            self.unregister_realm(realm)
        return True

    def realm_max_requests(self, realm):
        realm_info = self._fetch_realm_info(realm)
        return int(realm_info["max_requests".encode("utf-8")].decode("utf-8"))

    def realm_timespan(self, realm):
        realm_info = self._fetch_realm_info(realm)
        return int(realm_info["timespan".encode("utf-8")].decode("utf-8"))

    @classmethod
    def configure(cls, **kwargs):
        """Validate and merge `redis`, `safety_threshold` and
        `requests_module_name` settings into the module-level config."""
        if "redis" in kwargs:
            if type(kwargs["redis"]) != dict:
                raise RequestsRespectfulConfigError(
                    "'redis' key must be a dict")
            expected_redis_keys = ["host", "port", "password", "database"]
            missing_redis_keys = list()
            for expected_redis_key in expected_redis_keys:
                if expected_redis_key not in kwargs["redis"]:
                    missing_redis_keys.append(expected_redis_key)
            if len(missing_redis_keys):
                raise RequestsRespectfulConfigError(
                    "'%s' %s missing from the 'redis' configuration key" %
                    (", ".join(missing_redis_keys),
                     "is" if len(missing_redis_keys) == 1 else "are"))
            config["redis"] = kwargs["redis"]
            global redis
            # FIX: the rebuilt module-level client omitted the configured
            # password, unlike __init__; pass it through as well.
            redis = StrictRedis(host=config["redis"]["host"],
                                port=config["redis"]["port"],
                                password=config["redis"]["password"],
                                db=config["redis"]["database"])
        if "safety_threshold" in kwargs:
            if type(kwargs["safety_threshold"]
                    ) != int or kwargs["safety_threshold"] < 0:
                raise RequestsRespectfulConfigError(
                    "'safety_threshold' key must be a positive integer")
            config["safety_threshold"] = kwargs["safety_threshold"]
        if "requests_module_name" in kwargs:
            if type(kwargs["requests_module_name"]) != str:
                raise RequestsRespectfulConfigError(
                    "'requests_module_name' key must be string")
            config["requests_module_name"] = kwargs["requests_module_name"]
        return config

    @classmethod
    def configure_default(cls):
        # Reset every config entry back to the shipped defaults.
        for key in config:
            config[key] = default_config[key]
        return config

    def _perform_request(self, request_func, realms=None):
        """Record one request against every realm (per-realm uuid keys with
        a TTL), then invoke the wrapped call; raise when any realm limits."""
        self._validate_request_func(request_func)
        rate_limited_realms = list()
        for realm in realms:
            if not self._can_perform_request(realm):
                rate_limited_realms.append(realm)
        if not len(rate_limited_realms):
            for realm in realms:
                request_uuid = str(uuid.uuid4())
                self.redis.setex(name="%s:REQUEST:%s:%s" %
                                 (self.redis_prefix, realm, request_uuid),
                                 time=self.realm_timespan(realm),
                                 value=request_uuid)
            return request_func()
        else:
            raise RequestsRespectfulRateLimitedError(
                "Currently rate-limited on Realm(s): %s" %
                ", ".join(rate_limited_realms))

    def _realm_redis_key(self, realm):
        return "%s:REALMS:%s" % (self.redis_prefix, realm)

    def _fetch_realm_info(self, realm):
        redis_key = self._realm_redis_key(realm)
        return self.redis.hgetall(redis_key)

    def _requests_in_timespan(self, realm):
        # Single SCAN page with a count above the db's key total, so one
        # pass covers the whole keyspace.
        return len(
            self.redis.scan(cursor=0,
                            match="%s:REQUEST:%s:*" %
                            (self.redis_prefix, realm),
                            count=self._redis_keys_in_db() + 100)[1])

    def _redis_keys_in_db(self):
        return self.redis.info().get("db%d" %
                                     config["redis"]["database"]).get("keys")

    def _can_perform_request(self, realm):
        return self._requests_in_timespan(realm) < (
            self.realm_max_requests(realm) - config["safety_threshold"])

    # Requests proxy

    def _requests_proxy(self, method, *args, **kwargs):
        realm = kwargs.pop("realm", None)
        realms = kwargs.pop("realms", list())
        if realm:
            warnings.warn(
                "'realm' kwarg will be removed in favor of providing a 'realms' list starting in 0.3.0",
                DeprecationWarning)
            realms.append(realm)
        if not len(realms):
            raise RequestsRespectfulError("'realms' is a required kwarg")
        wait = kwargs.pop("wait", False)
        return self.request(lambda: getattr(requests, method)
                            (*args, **kwargs),
                            realms=realms,
                            wait=wait)

    def _requests_proxy_delete(self, *args, **kwargs):
        return self._requests_proxy("delete", *args, **kwargs)

    def _requests_proxy_get(self, *args, **kwargs):
        return self._requests_proxy("get", *args, **kwargs)

    def _requests_proxy_head(self, *args, **kwargs):
        return self._requests_proxy("head", *args, **kwargs)

    def _requests_proxy_options(self, *args, **kwargs):
        return self._requests_proxy("options", *args, **kwargs)

    def _requests_proxy_patch(self, *args, **kwargs):
        return self._requests_proxy("patch", *args, **kwargs)

    def _requests_proxy_post(self, *args, **kwargs):
        return self._requests_proxy("post", *args, **kwargs)

    def _requests_proxy_put(self, *args, **kwargs):
        return self._requests_proxy("put", *args, **kwargs)

    @staticmethod
    def _validate_request_func(request_func):
        # Best-effort guard: the lambda body must start with a requests call.
        request_func_string = inspect.getsource(request_func)
        post_lambda_string = request_func_string.split(":")[1].strip()
        if not post_lambda_string.startswith(
                config["requests_module_name"]
        ) and not post_lambda_string.startswith("getattr(requests"):
            raise RequestsRespectfulError(
                "The request lambda can only contain a requests function call")

    @staticmethod
    def _config():
        return config
class RedisBackendTest(TestCase):
    """Integration tests for RedisBackend's thumbnail/source bookkeeping.

    NOTE(review): these tests talk to a live Redis instance (default
    StrictRedis connection) and clean up the keys they touch by hand.
    """

    def setUp(self):
        # Backend under test plus a raw client to verify its writes directly.
        self.backend = RedisBackend()
        self.redis = StrictRedis()

    def test_get_source_key(self):
        """Source keys are namespaced under 'djthumbs-test:sources:'."""
        self.assertEqual(self.backend.get_source_key('a.jpg'),
                         'djthumbs-test:sources:a.jpg')

    def test_get_thumbnail_key(self):
        """Thumbnail keys are namespaced under 'djthumbs-test:thumbnails:'."""
        self.assertEqual(self.backend.get_thumbnail_key('a.jpg'),
                         'djthumbs-test:thumbnails:a.jpg')

    def test_add_delete_source(self):
        """add_source creates the hash field; delete_source removes it."""
        source_name = 'test-thumbnail.jpg'
        source_key = self.backend.get_source_key(source_name)
        self.backend.add_source(source_name)
        self.assertTrue(self.redis.hexists(source_key, source_name))
        self.backend.delete_source(source_name)
        self.assertFalse(self.redis.hexists(source_key, source_name))

    def test_get_source(self):
        """get_source reads back a value written directly into Redis."""
        source_name = 'test-thumbnail.jpg'
        source_key = self.backend.get_source_key(source_name)
        self.redis.hset(source_key, source_name, source_name)
        self.assertEqual(self.backend.get_source(source_name), source_name)
        # Delete Source
        self.redis.hdel(source_key, source_name)

    def test_add_delete_thumbnail(self):
        """add_thumbnail stores under the size field; delete removes it."""
        source_name = 'test-thumbnail.jpg'
        size = 'small'
        thumbnail_key = self.backend.get_thumbnail_key(source_name)
        self.backend.add_source(source_name)
        self.backend.add_thumbnail(source_name, size,
                                   'test-thumbnail_small.jpg')
        self.assertTrue(self.redis.hexists(thumbnail_key, size))
        self.backend.delete_thumbnail(source_name, size)
        self.assertFalse(self.redis.hexists(thumbnail_key, size))
        # Delete Source
        self.redis.hdel(self.backend.get_source_key(source_name), source_name)

    def test_get_thumbnail(self):
        """get_thumbnail returns an ImageMeta; get_thumbnails returns all."""
        source_name = 'test-thumbnail.jpg'
        self.backend.add_source(source_name)
        self.backend.add_thumbnail(source_name, 'small',
                                   'test-thumbnail_small.jpg')
        self.assertEqual(
            self.backend.get_thumbnail(source_name, 'small'),
            ImageMeta(source_name, 'test-thumbnail_small.jpg', 'small'))
        self.backend.add_thumbnail(source_name, 'large',
                                   'test-thumbnail_large.jpg')
        expected = ['test-thumbnail_large.jpg', 'test-thumbnail_small.jpg']
        result = [
            image_meta.name
            for image_meta in self.backend.get_thumbnails(source_name)
        ]
        # sort is replacing the variable in place, not returning new value, it will always return None
        result.sort()
        expected.sort()
        self.assertEqual(result, expected)
        # Delete Source & Thumbnails
        thumbnail_key = self.backend.get_thumbnail_key(source_name)
        self.redis.hdel(self.backend.get_source_key(source_name), source_name)
        self.redis.hdel(thumbnail_key, 'small')
        self.redis.hdel(thumbnail_key, 'large')

    def test_flush_thumbnails(self):
        """flush_thumbnails removes the whole thumbnail hash for a source."""
        source_name = 'test-thumbnail.jpg'
        thumbnail_key = self.backend.get_thumbnail_key(source_name)
        self.backend.add_source(source_name)
        self.backend.add_thumbnail(source_name, "small",
                                   'test-thumbnail_small.jpg')
        self.backend.add_thumbnail(source_name, "default",
                                   'test-thumbnail_default.jpg')
        self.backend.flush_thumbnails(source_name)
        self.assertFalse(self.redis.exists(thumbnail_key))
# Remove sorted-set members of 'grade' whose scores fall in [80, 90].
redis.zremrangebyscore('grade', 80, 90)
""" 散列操作 """
# Add a field/value mapping to the hash.
redis.hset('price', 'cake', 5)
# Set the field only if it does not exist yet.
redis.hsetnx('price', 'book', 6)
# Get one field's value.
redis.hget('price', 'cake')
# Get the values for a list of fields.
redis.hmget('price', ['apple', 'orange'])
# Set several field/value mappings at once.
redis.hmset('price', {'banana': 2, 'pear': 6})
# Increment the value stored at the field by the given amount.
redis.hincrby('price', 'apple', 3)
# Does the field exist?
redis.hexists('price', 'banana')
# Delete a field.
redis.hdel('price', 'banana')
# Number of fields in the hash.
redis.hlen('price')
# All field names.
redis.hkeys('price')
# All field values.
redis.hvals('price')
# All field/value pairs.
redis.hgetall('price')
""" RedisDump提供了强大的Redis数据的导入和导出功能 """
'''
redis-dump -h 查看对应命令
-u代表Redis连接字符串,
-d代表数据库代号,
# 获取散列表中多个指定映射 key 的 value print(redis.hmget('price', ['cake', 'book'])) # 向散列表中批量新增/更新映射数据 redis.hmset('price', {'apple':2, 'orange':7}) # 给映射 value 做加法 redis.hincrby('price', 'apple', 2) # 判断映射 key 是否存在 print(redis.hexists('price','banana')) # 删除一个映射键值对 redis.hdel('price', 'banana') # 获取散列表长度 print(redis.hlen('price')) # 获取散列表中所有映射 key print(redis.hkeys('price')) # 获取散列表中所有映射 value
class RespectfulRequester:
    """Rate-limited proxy around the `requests` library.

    Each performed request is recorded in Redis with a TTL equal to the
    realm's timespan; a new request is allowed only while the number of
    live records stays below the realm's limit minus the safety threshold.
    """

    def __init__(self):
        self.redis = StrictRedis(
            host=config["redis"]["host"],
            port=config["redis"]["port"],
            password=config["redis"]["password"],
            db=config["redis"]["database"])
        try:
            # Probe the connection early so misconfiguration fails fast.
            self.redis.echo("Testing Connection")
        except ConnectionError:
            raise RequestsRespectfulRedisError("Could not establish a connection to the provided Redis server")

    def __getattr__(self, attr):
        # Expose requests-style verbs (self.get, self.post, ...) via proxies.
        if attr in ["delete", "get", "head", "options", "patch", "post", "put"]:
            return getattr(self, "_requests_proxy_%s" % attr)
        else:
            raise AttributeError()

    @property
    def redis_prefix(self):
        # Namespace prefix for every key this class writes.
        return "RespectfulRequester"

    def request(self, request_func, realms, wait=False):
        """Run `request_func` under the limits of `realms`.

        :param request_func: zero-argument callable wrapping a requests call.
        :param realms: a realm name or a sequence of realm names.
        :param wait: when True, retry once per second until no realm limits.
        """
        # NOTE(review): `basestring` is a Python 2 name — presumably provided
        # by a compatibility shim elsewhere in this project; confirm for Py3.
        if not isinstance(realms, Sequence) or isinstance(realms, basestring):
            realms = [realms]
        # PERF FIX: fetch the registered realms once instead of once per
        # realm (each call is a Redis SMEMBERS round-trip).
        registered_realms = self.fetch_registered_realms()
        for realm in realms:
            if realm not in registered_realms:
                raise RequestsRespectfulError("Realm '%s' hasn't been registered" % realm)
        if wait:
            while True:
                try:
                    return self._perform_request(request_func, realms)
                except RequestsRespectfulRateLimitedError:
                    pass
                time.sleep(1)
        else:
            return self._perform_request(request_func, realms)

    def fetch_registered_realms(self):
        # Realm names live in a Redis set as bytes; decode to str.
        return list(map(lambda k: k.decode("utf-8"), self.redis.smembers("%s:REALMS" % self.redis_prefix)))

    def register_realm(self, realm, max_requests, timespan):
        # First registration wins; re-registering does not overwrite limits.
        redis_key = self._realm_redis_key(realm)
        if not self.redis.hexists(redis_key, "max_requests"):
            self.redis.hmset(redis_key, {"max_requests": max_requests, "timespan": timespan})
            self.redis.sadd("%s:REALMS" % self.redis_prefix, realm)
        return True

    def update_realm(self, realm, **kwargs):
        # Only integer max_requests/timespan values are applied.
        redis_key = self._realm_redis_key(realm)
        updatable_keys = ["max_requests", "timespan"]
        for updatable_key in updatable_keys:
            if updatable_key in kwargs and type(kwargs[updatable_key]) == int:
                self.redis.hset(redis_key, updatable_key, kwargs[updatable_key])
        return True

    def unregister_realm(self, realm):
        # Drop the realm's limits, its set membership and any live records.
        self.redis.delete(self._realm_redis_key(realm))
        self.redis.srem("%s:REALMS" % self.redis_prefix, realm)
        request_keys = self.redis.keys("%s:REQUEST:%s:*" % (self.redis_prefix, realm))
        # IDIOM FIX: plain loop instead of a side-effect list comprehension.
        for k in request_keys:
            self.redis.delete(k)
        return True

    def realm_max_requests(self, realm):
        realm_info = self._fetch_realm_info(realm)
        return int(realm_info["max_requests".encode("utf-8")].decode("utf-8"))

    def realm_timespan(self, realm):
        realm_info = self._fetch_realm_info(realm)
        return int(realm_info["timespan".encode("utf-8")].decode("utf-8"))

    @classmethod
    def configure(cls, **kwargs):
        """Validate and merge `redis`, `safety_threshold` and
        `requests_module_name` settings into the module-level config."""
        if "redis" in kwargs:
            if type(kwargs["redis"]) != dict:
                raise RequestsRespectfulConfigError("'redis' key must be a dict")
            expected_redis_keys = ["host", "port", "password", "database"]
            missing_redis_keys = [key for key in expected_redis_keys
                                  if key not in kwargs["redis"]]
            if len(missing_redis_keys):
                raise RequestsRespectfulConfigError("'%s' %s missing from the 'redis' configuration key" % (
                    ", ".join(missing_redis_keys),
                    "is" if len(missing_redis_keys) == 1 else "are"
                ))
            config["redis"] = kwargs["redis"]
        if "safety_threshold" in kwargs:
            if type(kwargs["safety_threshold"]) != int or kwargs["safety_threshold"] < 0:
                raise RequestsRespectfulConfigError("'safety_threshold' key must be a positive integer")
            config["safety_threshold"] = kwargs["safety_threshold"]
        if "requests_module_name" in kwargs:
            if type(kwargs["requests_module_name"]) != str:
                raise RequestsRespectfulConfigError("'requests_module_name' key must be string")
            config["requests_module_name"] = kwargs["requests_module_name"]
        return config

    @classmethod
    def configure_default(cls):
        # Reset every config entry back to the shipped defaults.
        for key in config:
            config[key] = default_config[key]
        return config

    def _perform_request(self, request_func, realms):
        # Record one request (shared uuid) against every realm, then call.
        self._validate_request_func(request_func)
        limited_realms = [realm for realm in realms if not self._can_perform_request(realm)]
        if not limited_realms:
            request_uuid = str(uuid.uuid4())
            for realm in realms:
                self.redis.setex(
                    name="%s:REQUEST:%s:%s" % (self.redis_prefix, realm, request_uuid),
                    time=self.realm_timespan(realm),
                    value=request_uuid
                )
            return request_func()
        else:
            raise RequestsRespectfulRateLimitedError(
                "Currently rate-limited on Realm(s): {}".format(', '.join(limited_realms)))

    def _realm_redis_key(self, realm):
        return "%s:REALMS:%s" % (self.redis_prefix, realm)

    def _fetch_realm_info(self, realm):
        redis_key = self._realm_redis_key(realm)
        return self.redis.hgetall(redis_key)

    def _requests_in_timespan(self, realm):
        # Single SCAN page with a count above the db's key total, so one
        # pass covers the whole keyspace.
        return len(
            self.redis.scan(
                cursor=0,
                match="%s:REQUEST:%s:*" % (self.redis_prefix, realm),
                count=self._redis_keys_in_db() + 100
            )[1]
        )

    def _redis_keys_in_db(self):
        return self.redis.info().get("db%d" % config["redis"]["database"]).get("keys")

    def _can_perform_request(self, realm):
        return self._requests_in_timespan(realm) < (self.realm_max_requests(realm) - config["safety_threshold"])

    # Requests proxy

    def _requests_proxy(self, method, *args, **kwargs):
        realms = kwargs.pop("realms", None)
        wait = kwargs.pop("wait", False)
        if realms is None:
            raise RequestsRespectfulError("'realms' is a required kwarg")
        return self.request(lambda: getattr(requests, method)(*args, **kwargs), realms, wait=wait)

    def _requests_proxy_delete(self, *args, **kwargs):
        return self._requests_proxy("delete", *args, **kwargs)

    def _requests_proxy_get(self, *args, **kwargs):
        return self._requests_proxy("get", *args, **kwargs)

    def _requests_proxy_head(self, *args, **kwargs):
        return self._requests_proxy("head", *args, **kwargs)

    def _requests_proxy_options(self, *args, **kwargs):
        return self._requests_proxy("options", *args, **kwargs)

    def _requests_proxy_patch(self, *args, **kwargs):
        return self._requests_proxy("patch", *args, **kwargs)

    def _requests_proxy_post(self, *args, **kwargs):
        return self._requests_proxy("post", *args, **kwargs)

    def _requests_proxy_put(self, *args, **kwargs):
        return self._requests_proxy("put", *args, **kwargs)

    @staticmethod
    def _validate_request_func(request_func):
        # Best-effort guard: the lambda body must start with a requests call.
        request_func_string = inspect.getsource(request_func)
        post_lambda_string = request_func_string.split(":")[1].strip()
        if not post_lambda_string.startswith(config["requests_module_name"]) and not post_lambda_string.startswith("getattr(requests"):
            raise RequestsRespectfulError("The request lambda can only contain a requests function call")

    @staticmethod
    def _config():
        return config
class RedisUtils(object):
    """Demo driver that exercises the Redis string/list/set/zset/hash APIs,
    printing each command's arguments and result."""

    def __init__(self, host, port, db, password=None):
        # `password=None` is also StrictRedis's default, so a single call
        # covers both of the original's with/without-password branches.
        self._client = StrictRedis(host=host, port=port, db=db,
                                   password=password)

    def common_utils(self):
        """General operations (placeholder)."""
        pass

    def collection_utils(self):
        """Statistics operations (placeholder)."""
        pass

    def string_utils(self):
        """Exercise string SET/STRLEN/GET/MSET/MGET/INCR/DECR commands."""
        str_name = "str"
        str_value = "str_value"
        int_name = "number"
        int_value = 25
        int_str_name = "num_to_str"
        int_str_value = 10000
        int_max_name = "max_number"
        int_max_value = 2444449999999999999999999
        int_min_name = "min_number"
        int_min_value = -1111111111111111111111111
        float_name = "float_number"
        float_value = 23.45
        # SET each sample value, then report the stored string length.
        result = self._client.set(str_name, str_value)
        result_len = self._client.strlen(str_name)
        print("String Set key:", str_name, " value:", str_value, " result:",
              result, " len:", result_len)
        result = self._client.set(int_name, int_value)
        result_len = self._client.strlen(int_name)
        print("String Set key:", int_name, " value:", int_value, " result:",
              result, " len:", result_len)
        result = self._client.set(int_str_name, int_str_value)
        result_len = self._client.strlen(int_str_name)
        print("String Set key:", int_str_name, " value:", int_str_value,
              " result:", result, " len:", result_len)
        result = self._client.set(int_max_name, int_max_value)
        result_len = self._client.strlen(int_max_name)
        print("String Set key:", int_max_name, " value:", int_max_value,
              " result:", result, " len:", result_len)
        result = self._client.set(int_min_name, int_min_value)
        result_len = self._client.strlen(int_min_name)
        print("String Set key:", int_min_name, " value:", int_min_value,
              " result:", result, " len:", result_len)
        result = self._client.set(float_name, float_value)
        result_len = self._client.strlen(float_name)
        print("String Set key:", float_name, " value:", float_value,
              " result:", result, " len:", result_len)
        str_dict = {"str_key": "keyvalue", "int_key": 234, "float_key": 234.09}
        result = self._client.mset(str_dict)
        print("String MSet dict:", str_dict, " result:", result)
        null_str_name = ""
        null_str_value = self._client.get(null_str_name)
        print("String Get key:", null_str_name, " value:", null_str_value)
        dest_str_value = self._client.get(str_name)
        print("String Get key:", str_name, " value", dest_str_value, " type:",
              type(dest_str_value))
        dest_int_value = self._client.get(int_name)
        print("String Get key:", int_name, " value", dest_int_value, " type:",
              type(dest_int_value))
        multi_get_name = ["str_key", "int_key", "float_key"]
        result = self._client.mget(multi_get_name)
        result_dict = dict(zip(multi_get_name, result))
        print("String MGet key:", multi_get_name, " values:", result,
              " type:", type(result), " dict:", result_dict)
        # INCR/DECR accept negative deltas too.
        incr_name = "incr_name"
        incr_count = 2
        result = self._client.incr(incr_name, incr_count)
        print("String Incr key:", incr_name, "incr_count:", incr_count,
              " result:", result)
        incr_count = -2
        result = self._client.incr(incr_name, incr_count)
        print("String Incr key:", incr_name, "incr_count:", incr_count,
              " result:", result)
        decr_name = "decr_name"
        decr_count = -2
        result = self._client.decr(decr_name, decr_count)
        print("String Decr key:", decr_name, "incr_count:", decr_count,
              " result:", result)
        decr_count = 2
        result = self._client.decr(decr_name, decr_count)
        print("String Decr key:", decr_name, "incr_count:", decr_count,
              " result:", result)

    def list_utils(self):
        """Exercise list push/pop/trim/insert/index commands."""
        not_existed_list_name = "not exist list name"
        list_name = "list_key"
        list_str_value = "list value"
        list_int_value = 234
        # O(1)
        result = self._client.rpush(list_name, list_str_value, list_int_value)
        print("List RPUSH key:", list_name, "value:", list_int_value,
              list_str_value, "result:", result)
        list_value = ("list_value_1", "list_value_2", 234)
        # O(1)
        result = self._client.rpush(list_name, *list_value)
        print("List RPUSH key:", list_name, "value:", list_value, "result:",
              result)
        # O(X) — RPUSHX only pushes when the key already exists.
        result = self._client.rpushx(list_name, list_value)
        print("List RPUSHX key:", list_name, "value:", list_value, "result:",
              result)
        result = self._client.rpushx(not_existed_list_name, list_value)
        print("List RPUSHX key:", not_existed_list_name, "value:", list_value,
              "result:", result)
        # O(1)
        result = self._client.lpop(list_name)
        print("List LPOP key:", list_name, " value:", result)
        # O(1)
        result = self._client.rpop(list_name)
        print("List RPOP key:", list_name, " value:", result)
        # O(n)
        result = self._client.lrem(list_name, 1, 231)
        print("List LREM key:", list_name, "result:", result)
        # O(n)
        lstart = 0
        lend = 2
        result = self._client.ltrim(list_name, lstart, lend)
        print("List LTRIM key:", list_name, " result:", result)
        # O(1)
        result = self._client.llen(list_name)
        print("List LLEN key:", list_name, " len:", result)
        # O(X)
        result = self._client.linsert(list_name, "before", 234, "Insert Value")
        print("List LINSERT key:", list_name, " result:", result)
        lindex = 2  # Start As 0
        # O(X)
        result = self._client.lindex(list_name, lindex)
        print("List LINDEX key:", list_name, " result:", result)

    def set_utils(self):
        """Exercise set commands (classification, social, tagging use cases)."""
        set_name = "set_name"
        set_value = ("set_1", "set_2", 3, 4)
        # O(K)
        result = self._client.sadd(set_name, *set_value)
        print("Set SADD key:", set_name, "value:", set_value, " result:",
              result)
        # O(1)
        result = self._client.scard(set_name)
        print("Set SCARD key:", set_name, " result:", result)
        s_find_value = "set_1"
        # O(1)
        result = self._client.sismember(set_name, s_find_value)
        print("Set SISMEMBER key:", set_name, " find_value:", s_find_value,
              " result:", result)
        random_count = 2
        # O(K)
        result = self._client.srandmember(set_name, number=random_count)
        print("Set SRANDOMMEMBER key:", set_name, "result:", result)
        # O(1)
        result = self._client.spop(set_name)
        print("Set SPOP key:", set_name, " result:", result)
        # O(K)
        result = self._client.srem(set_name, *set_value)
        print("Set SREM key:", set_name, "value:", set_value, " result:",
              result)
        set_a_name = "set_a"
        set_a_value = [
            "set_value_1", "set_value_3", "set_value_6", "set_value_9",
            "set_value_10"
        ]
        set_b_name = "set_b"
        set_b_value = [
            "set_value_1", "set_value_3", "set_value_6", "set_value_8",
            "set_value_0"
        ]
        self._client.sadd(set_a_name, *set_a_value)
        self._client.sadd(set_b_name, *set_b_value)
        # O(K) — intersection / union / difference of the two sample sets.
        result = self._client.sinter(set_a_name, set_b_name)
        print("Set SINTER key:", set_a_name, " key:", set_b_name, " result:",
              result)
        # O(K)
        result = self._client.sunion(set_a_name, set_b_name)
        print("Set SUNION key:", set_a_name, " key:", set_b_name, " result:",
              result)
        # O(K)
        result = self._client.sdiff(set_a_name, set_b_name)
        print("Set SDIFF key:", set_a_name, " key:", set_b_name, " result:",
              result)
        self._client.delete(set_a_name)
        self._client.delete(set_b_name)

    def zset_utils(self):
        """Exercise sorted-set commands (e.g. leaderboards)."""
        zset_name = "zset_name"
        zset_value = {
            "zset_1": 23,
            "zset_2": 23.56,
            "zset_3": 23,
            "zset_4": -4,
            "zset_5": 0
        }
        # FIX: pass the member->score mapping as a positional dict, as done
        # elsewhere in this file; the old `**kwargs` form raises a TypeError
        # on redis-py 3.x.
        result = self._client.zadd(zset_name, zset_value)
        print("zset ZADD key:", zset_name, " result:", result)
        result = self._client.zcard(zset_name)
        print("zset ZCARD key:", zset_name, " result:", result)
        zset_score_value = "zset_4"
        result = self._client.zscore(zset_name, zset_score_value)
        print("zset ZSCORE key:", zset_name, "value:", zset_score_value,
              " result:", result)
        result = self._client.zrank(zset_name, zset_score_value)
        print("zset ZRANK key:", zset_name, "value:", zset_score_value,
              " result:", result)
        result = self._client.zrevrank(zset_name, zset_score_value)
        print("zset ZREVRANK key:", zset_name, "value:", zset_score_value,
              " result:", result)
        zset_cursor = 0
        index, result = self._client.zscan(zset_name, zset_cursor)
        print("zset ZSCAN key:", zset_name, " result:", result, "type:",
              type(result))
        zset_min = 0  # Start From 0
        zset_max = 2
        zset_num = 2
        result = self._client.zrange(zset_name, zset_min, zset_max)
        print("zset ZRANGE key:", zset_name, "min:", zset_min, " max:",
              zset_max, " result:", result)
        # self._client.zrangebylex(zset_name, 0, 2, num=2)
        # self._client.zrangebyscore(zset_name, 0, 2, num=2)
        self._client.delete(zset_name)

    def hash_utils(self):
        """Exercise hash HSET/HMSET/HLEN/HEXISTS/HGET/HMGET/HSCAN/HKEYS/HDEL."""
        hash_name = "hash_name"
        hash_key = "hask_key"
        hash_value = "hash_value"
        result = self._client.hset(hash_name, hash_key, hash_value)
        print("hash HSET key:", hash_name, " field:", hash_key, " value:",
              hash_value, " result:", result)
        hash_content = {"name": "lee", "age": 34, "birth": 2009}
        result = self._client.hmset(hash_name, hash_content)
        print("hash HMSET content:", hash_content, " result:", result)
        result = self._client.hlen(hash_name)
        print("hash HLEN key:", hash_name, " result:", result)
        result = self._client.hexists(hash_name, hash_key)
        print("hash HEXISTS key:", hash_name, " field:", hash_key, " result:",
              result)
        result = self._client.hget(hash_name, hash_key)
        print("hash HGET key:", hash_name, " field:", hash_key, " result:",
              result)
        hash_keys = ("name", "age")
        result = self._client.hmget(hash_name, *hash_keys)
        print("hash HMGET key:", hash_name, " field:", hash_keys, " result:",
              result)
        hash_cursor = 0
        result = self._client.hscan(hash_name, hash_cursor)
        print("hash HSCAN key:", hash_name, " result:", result)
        result = self._client.hkeys(hash_name)
        print("hash HKEYS key:", hash_name, "result:", result)
        result = self._client.hdel(hash_name, hash_key)
        print("hash HDEL key:", hash_name, " field:", hash_key, " result:",
              result)
        result = self._client.hdel(hash_name, *hash_keys)
        # FIX: this log line previously echoed `hash_key` although the
        # deletion targeted `hash_keys`.
        print("hash HDEL key:", hash_name, " field:", hash_keys, " result:",
              result)
"""Quick connectivity smoke test against a local Redis server."""
from redis import StrictRedis

# Connect to DB 0 on the default port; the password here is a placeholder.
redis = StrictRedis(host='localhost', port=6379, db=0, password='******')

# Round-trip one plain string key and echo it back.
redis.set('name', 'Bob')
value = redis.get('name')
print(value)

# Probe whether hash "room" contains field "a" (result is discarded).
redis.hexists("room", "a")
class RedisBackendTest(TestCase):
    """Integration tests for RedisBackend against a live local Redis.

    Keys are namespaced under "djthumbs-test:"; each test cleans up the
    hash fields it created so runs do not leak state into each other.
    """

    def setUp(self):
        self.backend = RedisBackend()
        self.redis = StrictRedis()

    def test_get_source_key(self):
        self.assertEqual(self.backend.get_source_key('a.jpg'),
                         'djthumbs-test:sources:a.jpg')

    def test_get_thumbnail_key(self):
        self.assertEqual(self.backend.get_thumbnail_key('a.jpg'),
                         'djthumbs-test:thumbnails:a.jpg')

    def test_add_delete_source(self):
        source_name = 'test-thumbnail.jpg'
        source_key = self.backend.get_source_key(source_name)
        self.backend.add_source(source_name)
        self.assertTrue(self.redis.hexists(source_key, source_name))
        self.backend.delete_source(source_name)
        self.assertFalse(self.redis.hexists(source_key, source_name))

    def test_get_source(self):
        source_name = 'test-thumbnail.jpg'
        source_key = self.backend.get_source_key(source_name)
        # Seed the hash directly so only get_source() is under test.
        self.redis.hset(source_key, source_name, source_name)
        self.assertEqual(self.backend.get_source(source_name), source_name)
        # Delete Source
        self.redis.hdel(source_key, source_name)

    def test_add_delete_thumbnail(self):
        source_name = 'test-thumbnail.jpg'
        size = 'small'
        thumbnail_key = self.backend.get_thumbnail_key(source_name)
        self.backend.add_source(source_name)
        self.backend.add_thumbnail(source_name, size, 'test-thumbnail_small.jpg')
        self.assertTrue(self.redis.hexists(thumbnail_key, size))
        self.backend.delete_thumbnail(source_name, size)
        self.assertFalse(self.redis.hexists(thumbnail_key, size))
        # Delete Source
        self.redis.hdel(self.backend.get_source_key(source_name), source_name)

    def test_get_thumbnail(self):
        source_name = 'test-thumbnail.jpg'
        self.backend.add_source(source_name)
        self.backend.add_thumbnail(source_name, 'small', 'test-thumbnail_small.jpg')
        self.assertEqual(self.backend.get_thumbnail(source_name, 'small'),
                         ImageMeta(source_name, 'test-thumbnail_small.jpg', 'small'))
        self.backend.add_thumbnail(source_name, 'large', 'test-thumbnail_large.jpg')
        expected = ['test-thumbnail_large.jpg', 'test-thumbnail_small.jpg']
        result = [image_meta.name
                  for image_meta in self.backend.get_thumbnails(source_name)]
        # BUG FIX: the original asserted result.sort() == expected.sort();
        # list.sort() returns None, so that compared None == None and the
        # test could never fail.  Compare sorted copies instead.
        self.assertEqual(sorted(result), sorted(expected))
        # Delete Source & Thumbnails
        thumbnail_key = self.backend.get_thumbnail_key(source_name)
        self.redis.hdel(self.backend.get_source_key(source_name), source_name)
        self.redis.hdel(thumbnail_key, 'small')
        self.redis.hdel(thumbnail_key, 'large')
class RespectfulWebdriver:
    """Rate-limit-aware proxy around a Selenium RemoteWebDriver.

    Page loads issued through ``get`` are tracked per "realm" in Redis so
    the wrapper can refuse (or wait) when a realm's request budget over a
    timespan is exhausted.  Every attribute other than ``get`` is
    delegated untouched to the wrapped webdriver.
    """

    # Fallback used when selenium-respectful.config.yml is absent.
    default_config = {
        "redis": {
            "host": "localhost",
            "port": 6379,
            "database": 0
        },
        "safety_threshold": 0
    }

    def __init__(self, **kwargs):
        self.config = self._load_config()

        self.webdriver = kwargs.get("webdriver")
        # NOTE(review): only *direct* base classes are inspected, so a
        # deeper subclass of RemoteWebDriver would be rejected — confirm
        # whether isinstance() was intended.
        if not WebDriver in self.webdriver.__class__.__bases__:
            raise SeleniumRespectfulError(
                "The provided webdriver does not inherit from RemoteWebDriver")

        self.redis = StrictRedis(
            host=self.config["redis"]["host"],
            port=self.config["redis"]["port"],
            db=self.config["redis"]["database"],
        )
        try:
            self.redis.echo("Testing Connection")
        except ConnectionError:
            raise SeleniumRespectfulError(
                "Could not establish a connection to the provided Redis server"
            )

    def __getattr__(self, attr):
        # Intercept only "get"; everything else is proxied to the webdriver.
        if attr == "get":
            return getattr(self, "_selenium_webdriver_proxy_%s" % attr)
        else:
            return getattr(self.webdriver, attr)

    @property
    def redis_prefix(self):
        """Namespace prefix for every Redis key written by this class."""
        return "SeleniumRequester"

    def register_realm(self, realm, max_requests, timespan):
        """Create a realm with its request budget; no-op if it already exists."""
        redis_key = self._realm_redis_key(realm)

        if not self.redis.hexists(redis_key, "max_requests"):
            self.redis.hmset(redis_key, {
                "max_requests": max_requests,
                "timespan": timespan
            })
            self.redis.sadd("%s:REALMS" % self.redis_prefix, realm)

        return True

    def register_realms(self, realm_tuples):
        """Register many (realm, max_requests, timespan) tuples."""
        for realm_tuple in realm_tuples:
            self.register_realm(*realm_tuple)

        return True

    def update_realm(self, realm, **kwargs):
        """Update max_requests and/or timespan; only int values are applied."""
        redis_key = self._realm_redis_key(realm)
        updatable_keys = ["max_requests", "timespan"]

        for updatable_key in updatable_keys:
            if updatable_key in kwargs and type(kwargs[updatable_key]) == int:
                self.redis.hset(redis_key, updatable_key,
                                kwargs[updatable_key])

        return True

    def unregister_realm(self, realm):
        """Drop a realm's definition and every request tracked under it."""
        self.redis.delete(self._realm_redis_key(realm))
        self.redis.srem("%s:REALMS" % self.redis_prefix, realm)

        request_keys = self.redis.keys("%s:REQUEST:%s:*" %
                                       (self.redis_prefix, realm))
        [self.redis.delete(k) for k in request_keys]

        return True

    def unregister_realms(self, realms):
        """Unregister every realm in *realms*."""
        for realm in realms:
            self.unregister_realm(realm)

        return True

    def fetch_registered_realms(self):
        """Return all registered realm names, decoded to str."""
        return list(
            map(lambda k: k.decode("utf-8"),
                self.redis.smembers("%s:REALMS" % self.redis_prefix)))

    def realm_max_requests(self, realm):
        """Configured request budget of *realm*."""
        realm_info = self._fetch_realm_info(realm)
        return int(realm_info["max_requests".encode("utf-8")].decode("utf-8"))

    def realm_timespan(self, realm):
        """Configured timespan (seconds) of *realm*."""
        realm_info = self._fetch_realm_info(realm)
        return int(realm_info["timespan".encode("utf-8")].decode("utf-8"))

    def _load_config(self):
        """Load selenium-respectful.config.yml, or defaults when it is absent.

        Raises SeleniumRespectfulError when the file exists but is invalid.
        """
        try:
            with open("selenium-respectful.config.yml", "r") as f:
                # safe_load: never execute arbitrary YAML tags from the
                # config file (bare yaml.load without a Loader is unsafe).
                config = yaml.safe_load(f)

                if "safety_threshold" not in config:
                    config["safety_threshold"] = self.__class__.default_config.get(
                        "safety_threshold")
                else:
                    if not isinstance(config["safety_threshold"],
                                      int) or config["safety_threshold"] < 0:
                        raise SeleniumRespectfulError(
                            "'safety_threshold' key must be a positive integer in 'selenium-respectful.config.yml'"
                        )

                if "redis" not in config:
                    raise SeleniumRespectfulError(
                        "'redis' key is missing from 'selenium-respectful.config.yml'"
                    )

                expected_redis_keys = ["host", "port", "database"]
                missing_redis_keys = list()

                for expected_redis_key in expected_redis_keys:
                    if expected_redis_key not in config["redis"]:
                        missing_redis_keys.append(expected_redis_key)

                if len(missing_redis_keys):
                    raise SeleniumRespectfulError(
                        "'%s' %s missing from the 'redis' configuration key in 'selenium-respectful.config.yml'"
                        % (", ".join(missing_redis_keys),
                           "is" if len(missing_redis_keys) == 1 else "are"))

                # BUG FIX: the parsed config was validated but never
                # returned, so any run with a config file present got
                # self.config = None and crashed later in __init__.
                return config
        except FileNotFoundError:
            return copy.deepcopy(self.__class__.default_config)

    def _can_perform_get(self, realm):
        """True while the realm is under budget (minus the safety margin)."""
        return self._requests_in_timespan(realm) < (
            self.realm_max_requests(realm) - self.config["safety_threshold"])

    def _realm_redis_key(self, realm):
        return "%s:REALMS:%s" % (self.redis_prefix, realm)

    def _fetch_realm_info(self, realm):
        redis_key = self._realm_redis_key(realm)
        return self.redis.hgetall(redis_key)

    def _requests_in_timespan(self, realm):
        # NOTE(review): a single SCAN pass with a large count hint; this
        # relies on the hint covering the whole keyspace — confirm.
        return len(
            self.redis.scan(cursor=0,
                            match="%s:REQUEST:%s:*" %
                            (self.redis_prefix, realm),
                            count=self._redis_keys_in_db() + 100)[1])

    def _redis_keys_in_db(self):
        return self.redis.info().get(
            "db%d" % self.config["redis"]["database"]).get("keys")

    def _selenium_webdriver_proxy_get(self, *args, **kwargs):
        """Rate-limited replacement for webdriver.get (requires realms=[...])."""
        realms = kwargs.pop("realms", list())
        if not len(realms):
            raise SeleniumRespectfulError("'realms' is a required kwarg")

        wait = kwargs.pop("wait", False)
        return self._webdriver_get(lambda: self.webdriver.get(*args, **kwargs),
                                   realms=realms,
                                   wait=wait)

    def _webdriver_get(self, get_func, realms=None, wait=False):
        """Run get_func once every named realm has budget; optionally block."""
        registered_realms = self.fetch_registered_realms()
        for realm in realms:
            if realm not in registered_realms:
                raise SeleniumRespectfulError(
                    "Realm '%s' hasn't been registered" % realm)

        if wait:
            # Poll once per second until no realm is rate-limited.
            while True:
                try:
                    return self._perform_webdriver_get(get_func, realms=realms)
                except SeleniumRespectfulRateLimitedError:
                    pass
                time.sleep(1)
        else:
            return self._perform_webdriver_get(get_func, realms=realms)

    def _perform_webdriver_get(self, get_func, realms=None):
        """Record one request per realm and invoke get_func, or raise."""
        self._validate_get_func(get_func)

        rate_limited_realms = list()
        for realm in realms:
            if not self._can_perform_get(realm):
                rate_limited_realms.append(realm)

        if not len(rate_limited_realms):
            for realm in realms:
                # One expiring marker key per request; its TTL equals the
                # realm's timespan, so live keys == requests in window.
                request_uuid = str(uuid.uuid4())
                self.redis.setex(name="%s:REQUEST:%s:%s" %
                                 (self.redis_prefix, realm, request_uuid),
                                 time=self.realm_timespan(realm),
                                 value=request_uuid)
            return get_func()
        else:
            raise SeleniumRespectfulRateLimitedError(
                "Currently rate-limited on Realm(s): %s" %
                ", ".join(rate_limited_realms))

    @staticmethod
    def _validate_get_func(get_func):
        """Ensure get_func is a lambda wrapping exactly self.webdriver.get."""
        if not isinstance(get_func, LambdaType):
            raise SeleniumRespectfulError(
                "'get_func' is expected to be a lambda")

        get_func_string = inspect.getsource(get_func)
        post_lambda_string = get_func_string.split(":")[1].strip()
        if not post_lambda_string.startswith("self.webdriver.get"):
            raise SeleniumRespectfulError(
                "The lambda can only contain a self.webdriver.get function call"
            )
class RedisLRU(object):
    """Size-bounded LRU cache stored in Redis.

    Each namespace keeps two Redis keys: a hash ("cache_values_<ns>")
    mapping cache keys to serialized values, and a sorted set
    ("cache_keys_<ns>") scoring every key by its last-access time so the
    oldest entries can be evicted when the namespace reaches capacity.
    """

    def __init__(self, redis=None, **kwargs):
        # Accept an existing client, otherwise build one from kwargs.
        if redis is not None:
            self._redis = redis
        else:
            self._redis = Redis(**kwargs)
        # namespace name -> maximum number of entries
        self.namespaces = {
            "default": 10000
        }

    def setup_namespace(self, namespace, size):
        """Set the LRU Size for a namespace."""
        self.namespaces[namespace] = int(size)

    def _serialize(self, s):
        # Identity for now; swap in json.dumps to store rich values.
        return s

    def _unserialize(self, s):
        # Identity for now; mirror of _serialize.
        return s

    def _size(self, namespace):
        """Configured capacity of *namespace* (KeyError if unknown)."""
        return self.namespaces[namespace]

    def _hit_store(self, namespace):
        """Redis key of the recency sorted set for *namespace*."""
        if namespace not in self.namespaces:
            raise KeyError("invalid namespace")
        return "cache_keys_{}".format(namespace)

    def _value_store(self, namespace):
        """Redis key of the value hash for *namespace*."""
        if namespace not in self.namespaces:
            raise KeyError("invalid namespace")
        return "cache_values_{}".format(namespace)

    def _expire_old(self, namespace):
        """Evict the least-recently-used entries once capacity is reached."""
        hits = self._hit_store(namespace)
        size = self._size(namespace)
        count = self._redis.zcard(hits)
        if count >= size:
            values = self._value_store(namespace)
            items = self._redis.zrange(hits, 0, count - size)
            # NOTE(review): error-level logging of evicted keys looks like
            # leftover debugging; consider logger.debug.
            logger.error(items)
            self._redis.zremrangebyrank(hits, 0, count - size)
            self._redis.hdel(values, *items)

    def clear(self, namespace="default"):
        """Clear the Cache."""
        hits = self._hit_store(namespace)
        values = self._value_store(namespace)
        self._redis.delete(hits, values)

    def clearAll(self):
        """Clear all known namespaces."""
        # BUG FIX: dict.iterkeys() is Python 2 only; iterate a snapshot of
        # the keys so this also works on Python 3.
        for k in list(self.namespaces):
            self.clear(k)

    def store(self, key, value, namespace="default"):
        """Store a key value pair and mark it as most recently used.

        Note: an existing key is overwritten (the docs of the original
        claimed otherwise; both branches always wrote the value).
        """
        values = self._value_store(namespace)
        # Eviction is only needed when a brand-new key may push the
        # namespace over capacity; overwriting an existing key cannot.
        if not self._redis.hexists(values, key):
            self._expire_old(namespace)
        hits = self._hit_store(namespace)
        self._redis.hset(values, key, self._serialize(value))
        # NOTE(review): zadd(name, score, member) is the redis-py 2.x
        # calling convention; redis-py 3.x expects a {member: score} dict.
        self._redis.zadd(hits, time.time(), key)

    def get(self, key, namespace="default"):
        """Get a value from the cache.

        Returns None if the key is not found; a hit refreshes the key's
        recency score.
        """
        values = self._value_store(namespace)
        value = self._redis.hget(values, key)
        if value:
            hits = self._hit_store(namespace)
            self._redis.zadd(hits, time.time(), key)
            return self._unserialize(value)
        return None

    def expire(self, key, namespace="default"):
        """Expire (invalidate) a key from the cache."""
        values = self._value_store(namespace)
        if self._redis.hexists(values, key):
            hits = self._hit_store(namespace)
            self._redis.hdel(values, key)
            self._redis.zrem(hits, key)
"""Walk through the read-side Hash commands against a local Redis."""
from redis import StrictRedis, ConnectionPool

pool = ConnectionPool(host='localhost', port=6379, db=0, password=None)
redis = StrictRedis(connection_pool=pool)

# Write-side examples, kept for reference:
# redis.hset('price', 'cake', 5)       # add one mapping -> returns 1 (fields added)
# redis.hsetnx('price', 'book', 6)     # add only if the field is absent -> returns 1
# redis.hmset('price', {'banana': 2, 'apple': 3, 'pear': 6, 'orange': 7})  # bulk add
# redis.hincrby('price', 'apple', 3)   # increment apple by 3 -> 6
# redis.hdel('price', 'banana')        # remove banana -> returns 1

print(redis.hget('price', 'cake'))                 # single-field lookup -> b'5'
print(redis.hmget('price', ['apple', 'orange']))   # multi-field lookup -> b'3', b'7'
print(redis.hexists('price', 'banana'))            # field membership -> True/False
print(redis.hlen('price'))                         # number of fields
print(redis.hkeys('price'))                        # every field name
print(redis.hvals('price'))                        # every field value
print(redis.hgetall('price'))                      # full field -> value mapping
class redis(object):
    """Pickle-serializing convenience wrapper around a StrictRedis client.

    Connection parameters come from conf.config.configs['redis']; values
    are pickled on write and unpickled on read.

    SECURITY: pickle.loads executes arbitrary code embedded in a payload;
    this store must never hold data from untrusted sources.
    """

    def __init__(self):
        super(redis, self).__init__()
        configs = conf.config.configs['redis']
        # redis://:<password>@<host>:<port>/1  (always database 1)
        url = "redis://:" + configs['pass'] + "@" + configs[
            'host'] + ":" + configs['port'] + "/1"
        pool = ConnectionPool.from_url(url)
        self.redis = StrictRedis(connection_pool=pool)
        self.expireTime = configs['expireTime']

    def getRedit(self):
        # NOTE(review): name looks like a typo for getRedis(); kept as-is
        # because external callers may rely on it.
        return self.redis

    def rget(self, key):
        """Read and unpickle a plain key."""
        vl = self.redis.get(key)
        return pickle.loads(vl)

    def rset(self, key, value):
        """Pickle and write a plain key."""
        value = pickle.dumps(value)
        return self.redis.set(key, value)

    def hget(self, apikey, key):
        """Read and unpickle one hash field."""
        vl = self.redis.hget(apikey, key)
        return pickle.loads(vl)

    def hset(self, apikey, key, value):
        """Pickle and write one hash field, refreshing the hash's TTL."""
        value = pickle.dumps(value)
        array = self.redis.hset(apikey, key, value)
        self.redis.expire(apikey, self.expireTime)
        return array

    def hexist(self, apikey, key):
        """True if the hash field exists."""
        return self.redis.hexists(apikey, key)

    def hdel(self, apikey, key):
        """Delete one hash field."""
        return self.redis.hdel(apikey, key)

    # Distributed lock: returns False when the key already exists,
    # otherwise sets it and returns True.
    def rsetnx(self, key, value):
        return self.redis.setnx(key, value)

    def rdel(self, key):
        """Delete a plain key."""
        return self.redis.delete(key)

    # Append elements to the tail of a list (queue producer side).
    def rpush(self, key, *value):
        # BUG FIX: previously forwarded the varargs tuple as a single list
        # element (rpush(key, value)); unpack so each argument becomes its
        # own list entry.
        return self.redis.rpush(key, *value)

    # Pop and return the head element (queue consumer side).
    def lpop(self, key):
        return self.redis.lpop(key)
class OrderBook:
    """Redis-backed order book plus trade history for a single product.

    Note that sequence ids are cast to integers.  The order book is also
    maintained in Redis:
      * a sorted set per side scoring per-price "sum size" keys by price,
      * a hash per (side, price) mapping order_id -> size,
      * a counter per (side, price) holding the summed size at that price,
      * sorted sets per (side, order_type) indexing per-timestamp trade
        size counters for the trade history.
    """

    def __init__(self, product: Product, sequence_id: int = 0) -> None:
        self.redis_server = StrictRedis(host='localhost', port=6379, db=0,
                                        encoding="utf-8", decode_responses=True)
        self.product = product
        self.sequence_id = int(sequence_id)
        # In-memory mirrors; authoritative state lives in Redis.
        self.order_book = {side: {} for side in OrderSide}
        self.trades = {side: {order_type: {} for order_type in OrderType}
                       for side in OrderSide}
        self.orders_added = 0
        self.orders_subtracted = 0

    def get_product_id(self) -> str:
        return self.get_product().get_product_id()

    def get_product(self) -> Product:
        return self.product

    def get_sequence_id(self) -> int:
        return self.sequence_id

    # this method determines the best maker price at which to place an order
    # so as to fill AT least quantity
    def get_network_price(self, side: OrderSide, total_quantity: float,
                          desired_quantity: float = 0,
                          allow_exceed_best: bool = True) -> Tuple[Optional[str], Optional[float]]:
        if total_quantity is None:
            raise OrderBookException('Total quantity cannot be none in get_price: {}'.format(total_quantity))
        # this is how much approximately we need to fill first to get best possible price
        other_quantity = total_quantity - desired_quantity
        best_price, worst_price, total_price, error, worst_qty = self.get_price(side=side, depth=other_quantity)
        # prices haven't loaded yet
        if worst_price is None or best_price is None:
            return None, None
        # this means we can optimally place at the back of the queue within an error of the minimum quote size
        # we do all comparisons as floats to save time at the cost of accuracy here
        elif error <= float(self.get_product().get_base_min_size_str()):
            return str(worst_price), desired_quantity
        # best bid and best ask are separated by the minimum spread so there is nowhere else for me to go
        # return available qty of 0 at best price
        elif best_price == worst_price and (self.spread_locked() or not allow_exceed_best):
            return str(best_price), 0.
        # take a slightly worse price but in exchange fill more quantity
        if side == OrderSide.bid:
            new_price = self.get_product().get_higher_price(str(worst_price))
        else:
            new_price = self.get_product().get_lower_price(str(worst_price))
        return str(new_price), desired_quantity + worst_qty - error

    def spread_locked(self) -> bool:
        """True when best bid and best ask are one tick apart."""
        best_bid, best_ask = self.get_best_bid_ask(0)
        if best_bid and best_ask:
            if self.get_product().get_higher_price(str(best_bid)) == self.get_product().round_price(str(best_ask)):
                return True
        return False

    # this returns
    # (best price, worst price, total price for depth, excess quantity above depth, filled qty at worst price)
    # in product quote currency
    def get_price(self, side: OrderSide, depth: float = 0) -> Tuple[float, float, float, float, float]:
        if depth is None:
            raise OrderBookException('depth cannot by none in get_price: {}'.format(depth))
        total_price = 0.
        total_qty = 0.
        best_price = None
        worst_price = None
        excess_qty = 0.
        worst_qty = 0.
        # bids are walked from highest price down, asks from lowest up
        reverse_order_sort = True if side is OrderSide.bid else False
        counter = 0
        iter_count = 10
        keep_going = True
        while keep_going:
            # page through the per-side price index, iter_count prices at a time
            price_keys = self.redis_server.zrange(self.__get_ob_order_set_redis_key(side),
                                                  counter, counter + iter_count - 1,
                                                  withscores=True, desc=reverse_order_sort)
            if len(price_keys) == 0:
                break
            sizes = self.redis_server.mget(map(lambda x: x[0], price_keys))
            for idx, (_, price) in enumerate(price_keys):
                if best_price is None:
                    best_price = price
                worst_price = price
                if sizes[idx] is None:
                    continue
                size = float(sizes[idx])
                qty = min(size, depth - total_qty)
                excess_qty = size - qty
                worst_qty = size
                total_price = total_price + (price * qty)
                total_qty = total_qty + qty
                if total_qty >= depth:
                    keep_going = False
                    break
            counter = counter + iter_count
        return best_price, worst_price, total_price, excess_qty, worst_qty

    def get_best_bid(self, depth: float = 0) -> float:
        # index 1 is the price reached after walking *depth* of the book
        return self.get_price(OrderSide.bid, depth)[1]

    def get_best_ask(self, depth: float = 0) -> float:
        return self.get_price(OrderSide.ask, depth)[1]

    def get_best_bid_ask(self, depth=0) -> Tuple[float, float]:
        return self.get_best_bid(depth), self.get_best_ask(depth)

    def get_best(self, side: OrderSide, depth: float = 0) -> float:
        if side == OrderSide.bid:
            return self.get_best_bid(depth)
        else:
            return self.get_best_ask(depth)

    def validate_order(self, order: Order) -> bool:
        """Raise unless *order* matches this book's product and sequence."""
        if isinstance(order, Order):
            if order.get_product_id() != self.get_product_id():
                raise OrderBookException(
                    'You can only add orders with the same product id {} {}'.format(self.get_product_id(),
                                                                                    order.get_product_id())
                )
            elif order.get_sequence_id() < self.get_sequence_id():
                raise SequenceException(
                    'You cannot add orders with a lower sequence id {} {}'.format(self.get_sequence_id(),
                                                                                  order.get_sequence_id())
                )
            else:
                return True
        else:
            raise OrderBookException(
                'You can only add orders of type Order to an order book {}'.format(str(type(order)))
            )

    # optimize so that this is way faster
    def validate(self) -> None:
        """Sanity check: the book must not be crossed (bid above ask)."""
        try:
            max_bid, min_ask = self.get_best_bid_ask()
            if max_bid and min_ask and max_bid > min_ask:
                raise OrderBookException(
                    'Max bid ({}) exceeds Min ask ({}) for product {}'.format(max_bid, min_ask,
                                                                              self.get_product_id())
                )
        except ValueError:
            pass

    # we need this to round to the nearest group by period!
    # group_by_period = None means no grouping at all
    # 1) Sort orders by created_at_time (first is most recent)
    # 2) For each created_at_time get rounded seconds ago created
    # 3) If same as previous order the increment array element
    # 4) Else append array element
    def get_trade_quantities(self, side: OrderSide, order_type: OrderType, seconds_ago: int,
                             group_by_period: int = None) -> List[float]:
        # BUG FIX: strftime('%s') is a non-portable glibc extension;
        # timestamp() yields the same epoch seconds on every platform.
        now_time = datetime.now(tz.tzutc()).timestamp()
        first_time = now_time - seconds_ago
        quantities = []
        last_created_at = None
        # this gets all relevant keys
        size_key_by_timestamp = self.redis_server.zrangebyscore(self.__get_th_order_set_redis_key(order_type, side),
                                                                first_time, now_time, withscores=True)
        if len(size_key_by_timestamp) == 0:
            return []
        sizes = self.redis_server.mget(map(lambda x: x[0], size_key_by_timestamp))
        for idx, (_, timestamp) in enumerate(size_key_by_timestamp):
            size = sizes[idx]
            if size is None:
                continue
            else:
                size = float(sizes[idx])
            created_at = int(timestamp)
            quantity = 0
            if group_by_period is None:
                r_created_at = created_at
            else:
                # BUG FIX: '/' is true division on Python 3, which defeated
                # the floor-to-period bucketing; '//' restores the intended
                # "round down to the nearest group_by_period" behavior.
                r_created_at = (created_at // group_by_period) * group_by_period
            quantity = quantity + size
            if r_created_at == last_created_at:
                quantities[-1] = quantities[-1] + quantity
            else:
                quantities.append(quantity)
            last_created_at = r_created_at
        return quantities

    def get_volume(self, side: OrderSide, order_type: OrderType, seconds_ago: int) -> float:
        order_quantities = self.get_trade_quantities(side, order_type, seconds_ago)
        return sum(order_quantities)

    def get_edge_trade_size(self, side: OrderSide, order_type: OrderType, seconds_ago: int,
                            edge_type: EdgeType, group_by_period: Optional[int] = None) -> float:
        """Reference trade size for an edge strategy (0 / mean / median / mean÷10)."""
        qty = None
        if edge_type == EdgeType.best:
            qty = 0.
        elif edge_type == EdgeType.mean:
            qty = self.get_average_trade_size(side, order_type, seconds_ago, group_by_period)
        elif edge_type == EdgeType.median:
            qty = self.get_median_trade_size(side, order_type, seconds_ago, group_by_period)
        elif edge_type == EdgeType.custom:
            qty = self.get_average_trade_size(side, order_type, seconds_ago, group_by_period)
            if qty is not None:
                qty = qty / 10.
        return qty

    def get_average_trade_size(self, side: OrderSide, order_type: OrderType, seconds_ago: int,
                               group_by_period: Optional[int] = None) -> Optional[float]:
        order_quantities = self.get_trade_quantities(side, order_type, seconds_ago, group_by_period)
        if len(order_quantities) == 0:
            return None
        return mean(order_quantities)

    def get_median_trade_size(self, side: OrderSide, order_type: OrderType, seconds_ago: int,
                              group_by_period: Optional[int] = None) -> Optional[float]:
        order_quantities = self.get_trade_quantities(side, order_type, seconds_ago, group_by_period)
        if len(order_quantities) == 0:
            return None
        return median(order_quantities)

    def get_mode_trade_size(self, side: OrderSide, order_type: OrderType, seconds_ago: int,
                            group_by_period: Optional[int] = None) -> Optional[float]:
        order_quantities = self.get_trade_quantities(side, order_type, seconds_ago, group_by_period)
        if len(order_quantities) == 0:
            return None
        try:
            return mode(order_quantities)
        except StatisticsError:
            # no unique mode
            return None

    def __get_root_ob_redis_key(self, side: OrderSide) -> str:
        return 'order_book:book:{}:{}'.format(self.get_product_id(), side.name)

    # this key points to a hash of order_id => size
    def __get_ob_order_hash_redis_key(self, side: OrderSide, price: str) -> str:
        return '{}:{:.5f}:order_list'.format(self.__get_root_ob_redis_key(side), float(price))

    # this key points to the sum of orders at this price
    def __get_ob_sum_size_redis_key(self, side: OrderSide, price: str) -> str:
        return '{}:{:.5f}:order_size_sum'.format(self.__get_root_ob_redis_key(side), float(price))

    # this key points to a set with price keys and score of price to facilitate getting a range
    def __get_ob_order_set_redis_key(self, side: OrderSide) -> str:
        return '{}'.format(self.__get_root_ob_redis_key(side))

    # redis key for trade history
    def __get_th_order_set_redis_key(self, order_type: OrderType, side: OrderSide) -> str:
        return 'order_book:history:trades:{}:{}:{}'.format(self.get_product_id(), side.name, order_type.name)

    # redis key for trade history
    def __get_th_redis_key(self, order_type: OrderType, side: OrderSide, timestamp: str) -> str:
        return 'order_book:history:trades:{}:{}:{}:{}'.format(self.get_product_id(), side.name,
                                                              order_type.name, timestamp)

    @staticmethod
    def __get_pr_redis_key(side: OrderSide) -> str:
        return 'order_book:changed_products:{}'.format(side.name)

    def __add_trade_to_trade_history(self, order: Order) -> None:
        th_set_key = self.__get_th_order_set_redis_key(order.get_order_type(), order.get_order_side())
        th_order_size_key = self.__get_th_redis_key(order.get_order_type(), order.get_order_side(),
                                                    order.get_unix_timestamp())
        # NOTE(review): zadd(name, score, member) is the redis-py 2.x
        # calling convention — confirm against the installed client.
        self.redis_server.zadd(th_set_key, order.get_unix_timestamp(), th_order_size_key)
        self.redis_server.incrbyfloat(th_order_size_key, order.get_size())

    def __update_sequence_id(self, sequence_id: int) -> None:
        # sequence ids are monotonically increasing; never move backwards
        if sequence_id > self.sequence_id:
            self.sequence_id = sequence_id

    def __remove_order(self, order: Order) -> None:
        price = order.get_price()
        side = order.get_order_side()
        order_key = self.__get_ob_order_hash_redis_key(side, price)
        size_key = self.__get_ob_sum_size_redis_key(side, price)
        self.redis_server.hdel(order_key, order.get_order_id())
        if self.redis_server.hlen(order_key) == 0:
            # last order at this price: drop the whole price level
            self.redis_server.delete(order_key)
            self.redis_server.delete(size_key)
            self.redis_server.zrem(self.__get_ob_order_set_redis_key(side),
                                   self.__get_ob_sum_size_redis_key(side, price))
        else:
            self.redis_server.incrbyfloat(size_key, '-' + order.get_size())

    def __change_order(self, order: Order) -> None:
        price = order.get_price()
        order_id = order.get_order_id()
        side = order.get_order_side()
        new_order_size = order.get_filled_size()
        order_key = self.__get_ob_order_hash_redis_key(side, price)
        size_key = self.__get_ob_sum_size_redis_key(side, price)
        if self.redis_server.hexists(order_key, order_id):
            self.redis_server.hset(order_key, order_id, new_order_size)
            self.redis_server.incrbyfloat(size_key, '-' + order.get_remaining_size())

    def __match_order(self, order: Order) -> None:
        price = order.get_price()
        order_id = order.get_order_id()
        side = order.get_order_side()
        order_key = self.__get_ob_order_hash_redis_key(side, price)
        size_key = self.__get_ob_sum_size_redis_key(side, price)
        self.redis_server.hincrbyfloat(order_key, order_id, '-' + order.get_size())
        self.redis_server.incrbyfloat(size_key, '-' + order.get_size())

    def __register_product_change(self, side) -> None:
        self.redis_server.sadd(self.__get_pr_redis_key(side), self.get_product_id())

    # allow addition of order to order book
    # this should be used for new orders
    def __add__(self, order: Order) -> None:
        self.validate_order(order)
        self.__update_sequence_id(order.get_sequence_id())
        price = order.get_price()
        side = order.get_order_side()
        if not order.get_historical():
            # This marks that we have live orders at this price
            self.redis_server.zadd(self.__get_ob_order_set_redis_key(side), price,
                                   self.__get_ob_sum_size_redis_key(side, price))
            # This adds the order to a list of orders keyed off price
            self.redis_server.hset(self.__get_ob_order_hash_redis_key(side, price),
                                   order.get_order_id(), order.get_size())
            self.redis_server.incrbyfloat(self.__get_ob_sum_size_redis_key(side, price),
                                          order.get_size())
            self.__register_product_change(order.get_order_side())
        self.orders_added = self.orders_added + 1
        # if self.orders_added % 1000 == 1:
        #     print('Heartbeat {} orders added to {}'.format(self.orders_added, self.get_product_id()))
        self.__add_trade_to_trade_history(order)

    # allow subtraction of order from order book
    def __sub__(self, order: Order) -> None:
        self.validate_order(order)
        self.__update_sequence_id(order.get_sequence_id())
        if not order.get_historical():
            if order.get_status() in [OrderStatus.filled, OrderStatus.canceled]:
                self.__remove_order(order)
            elif order.get_order_type() == OrderType.change:
                self.__change_order(order)
            else:
                self.__match_order(order)
            self.__register_product_change(order.get_order_side())
        self.orders_subtracted = self.orders_subtracted + 1
        # if self.orders_subtracted % 1000 == 1:
        #     print('Heartbeat {} orders removed from {}'.format(self.orders_subtracted, self.get_product_id()))
        self.__add_trade_to_trade_history(order)
class Moose(object): def __init__(self, HOST, PORT, NICK): super(Moose, self).__init__() self.HOST = HOST self.PORT = PORT self.NICK = NICK self.redis_server = StrictRedis(host='127.0.0.1', port=6379, db=0) self.irc = socket() self.commands = { "challs": { "number_of_args": 0, "text": "!challs - Get all the challenges with info", "method": self.challs, }, "add": { "number_of_args": -1, "username": True, "text": "!add [challenge_name OR challenge_id] [url or text] - Add some info to a challenge to help others out", "method": self.add, }, "get": { "number_of_args": 1, "text": "!get [challenge_name] OR !get #[challenge_id] - Get a gist with all the info for a challenge", "method": self.get, }, "calendar": { "number_of_args": 0, "text": "!calendar - Get the calendar url", "method": self.calendar, }, "id": { "number_of_args": 1, "text": "!id [hash] - Identify a hash", "method": self.idhash }, "purge": { "number_of_args": 0, "username": True, "text": "!purge - Remove all challenges (zachzor only)", "method": self.purge }, "farts": { "number_of_args": 0, "text": "!farts - Moose farts", "method": self.farts }, "help": { "number_of_args": 1, "text": "!help [command] - Get info on how to use a command", "method": self.help }, } f = open("github_oauth_token", "r") lines = f.readlines() if len(lines) < 1: raise Exception("No token in github_oauth_token!") self.headers = {"Authorization": "token %s" % lines[0].strip(), "User-Agent": "ecxinc"} f.close() def create_gist(self, problem_name, problem_info): gist = { "files": { "%s.txt" % problem_name: { "content": "\n".join("[%s %s] %s" % (info.name, info.date, info.info) for info in problem_info) } }, "public": False } r = requests.post("https://api.github.com/gists", headers=self.headers, data=dumps(gist)) if r.status_code != 201: raise GistException("Couldn't create gist!") return loads(r.text)["html_url"] def connect(self): print "Connecting..." 
self.irc.connect((self.HOST, self.PORT)) self.irc.send("NICK %s\r\n" % self.NICK) self.irc.send("USER %s %s bla :%s\r\n" % (self.NICK, self.NICK, self.NICK)) self.irc.send("JOIN #ctf\r\n") print "Connected!" self.serve_and_possibly_protect() def parsemsg(self, s): # Breaks a message from an IRC server into its username, command, and arguments. username, trailing = "", [] if not s: return "" if s[0] == ':': username, s = s[1:].split(' ', 1) username_info = username.split("!") if len(username_info) > 1: username = username_info[0] if s.find(' :') != -1: s, trailing = s.split(' :', 1) args = s.split() args.append(trailing.strip().split(" ")) else: args = s.split() command = args.pop(0) return username, command, args def send_message(self, message): self.irc.send("PRIVMSG #ctf :%s\r\n" % message) def handle_message(self, username, channel, args): if len(args) < 1: return arg = args.pop(0)[1:] if arg == "help" and len(args) == 0: self.help("") elif arg in self.commands.keys(): arg_num = self.commands[arg]["number_of_args"] params = [] if len(args) < arg_num: self.help(arg) return elif arg_num == 0: params = [] elif arg_num == -1: params = [args] else: params = args[:arg_num] if self.commands[arg].get("username", False): self.commands[arg]["method"](username, *params) else: self.commands[arg]["method"](*params) elif arg in self.commands.keys(): self.help(arg) def purge(self, username): if username == "zachzor": self.redis_server.delete("challs") self.send_message("All challenges removed") def get(self, challenge_name): if challenge_name[0] == '#': try: challenge_number = int(challenge_name[1:]) except ValueError: self.send_message("%s is not a valid challenge id" % challenge_name) return if self.redis_server.hlen("challs") <= challenge_number or challenge_number < 0: self.send_message("%s is not a valid challenge id" % challenge_name) return else: name = [(i, s) for i, s in enumerate(self.redis_server.hkeys("challs"))][challenge_number][1] try: gist = 
self.create_gist(name, pickle.loads(self.redis_server.hget("challs", name))) self.send_message("%s" % gist) except GistException: self.send_message("Unable to create gist") else: if not self.redis_server.hexists("challs", challenge_name): self.send_message("%s is not a valid challenge name" % challenge_name) return else: try: gist = self.create_gist(challenge_name, pickle.loads(self.redis_server.hget("challs", challenge_name))) self.send_message("%s" % gist) except GistException: self.send_message("Unable to create gist") def farts(self): self.send_message(" ".join(list(["pfffttt"] * randint(1, 7)))) def add(self, username, args): if len(args) < 2: self.help("add") return challenge_name, description = args[0], args[1:] new_info = InfoMessage(username, datetime.now().strftime("%m-%d-%Y %H:%M:%S"), " ".join(description)) if self.redis_server.hget("challs", challenge_name) == None: self.redis_server.hset("challs", challenge_name, pickle.dumps([new_info])) else: old = pickle.loads(self.redis_server.hget("challs", challenge_name)) old.append(new_info) self.redis_server.hset("challs", challenge_name, pickle.dumps(old)) self.send_message("Added!") def idhash(self, hash): hash_type = HashChecker(hash) hashzor = hash_type.check_hash() if hashzor == None: self.send_message("Hmm... 
I'm not sure about that one") else: self.send_message("That's probably a %s hash" % hashzor) def challs(self): if self.redis_server.hlen("challs") == 0: self.send_message("No challenges") else: self.send_message("Challenges: %s" % ", ".join(["[%d] %s" % (i, s) for i, s in enumerate(self.redis_server.hkeys("challs"))])) def calendar(self): self.send_message("http://d.pr/Baur") def help(self, method_name): print method_name if method_name not in self.commands.keys(): self.send_message(", ".join(self.commands.keys())) else: self.send_message(self.commands[method_name]["text"]) def serve_and_possibly_protect(self): while 1: data = self.irc.recv(4096) username, command, args = self.parsemsg(data) if command == "PING": self.irc.send("PONG " + data[1] + '\r\n') elif command == "PRIVMSG": if len(args[1]) > 0 and args[1][0][0] == "!": self.handle_message(username, args[0], [x.lower() for x in args[1]])
class RedisDatabase:
    """Facade over three local Redis logical databases.

    * ``datadb``  (db 0) -- pickled static/dynamic network & attribute data
    * ``cachedb`` (db 1) -- plain numeric-string lists used as a cache
      (opened with ``decode_responses=True``)
    * ``hashdb``  (db 2) -- hashes whose values are pickled

    Collaborates with the project-level ``netattr`` / ``atlas`` modules and
    accepts ``pymongo`` cursors; see :meth:`set_value`.
    """

    def __init__(self, password=""):
        # ``password`` is accepted for interface compatibility; the local
        # connections opened by start_redis() do not use it.
        self.start_redis()

    def is_redis_running(self):
        """Best-effort check whether a local redis-server is running.

        Only win32 is actually probed (via ``tasklist``); on macOS and every
        other platform the server is assumed to be managed externally, so
        True is returned.
        """
        try:
            if sys.platform == 'win32':
                lines = os.popen(
                    'tasklist | findstr ' + "redis-server.exe").readlines()
                return len(lines) >= 1
            # 'darwin' and all other platforms: assume running.
            return True
        except Exception as e:
            raise Exception(
                'Unable to check redis running state,error message: ' +
                str(e))

    def start_redis(self, password=''):
        """Start the redis service if needed and open the three connections.

        :param password: unused, kept for interface compatibility.
        :raises Exception: when the service cannot be started or connected.
        """
        try:
            if not self.is_redis_running() and sys.platform == 'win32':
                # Only the Windows deployment knows how to start the service;
                # other platforms are assumed to manage redis externally.
                os.system("e:/redis/redis-server --service-start")
        except Exception as e:
            raise Exception('Unable to start redis, error message: ' + str(e))
        try:
            self.datadb = StrictRedis(host='localhost', port=6379, db=0)
            self.cachedb = StrictRedis(host='localhost', port=6379, db=1,
                                       decode_responses=True)
            self.hashdb = StrictRedis(host='localhost', port=6379, db=2)
        except Exception as e:
            raise Exception('Redis connection failed,error message:' + str(e))

    def stop_redis(self):
        """Stop the redis service (win32 only; other platforms are no-ops)."""
        try:
            if self.is_redis_running() and sys.platform == 'win32':
                os.system("e:/redis/redis-server --service-stop")
        except Exception as e:
            raise Exception('Unable to stop redis,error message: ' + str(e))

    def set_value(self, obj, data_source):
        """Store *obj* in Redis under *data_source*.

        Accepted types (dispatch on exact type, as before):

        * ``dict``           -- one static entry; ``obj['value']`` must be a
          pickled array.  Returns the wrapped Net/Attr object.
        * pymongo ``Cursor`` -- a dynamic series; returns the wrapped
          DynamicNet/DynamicAttr object.
        * ``netattr.Net`` / ``netattr.Attr`` -- static data (no return).
        * ``netattr.DynamicNet`` / ``netattr.DynamicAttr`` -- dynamic data
          (no return).
        """
        if type(obj) is dict:
            key = self.generate_static_key(data_source, obj['scan'],
                                           obj['atlas'], obj['feature'])
            self.datadb.set(key, obj['value'], ex=1800)
            return self.trans_netattr(obj['scan'], obj['atlas'],
                                      obj['feature'],
                                      pickle.loads(obj['value']))
        elif type(obj) is pymongo.cursor.Cursor:
            value = []
            scan = obj[0]['scan']
            # NOTE: renamed from ``atlas`` -- the original local shadowed the
            # project-level ``atlas`` module.
            atlas_name = obj[0]['atlas']
            feature = obj[0]['feature']
            window_length = obj[0]['window_length']
            step_size = obj[0]['step_size']
            key_all = self.generate_dynamic_key(data_source, scan, atlas_name,
                                                feature, window_length,
                                                step_size)
            pipe = self.datadb.pipeline()
            length = obj.count()
            try:
                pipe.multi()
                # ':0' holds the element count; elements are stored 1-based
                # so query keys stay in ascending order.
                pipe.set(key_all + ':0', length, ex=1600)
                for i in range(length):
                    pipe.set(key_all + ':' + str(i + 1), (obj[i]['value']),
                             ex=1800)
                    value.append(pickle.loads(obj[i]['value']))
                pipe.execute()
            except Exception as e:
                raise Exception(
                    'An error occur when trying to set value in redis, error message: '
                    + str(e))
            return self.trans_dynamic_netattr(scan, atlas_name, feature,
                                              window_length, step_size,
                                              np.array(value))
        elif type(obj) is netattr.Net or type(obj) is netattr.Attr:
            key = self.generate_static_key(data_source, obj.scan,
                                           obj.atlasobj.name,
                                           obj.feature_name)
            # BUG FIX: the original wrote to ``self.atadb`` (an undefined
            # attribute) and would raise AttributeError here.
            self.datadb.set(key, pickle.dumps(obj.data))
        elif type(obj) is netattr.DynamicNet or type(
                obj) is netattr.DynamicAttr:
            key_all = self.generate_dynamic_key(data_source, obj.scan,
                                                obj.atlasobj.name,
                                                obj.feature_name,
                                                obj.window_length,
                                                obj.step_size)
            length = obj.data.shape[2]
            pipe = self.datadb.pipeline()
            is_net = type(obj) is netattr.DynamicNet
            try:
                pipe.multi()
                pipe.set(key_all + ':0', length, ex=1600)
                for i in range(length):
                    # Keys are 1-based so query order matches insertion order.
                    window = obj.data[:, :, i] if is_net else obj.data[:, i]
                    # BUG FIX: the DynamicAttr branch used to store the raw
                    # numpy slice while get_dynamic_value() unpickles every
                    # element; pickle both kinds for a symmetric round-trip.
                    pipe.set(key_all + ':' + str(i + 1),
                             pickle.dumps(window), ex=1800)
                pipe.execute()
            except Exception as e:
                raise Exception(
                    'An error occur when trying to set value in redis, error message: '
                    + str(e))

    def generate_static_key(self, data_source, subject_scan, atlas_name,
                            feature_name):
        """Build a static entry key: ``source:scan:atlas:feature:0``."""
        return ':'.join((data_source, subject_scan, atlas_name,
                         feature_name, '0'))

    def generate_dynamic_key(self, data_source, subject_scan, atlas_name,
                             feature_name, window_length, step_size):
        """Build a dynamic key prefix:
        ``source:scan:atlas:feature:1:window_length:step_size``."""
        return ':'.join((data_source, subject_scan, atlas_name, feature_name,
                         '1', str(window_length), str(step_size)))

    def get_static_value(self, data_source, subject_scan, atlas_name,
                         feature_name):
        """Query one static network/attribute.

        :returns: a ``Net``/``Attr`` instance, or None when the key is absent.
        """
        key = self.generate_static_key(data_source, subject_scan, atlas_name,
                                       feature_name)
        res = self.datadb.get(key)
        # Touch the key to refresh its sliding 30-minute expiration.
        self.datadb.expire(key, 1800)
        if res is None:
            return None
        return self.trans_netattr(subject_scan, atlas_name, feature_name,
                                  pickle.loads(res))

    def trans_netattr(self, subject_scan, atlas_name, feature_name, value):
        """Wrap a raw numpy array in an Attr (1-D) or a Net (otherwise)."""
        wrapper = netattr.Attr if value.ndim == 1 else netattr.Net
        return wrapper(value, atlas.get(atlas_name), subject_scan,
                       feature_name)

    def get_dynamic_value(self, data_source, subject_scan, atlas_name,
                          feature_name, window_length, step_size):
        """Query one dynamic network/attribute series.

        :returns: a ``DynamicNet``/``DynamicAttr`` instance, or None when the
                  count key (``...:0``) is absent.
        """
        key_all = self.generate_dynamic_key(data_source, subject_scan,
                                            atlas_name, feature_name,
                                            window_length, step_size)
        if not self.datadb.exists(key_all + ':0'):
            return None
        pipe = self.datadb.pipeline()
        try:
            pipe.multi()
            length = int(self.datadb.get(key_all + ':0').decode())
            for i in range(1, length + 1):
                pipe.get(key_all + ':' + str(i))
            res = pipe.execute()
        except Exception as e:
            raise Exception(
                'An error occur when trying to get value in redis, error message: '
                + str(e))
        try:
            # Second pipeline: refresh every element's expiration while
            # unpickling the fetched values.
            pipe.multi()
            value = []
            for i in range(length):
                value.append(pickle.loads(res[i]))
                pipe.expire(key_all + ':' + str(i + 1), 1800)
            pipe.expire(key_all + ':0', 1600)
            pipe.execute()
        except Exception as e:
            raise Exception(
                'An error occur when trying to update expiration time in redis, error message: '
                + str(e))
        return self.trans_dynamic_netattr(subject_scan, atlas_name,
                                          feature_name, window_length,
                                          step_size, np.array(value))

    def trans_dynamic_netattr(self, subject_scan, atlas_name, feature_name,
                              window_length, step_size, value):
        """Wrap stacked windows in a DynamicAttr (2-D) or DynamicNet (3-D)."""
        if value.ndim == 2:
            return netattr.DynamicAttr(value.swapaxes(0, 1),
                                       atlas.get(atlas_name), window_length,
                                       step_size, subject_scan, feature_name)
        return netattr.DynamicNet(value.swapaxes(0, 2).swapaxes(0, 1),
                                  atlas.get(atlas_name), window_length,
                                  step_size, subject_scan, feature_name)

    def exists_key(self, data_source, subject_scan, atlas_name, feature_name,
                   isdynamic=False, window_length=0, step_size=0):
        """Check the existence of a static entry, or -- with
        ``isdynamic=True`` plus window/step -- of a dynamic entry."""
        if isdynamic is False:
            return self.datadb.exists(
                self.generate_static_key(data_source, subject_scan,
                                         atlas_name, feature_name))
        return self.datadb.exists(
            self.generate_dynamic_key(data_source, subject_scan, atlas_name,
                                      feature_name, window_length,
                                      step_size) + ':0')

    # ------------------------------------------------------------------
    # List cache helpers (db 1).  Items must be ints or floats.
    # ------------------------------------------------------------------

    def set_list_all_cache(self, key, value):
        """Replace the cached list at *key* with *value*; return its length.

        Note: any existing entry under *key* is overwritten.
        """
        self.cachedb.delete(key)
        for item in value:
            self.cachedb.rpush(key, item)
        return self.cachedb.llen(key)

    def set_list_cache(self, key, value):
        """Append *value* to the cached list at *key* (created if absent);
        return the new length."""
        self.cachedb.rpush(key, value)
        return self.cachedb.llen(key)

    def get_list_cache(self, key, start=0, end=-1):
        """Return the cached list slice, converting each item to int/float."""
        res = self.cachedb.lrange(key, start, end)
        return [int(x) if x.isdigit() else float(x) for x in res]

    def exists_key_cache(self, key):
        """Check the existence of a cached list by key."""
        return self.cachedb.exists(key)

    def delete_key_cache(self, key):
        """Delete a cache entry; no-op (returns 0) when the key is absent."""
        return self.cachedb.delete(key)

    def clear_cache(self):
        """Delete every entry in the cache database."""
        self.cachedb.flushdb()

    # ------------------------------------------------------------------
    # Hash helpers (db 2).  Hash field names must be strings; values are
    # pickled on write and unpickled on read.
    # ------------------------------------------------------------------

    def set_hash_all(self, name, hash):
        """Replace the hash *name* with the given mapping (values pickled).

        ``hash`` keeps its original builtin-shadowing parameter name for
        keyword-call compatibility and, as before, is mutated in place.
        Note: any existing hash under *name* is overwritten.
        """
        self.hashdb.delete(name)
        for field in hash:
            hash[field] = pickle.dumps(hash[field])
        self.hashdb.hmset(name, hash)

    def set_hash(self, name, item1, item2=''):
        """Append entry/entries to hash *name* (created if absent).

        Either pass a dict as ``item1``, or a key as ``item1`` and a value
        as ``item2``.
        """
        if type(item1) is dict:
            for field in item1:
                item1[field] = pickle.dumps(item1[field])
            self.hashdb.hmset(name, item1)
        else:
            self.hashdb.hset(name, item1, pickle.dumps(item2))

    def get_hash(self, name, keys=None):
        """Query a hash.

        * no ``keys``          -- return the whole hash as a dict
        * ``keys`` is a list   -- return the values in the same order
        * otherwise            -- return the single value for that key

        Note: the original signature used a mutable default (``keys=[]``);
        ``None`` behaves identically here since both are falsy.
        """
        if not keys:
            raw = self.hashdb.hgetall(name)
            return {field.decode(): pickle.loads(raw[field]) for field in raw}
        if type(keys) is list:
            return [pickle.loads(item)
                    for item in self.hashdb.hmget(name, keys)]
        return pickle.loads(self.hashdb.hget(name, keys))

    def exists_hash(self, name):
        """Check the existence of a hash by name."""
        return self.hashdb.exists(name)

    def exists_hash_key(self, name, key):
        """Check the existence of a field in the given hash."""
        return self.hashdb.hexists(name, key)

    def delete_hash(self, name):
        """Delete a whole hash by name."""
        self.hashdb.delete(name)

    def delete_hash_key(self, name, key):
        """Delete one field from the given hash."""
        self.hashdb.hdel(name, key)

    def clear_hash(self):
        """Delete every hash in the hash database."""
        self.hashdb.flushdb()

    def flushall(self):
        # flushall() clears every logical database on the server.
        self.datadb.flushall()
class DatabaseHandler(object):
    """Redis-backed store for chat groups, feed urls and subscriptions.

    Key layout (every value is a hash):

    * ``group:<chat_id>``
    * ``url:^<url>^``
    * ``user_url:<user_id>:chat_id:<chat_id>:^<url>^``
    """

    def __init__(self, db, password=None):
        # BUG FIX: ``password`` used to be read from an undefined global
        # (NameError on construction); it is now an explicit parameter.
        # None means an unauthenticated local server -- backward compatible.
        self.redis = StrictRedis(host='localhost',
                                 port=6379,
                                 password=password,
                                 charset='utf-8',
                                 decode_responses=True,
                                 db=db)

    def _find(self, search):
        """Collect every key matching *search* via incremental SCAN."""
        names = []
        cursor = 0
        while True:
            cursor, batch = self.redis.scan(cursor, str(search))
            names.extend(batch)
            if cursor == 0:
                # SCAN returns cursor 0 once the full keyspace was covered.
                return names

    def find_names(self, find):
        """Return every key whose name contains *find*."""
        return self._find('*' + find + '*')

    def exist_name(self, name):
        """True when *name* exists in the database."""
        return bool(self.redis.exists(name))

    def exist_key(self, name, key):
        """True when hash *name* has field *key*."""
        return self.redis.hexists(name, key)

    def get_value_name_key(self, name, key):
        """Return the value of field *key* in hash *name* (None if absent)."""
        return self.redis.hget(name, key)

    def get_all_keys_for_name(self, name):
        """Return the whole hash at *name*, or None when empty/missing."""
        keys = self.redis.hgetall(name)
        return keys if keys else None

    def get_keys_for_name(self, name, *args):
        """Return the values of the given fields, or None when nothing."""
        keys = self.redis.hmget(name, *args)
        return keys if keys else None

    def set_name_key(self, name, mapping: dict):
        """Write *mapping* into hash *name*; True when the key now exists."""
        self.redis.hset(name=name, mapping=mapping)
        return self.exist_name(name)

    def del_names(self, names: list):
        """Delete every key in *names*; return the per-key delete counts."""
        return [self.redis.delete(name) for name in names]

    def exist_group(self, chat_id):
        """True when the group hash for *chat_id* exists."""
        return self.exist_name('group:' + str(chat_id))

    def update_owner(self, chat_id, user_id):
        """Re-key every user_url entry of *chat_id* to the new owner."""
        for name in self._find('user_url:*' + str(chat_id) + '*'):
            parts = name.split(':')
            parts[1] = str(user_id)
            # BUG FIX: the original re-joined with ';' although the whole
            # key schema is colon-delimited, so renamed keys could never be
            # found by any later 'user_url:*' scan.
            self.redis.rename(name, ':'.join(parts))

    def update_group(self, chat_id, chat_name, chat_title, user_id,
                     update_owner=None):
        """Register or update a group hash; optionally reassign ownership.

        :returns: True when the group hash exists after the write.
        """
        mapping = {
            'chat_adm': str(user_id),
            'chat_id': str(chat_id),
            'chat_lock': 'True',
            'chat_name': chat_name,
            'chat_title': str(chat_title),
        }
        if update_owner:
            self.update_owner(chat_id, user_id)
        return bool(self.set_name_key(name='group:' + str(chat_id),
                                      mapping=mapping))

    def exist_url(self, url):
        """True when the url hash exists."""
        return self.exist_name('url:^' + str(url) + '^')

    def update_url(self,
                   url,
                   last_update='2000-01-01 00:00:00+00:00',
                   last_url='http://www.exemplo.com'):
        """Register or update a url hash with its last update/last item."""
        name = 'url:^' + str(url) + '^'
        mapping = {'last_update': str(last_update), 'last_url': last_url}
        return bool(self.set_name_key(name=name, mapping=mapping))

    def exist_url_to_chat(self, user_id, chat_id, url):
        """True when the subscription of *chat_id* to *url* exists."""
        name = 'user_url:' + str(user_id) + ':chat_id:' + str(
            chat_id) + ':^' + str(url) + '^'
        return self.exist_name(name)

    def set_url_to_chat(self, chat_id, chat_name, url, user_id):
        """Subscribe a chat to *url*; False when already subscribed."""
        if not self.exist_url(url):
            # First subscriber: create the url hash with default values.
            self.update_url(url=url)
        if self.exist_url_to_chat(user_id, chat_id, url):
            return False
        name = 'user_url:' + str(user_id) + ':chat_id:' + str(
            chat_id) + ':^' + str(url) + '^'
        mapping = {
            'chat_id': str(chat_id),
            'chat_name': chat_name,
            'user_id': str(user_id),
            'disable': 'False',
        }
        return bool(self.set_name_key(name=name, mapping=mapping))

    @staticmethod
    def extract_url_from_names(names):
        """Return the sorted unique urls embedded (``^url^``) in *names*;
        an empty tuple when *names* is empty."""
        if not names:
            return ()
        return sorted({name.split('^')[1] for name in names})

    def get_chat_urls(self, user_id):
        """Return one mapping dict per url subscribed by *user_id*."""
        chat_urls = []
        for name in self._find('user_url:' + str(user_id) + ':*'):
            keys = self.get_all_keys_for_name(name)
            chat_urls.append({
                'user_id': str(keys.get('user_id')),
                'chat_name': keys.get('chat_name'),
                'url': self.extract_url_from_names([name])[0],
                'chat_id': str(keys.get('chat_id')),
            })
        return chat_urls

    def get_update_url(self, url):
        """Return last update info for *url*, or False when unknown."""
        name = 'url:^' + str(url) + '^'
        if not self.exist_name(name):
            return False
        keys = self.get_all_keys_for_name(name)
        return {
            'last_update': keys.get('last_update'),
            'last_url': keys.get('last_url'),
        }

    def get_urls_activated(self):
        """Return urls of every subscription whose 'disable' != 'True'."""
        names = self._find('user_url*')
        active = sorted({
            name
            for name in names
            if not self.get_value_name_key(name, 'disable') == 'True'
        })
        return self.extract_url_from_names(active)

    def get_names_for_user_activated(self, url):
        """Return subscription keys for *url* whose 'disable' == 'False'."""
        names = self._find('user_url*' + url + '*')
        return sorted({
            name
            for name in names
            if self.get_value_name_key(name, 'disable') == 'False'
        })

    def get_chat_id_for_chat_name(self, user_id, chat_name):
        """Resolve *chat_name* to its chat_id among the user's entries."""
        for name in self._find('user_url:*' + str(user_id) + '*'):
            found_name = self.get_value_name_key(name, 'chat_name')
            found_id = self.get_value_name_key(name, 'chat_id')
            if found_name == chat_name and found_id:
                return found_id
        return None

    def disable_url_chat(self, chat_id):
        """Mark every subscription of *chat_id* as disabled."""
        names = self._find('user_url:*chat_id:' + str(chat_id) + '*')
        mapping = {'disable': 'True'}
        if not names:
            return []
        return [
            self.set_name_key(name=name, mapping=mapping) for name in names
        ]

    def del_url_for_chat(self, chat_id, url):
        """Delete the subscription of *chat_id* to *url*.

        :returns: True when the first matching key was deleted, else None.
        """
        names = self._find('user_url:*' + str(chat_id) + '*' + url + '*')
        result = self.del_names(names)
        # BUG FIX: guard the empty-match case (original indexed result[0]
        # blindly and raised IndexError when nothing matched).
        return True if result and result[0] == 1 else None
class RedisConn(object): """docstring for RedisConn""" def __init__(self, startup_nodes=None, host="localhost", port=6379, db=0, password=None, encoding='utf-8', socket_keepalive=False, connection_pool=None, max_connections=None, project="", decode_responses=True, **kwargs): if project: project = f'{project}:' self.cluster_flag = False self.project = project if startup_nodes: from rediscluster import StrictRedisCluster if isinstance(startup_nodes, (str, bytes)): startup_nodes = _normalize_startup_nodes(startup_nodes) self._redis = StrictRedisCluster(startup_nodes=startup_nodes, decode_responses=decode_responses, skip_full_coverage_check=True, **kwargs) self.cluster_flag = True else: self._redis = StrictRedis(host=host, port=port, db=db, password=password, socket_keepalive=socket_keepalive, connection_pool=connection_pool, max_connections=max_connections, **kwargs) def add_head(self, key): return f'{self.project}{key}' def format_key(): def make_wrapper(func): def wrapper(self, key, *args, **kwargs): new_key = self.add_head(key) return func(self, new_key, *args, **kwargs) return wrapper return make_wrapper def format_key_keys(): def make_wrapper(func): def wrapper(self, key, keys, *args, **kwargs): new_key = self.add_head(key) new_keys = list(map(self.add_head, keys)) return func(self, new_key, new_keys, *args, **kwargs) return wrapper return make_wrapper def format_args(): def make_wrapper(func): def wrapper(self, *args, **kwargs): new_args = list(map(self.add_head, list(args))) return func(self, *new_args, **kwargs) return wrapper return make_wrapper def format_two_key(): def make_wrapper(func): def wrapper(self, src, dst, *args, **kwargs): new_src = self.add_head(src) new_dst = self.add_head(dst) return func(self, new_src, new_dst, *args, **kwargs) return wrapper return make_wrapper def format_keys(): def make_wrapper(func): def wrapper(self, keys, *args): new_keys = list(map(self.add_head, keys)) return func(self, new_keys, *args) return wrapper return 
make_wrapper def format_dicts(): def make_wrapper(func): def wrapper(self, mapping, *args): new_mapping = {} for key in mapping.keys(): new_key = self.add_head(key) new_mapping[new_key] = mapping[key] return func(self, new_mapping, *args) return wrapper return make_wrapper @format_args() def unlink(self, *keys): """ time complexity O(1) redis异步删除keys """ return self._redis.unlink(*keys) def pipeline(self, transaction=True, shard_hint=None): """ 返回一个pipe对象 """ return self._redis.pipeline(transaction, shard_hint) """===============================string-start==========================""" # } @format_key() def set(self, key, value, ex=None, px=None, nx=False, xx=False): """ time complexity O(1) Set the value at key ``key`` to ``value`` Arguments: key (str): key key value (str): key value ex(int): 过期时间(秒) px(int): 过期时间(豪秒) nx(bool): 如果设置为True,则只有key不存在时,当前set操作才执行(新建) xx(bool): 如果设置为True,则只有key存在时,当前set操作才执行 (修改) Returns: result(bool): 是否成功成功是True失败可能是None """ return self._redis.set(key, value, ex, px, nx, xx) @format_key() def get(self, key): """ time complexity O(1) Return the value at ``key``, or None if the key doesn't exist Arguments: key (str): key Returns: value (str):返回value """ return self._redis.get(key) @format_key() def getset(self, key, value): """ time complexity O(1) 设置新值并获取原来的值 """ return self._redis.getset(key, value) @format_key() def strlen(self, key): """ time complexity O(1) 获得key对应的value长度 """ return self._redis.strlen(key) @format_key() def getrange(self, key, start, end): """ time complexity O(1) 获得key对应的value的start到end长度字符返回 """ return self._redis.getrange(key, start, end) @format_key() def setrange(self, key, offset, value): """ time complexity O(1) 设置key对应的value从offset地方用新value替换 """ return self._redis.setrange(key, offset, value) @format_key() def setbit(self, key, offset, value): """ time complexity O(1) value值只能是1或0 设置key对应的value二进制在offset位用value替换 """ return self._redis.setbit(key, offset, value) @format_key() def getbit(self, key, 
offset): """ time complexity O(1) 获取key对应的value二进制在offset位的值 """ return self._redis.getbit(key, offset) @format_key() def expire(self, key, time): """ time complexity O(1) 设置key的过期时间s """ return self._redis.expire(key, time) @format_key() def pexpire(self, key, time): """ time complexity O(1) 设置key的过期时间ms """ return self._redis.pexpire(key, time) @format_key() def pexpireat(self, key, when): """ time complexity O(1) 设置key的过期时间(在什么时候过期) when是uninx的时间戳ms """ return self._redis.pexpireat(key, when) @format_key() def pttl(self, key): """ time complexity O(1) 获得key过期时间(ms),没有设置过期时间返回-1 """ return self._redis.pttl(key) @format_key() def ttl(self, key): """ time complexity O(1) 获得name过期时间(s),没有设置过期时间返回-1 """ return self._redis.ttl(key) @format_dicts() def mset(self, mapping): """ time complexity O(n) Arguments: mapping (dict): {name: value,name1: value1} Returns: return ok """ return self._redis.mset(mapping) @format_dicts() def msetnx(self, mapping): """ time complexity O(n) Arguments: mapping (dict): {name: value,name1: value1} Returns: return (bool): 与mset区别是指定的key中有任意一个已存在,则不进行任何操作,返回错误 """ return self._redis.msetnx(mapping) @format_keys() def mget(self, keys, *args): """ time complexity O(n) Arguments: keys (list): [name, name1] Returns: return (list): 返回对应keys的value, name在数据库不存在返回None Mind!: 一次性取多个key确实比get提高了性能,但是mget的时间复杂度O(n), 实际使用过程中测试当key的数量到大于100之后性能会急剧下降, 建议mget每次key数量不要超过100。在使用前根据实列的redis吞吐量可能会不一样。 """ return self._redis.mget(keys, *args) @format_key() def incr(self, key, amount=1): """ time complexity O(1) 将key对应的value值自增amount,并返回自增后的值。只对可以转换为整型的String数据起作用。 用于统计sql型数据库大表里面的数据量 """ return self._redis.incr(key, amount) @format_key() def incrbyfloat(self, key, amount=1.0): """ time complexity O(1) amount 可以为负数代表减法 将key对应的value值自增amount,并返回自增后的值。只对可以转换为float的String数据起作用。 用于统计sql型数据库大表里面的数据量 """ return self._redis.incrbyfloat(key, amount) @format_key() def decr(self, key, amount=1): """ time complexity O(1) 
将key对应的value值自减amount,并返回自减后的值。只对可以转换为整型的String数据起作用。 用于统计sql型数据库大表里面的数据量 """ return self._redis.decr(key, amount) def keys(self, pattern='*'): """ time complexity O(n) 获取匹配pattern的所有key.实际项目中慎用 """ return self._redis.keys(pattern) @format_key() def move(self, key, db): """ time complexity O(1) 移动key到其他db """ return self._redis.move(key, db) def randomkey(self): """ time complexity O(1) 随机返回一个key """ return self._redis.randomkey() @format_args() def rename(self, src, dst): """ time complexity O(1) 重命名key src to dst """ return self._redis.rename(src, dst) @format_args() def exists(self, *keys): """ time complexity O(1) 查看keys是否存在返回存在的key数量 """ return self._redis.exists(*keys) @format_args() def delete(self, *keys): """ time complexity O(1) 删除keys """ return self._redis.delete(*keys) @format_key() def type(self, key): """ time complexity O(1) 查看key对应value类型 """ return self._redis.type(key) # { """===============================string-end============================""" """===============================list-start============================""" # } @format_keys() def blpop(self, keys, timeout=0): """ 如果keys里面有list为空要求整个服务器被阻塞以保证块执行时的原子性, 该行为阻止了其他客户端执行 LPUSH 或 RPUSH 命令 阻塞的一个命令,用来做轮询和会话配合使用 Arguments: keys(list): [keys, keys] timeout(int): S """ return self._redis.blpop(keys, timeout) @format_keys() def brpop(self, keys, timeout=0): """ 同上,取数据的方向不同 """ return self._redis.brpop(keys, timeout) @format_two_key() def brpoplpush(self, src, dst, timeout=0): """ 从src表尾取一个数据插入dst表头。同上src为空阻塞 """ return self._redis.brpoplpush(src, dst, timeout) @format_key() def lpush(self, key, *values): """ time complexity O(n) Set the value at key ``key`` to ``value`` Arguments: key (str): key key value (list): key value Returns: result(int): 插入成功之后list长度 """ return self._redis.lpush(key, *values) @format_key() def lpushx(self, key, *values): """ time complexity O(n) only key not exists Arguments: key (str): key value (list): key value Returns: result(int): 插入成功之后list长度 """ return 
self._redis.lpushx(key, *values) @format_key() def lpop(self, key): """ time complexity O(1) 移除并返回列表 key 的头元素。 """ return self._redis.lpop(key) @format_key() def rpush(self, key, *values): """ time complexity O(n) Set the value at key ``key`` to ``value`` Arguments: key (str): key key value (list): key value Returns: result(int): 插入成功之后list长度 """ return self._redis.rpush(key, *values) @format_key() def rpushx(self, key, *values): """ time complexity O(n) only key not exists Arguments: key (str): key value (list): key value Returns: result(int): 插入成功之后list长度 """ return self._redis.rpushx(key, *values) @format_key() def rpop(self, key): """ time complexity O(1) 移除并返回列表 key尾元素。 """ return self._redis.rpop(key) @format_key() def lrange(self, key, start, end): """ time complexity O(n) 获取list数据包含start,end.在不清楚list的情况下尽量不要使用lrange(key, 0, -1)操作 应尽可能控制一次获取的元素数量 """ return self._redis.lrange(key, start, end) @format_args() def rpoplpush(self, src, dst): """ 从src表尾取一个数据插入dst表头 """ return self._redis.rpoplpush(src, dst) @format_key() def llen(self, key): """ time complexity O(1) 获取list长度,如果key不存在返回0,如果key不是list类型返回错误 """ return self._redis.llen(key) @format_key() def lindex(self, key, index): """ time complexity O(n) n为经过的元素数量 返回key对应list的index位置的value """ return self._redis.lindex(key, index) @format_key() def linsert(self, key, where, refvalue, value): """ time complexity O(n) n为经过的元素数量 key或者refvalue不存在就不进行操作 Arguments: where(str): BEFORE|AFTER 后|前 refvalue(str): list里面的值 """ return self._redis.linsert(key, where, refvalue, value) @format_key() def lrem(self, key, count, value): """ time complexity O(n) 删除count数量的value Arguments: count(int): count>0 表头开始搜索 count<0 表尾开始搜索 count=0 删除所有与value相等的数值 Returns: result(int): 删除的value的数量 """ if self.cluster_flag: return self._redis.lrem(key, value, count) return self._redis.lrem(key, count, value) @format_key() def lset(self, key, index, value): """ time complexity O(n) 设置list的index位置的值,没有key和超出返回错误 """ return self._redis.lset(key, 
index, value) @format_key() def ltrim(self, key, start, end): """ time complexity O(n) n为被删除的元素数量 裁剪让列表只保留指定区间内的元素,不在指定区间之内的元素都将被删除。 """ return self._redis.ltrim(key, start, end) @format_key() def sort(self, key, start=None, num=None, by=None, get=None, desc=False, alpha=False, store=None, groups=False): """ time complexity O(n) O(N+M*log(M)), N 为要排序的列表或集合内的元素数量, M 为要返回的元素数量。 删除count数量的value Arguments: by(str): 让排序按照外部条件排序, 可以先将权重插入redis然后再作为条件进行排序如(user_level_*) get(str): redis有一组user_name_*然后*是按照list里面的值, 按照排序取一个个key的value store(str): 保留sort之后的结果,可以设置expire过期时间作为结果缓存 alpha: 按照字符排序 desc: 逆序 Returns: result(list): 排序之后的list """ return self._redis.sort(key, start, num, by, get, desc, alpha, store, groups) def scan(self, cursor=0, match=None, count=None): """ time complexity O(1) 单次 增量迭代返回redis数据库里面的key,因为是增量迭代过程中返回可能会出现重复 Arguments: cursor(int): 游标 match(str): 匹配 count(int): 每次返回的key数量 Returns: result(set): 第一个是下次scan的游标,后面是返回的keys(list)当返回的游标为0代表遍历完整个redis """ return self._redis.scan(cursor, match, count) # { """===============================list-end====================================""" """===============================hash-start===================================""" # } @format_key() def hdel(self, key, *names): """ time complexity O(n) n为names长度 Return the value at ``key``, or None if the key doesn't exist Arguments: key (str): key names(list): hash里面的域 Returns: result (int): 成功删除的个数 """ return self._redis.hdel(key, *names) @format_key() def hexists(self, key, name): """ time complexity O(1) 判断key中是否有name域 """ return self._redis.hexists(key, name) @format_key() def hget(self, key, name): """ time complexity O(1) """ return self._redis.hget(key, name) @format_key() def hgetall(self, key): """ time complexity O(n) """ return self._redis.hgetall(key) @format_key() def hincrby(self, key, name, amount=1): """ time complexity O(1) amount可以为负数,且value值为整数才能使用否则返回错误 """ return self._redis.hincrby(key, name, amount) @format_key() def hincrbyfloat(self, key, name, 
amount=1.0): """ time complexity O(1) """ return self._redis.hincrbyfloat(key, name, amount) @format_key() def hkeys(self, key): """ time complexity O(n) """ return self._redis.hkeys(key) @format_key() def hlen(self, key): """ time complexity O(1) """ return self._redis.hlen(key) @format_key() def hset(self, key, name, value): """ time complexity O(1) """ return self._redis.hset(key, name, value) @format_key() def hsetnx(self, key, name, value): """ time complexity O(1) """ return self._redis.hsetnx(key, name, value) @format_key() def hmset(self, key, mapping): """ time complexity O(n) """ return self._redis.hmset(key, mapping) @format_key() def hmget(self, key, names, *args): """ time complexity O(n) """ return self._redis.hmget(key, names, *args) @format_key() def hvals(self, key): """ time complexity O(n) 返回hash表所有的value """ return self._redis.hvals(key) @format_key() def hstrlen(self, key, name): """ time complexity O(1) """ return self._redis.hstrlen(key, name) # { """=================================hash-end===================================""" """=================================set-start=================================""" # } @format_key() def sadd(self, key, *values): """ time complexity O(n) n为values长度 """ return self._redis.sadd(key, *values) @format_key() def scard(self, key): """ time complexity O(n) set长度 返回set大小 """ return self._redis.scard(key) @format_args() def sdiff(self, key, *args): """ time complexity O(n) N 是所有给定集合的成员数量之和 返回差集成员的列表。 """ return self._redis.sdiff(key, *args) @format_args() def sdiffstore(self, dest, keys, *args): """ time complexity O(n) N 是所有给定集合的成员数量之和 返回差集成员的数量。并将结果保存到dest这个set里面 """ return self._redis.sdiffstore(dest, keys, *args) @format_args() def sinter(self, key, *args): """ time complexity O(N * M), N 为给定集合当中基数最小的集合, M 为给定集合的个数。 返回交集数据的list """ return self._redis.sinter(key, *args) @format_args() def sinterstore(self, dest, keys, *args): """ time complexity O(n) N 是所有给定集合的成员数量之和 返回交集成员的数量。并将结果保存到dest这个set里面 """ 
return self._redis.sinterstore(dest, keys, *args) @format_key() def sismember(self, key, name): """ time complexity O(1) 判断name是否在key中 """ return self._redis.sismember(key, name) @format_key() def smembers(self, key): """ time complexity O(n) 返回set里面所有成员 """ return self._redis.smembers(key) @format_two_key() def smove(self, src, dst, value): """ time complexity O(1) 将value从src移动到dst原子性操作 """ return self._redis.smove(src, dst, value) @format_key() def spop(self, key, count=None): """ time complexity O(n) n 默认随机删除一条, 删除count条 """ return self._redis.spop(key, count) @format_key() def srandmember(self, key, number=None): """ time complexity O(n) n 默认随机返回一条, 返回number条 """ return self._redis.srandmember(key, number) @format_key() def srem(self, key, *values): """ time complexity O(n) n为values长度 移除key里面values """ return self._redis.srem(key, *values) @format_args() def sunion(self, keys, *args): """ time complexity O(N), N 是所有给定集合的成员数量之和 返回并集 """ return self._redis.sunion(keys, *args) @format_args() def sunionstore(self, dest, keys, *args): """ time complexity O(N), N 是所有给定集合的成员数量之和。 求并集并保存 """ return self._redis.sunionstore(dest, keys, *args) @format_key() def sscan(self, key, cursor=0, match=None, count=None): """ time complexity O(1) 同scan只是这个是set使用 """ return self._redis.sscan(key, cursor, match, count) # { """==================================set-end==================================""" """===============================SortedSet-start=============================""" # } @format_key() def zadd(self, key, mapping, nx=False, xx=False, ch=False, incr=False): """ time complexity O(M*log(N)), N 是有序集的基数, M 为成功添加的新成员的数量。 Arguments: mapping(dict): (value:score) XX(bool): 仅仅更新存在的成员,不添加新成员。 NX(bool): 不更新存在的成员。只添加新成员。 CH(bool): 修改返回值为发生变化的成员总数,原始是返回新添加成员的总数 (CH 是 changed 的意思)。 更改的元素是新添加的成员,已经存在的成员更新分数。 所以在命令中指定的成员有相同的分数将不被计算在内。 注:在通常情况下,ZADD返回值只计算新添加成员的数量。 INCR(bool): 当ZADD指定这个选项时,成员的操作就等同ZINCRBY命令,对成员的分数进行递增操作。 Returns: result(int): 成功插入数量 """ if self.cluster_flag: return 
self._redis.zadd(key, **mapping) return self._redis.zadd(key, mapping, nx, xx, ch, incr) @format_key() def zcard(self, key): """ time complexity O(1) 返回zset()基数 """ return self._redis.zcard(key) @format_key() def zcount(self, key, minz, maxz): """ time complexity O(log(N)), N 为有序集的基数。 返回score在min和max之间的value的个数 """ return self._redis.zcount(key, minz, maxz) @format_key() def zincrby(self, key, amount, value): """ time complexity O(log(N)), N 为有序集的基数。 amount 可以为负数 """ if self.cluster_flag: return self._redis.zincrby(key, value, amount) return self._redis.zincrby(key, amount, value) @format_key_keys() def zinterstore(self, dest, keys, aggregate=None): """ time complexity O(N*K)+O(M*log(M)), N 为给定 key 中基数最小的有序集, K 为给定有序集的数量, M 为结果集的基数。 求交集并按照aggregate做处理之后保存到dest。默认是求和 Arguments: aggregate(str):sum 和, min 最小值, max 最大值 返回新zset里面的value个数 """ return self._redis.zinterstore(dest, keys, aggregate) @format_key() def zrange(self, key, start, end, desc=False, withscores=False, score_cast_func=float): """ time complexity O(log(N)+M), N 为有序集的基数,而 M 为结果集的基数。 Arguments: start,有序集合索引起始位置(非分数) end,有序集合索引结束位置(非分数) desc,排序规则,默认按照分数从小到大排序 withscores,是否获取元素的分数,默认只获取元素的值 score_cast_func,对分数进行数据转换的函数 """ return self._redis.zrange(key, start, end, desc, withscores, score_cast_func) @format_key() def zrevrange(self, key, start, end, withscores=False, score_cast_func=float): """ time complexity O(log(N)+M), N 为有序集的基数,而 M 为结果集的基数。 Arguments: start,有序集合索引起始位置(非分数) end,有序集合索引结束位置(非分数) withscores,是否获取元素的分数,默认只获取元素的值 score_cast_func,对分数进行数据转换的函数 """ return self._redis.zrevrange(key, start, end, withscores, score_cast_func) @format_key() def zrangebyscore(self, key, minz, maxz, start=None, num=None, withscores=False, score_cast_func=float): """ time complexity O(log(N)+M), N 为有序集的基数,而 M 为结果集的基数。 有序集成员按 score 值递增(从小到大)次序排列。 """ return self._redis.zrangebyscore(key, minz, maxz, start, num, withscores, score_cast_func) @format_key() def zrevrangebyscore(self, key, minz, maxz, start=None, num=None, 
withscores=False, score_cast_func=float): """ time complexity O(log(N)+M), N 为有序集的基数,而 M 为结果集的基数。 有序集成员按 score 值递减(从大到小)次序排列。 """ return self._redis.zrevrangebyscore(key, minz, maxz, start, num, withscores, score_cast_func) @format_key() def zrangebylex(self, key, minz, maxz, start=None, num=None): """ time complexity O(log(N)+M), N 为有序集的基数,而 M 为结果集的基数。 有序集成员按 value 字典序递增(从小到大)次序排列。 """ return self._redis.zrangebylex(key, minz, maxz, start, num) @format_key() def zrevrangebylex(self, key, minz, maxz, start=None, num=None): """ time complexity O(log(N)+M), N 为有序集的基数,而 M 为结果集的基数。 有序集成员按 value 字典序递减(从大到小)次序排列。 """ return self._redis.zrevrangebylex(key, minz, maxz, start, num) @format_key() def zrank(self, key, value): """ time complexity O(log(N)) 查找zset里面这个value的rank排名从0开始 """ return self._redis.zrank(key, value) @format_key() def zrevrank(self, key, value): """ time complexity O(log(N)) 查找zset里面这个value的rank排名从0开始 """ return self._redis.zrevrank(key, value) @format_key() def zrem(self, key, *values): """ time complexity O(M*log(N)), N 为有序集的基数, M 为被成功移除的成员的数量 删除zset里面单个或者多个成员 """ return self._redis.zrem(key, *values) @format_key() def zremrangebylex(self, key, minz, maxz): """ time complexity O(log(N)+M), N 为有序集的基数,而 M 为被移除成员的数量。 按照字典增序范围删除 """ return self._redis.zremrangebylex(key, minz, maxz) @format_key() def zremrangebyrank(self, key, minz, maxz): """ time complexity O(log(N)+M), N 为有序集的基数,而 M 为被移除成员的数量。 按照rank范围删除 """ return self._redis.zremrangebyrank(key, minz, maxz) @format_key() def zremrangebyscore(self, key, minz, maxz): """ time complexity O(log(N)+M), N 为有序集的基数,而 M 为被移除成员的数量。 按照score范围删除 """ return self._redis.zremrangebyscore(key, minz, maxz) @format_key() def zscore(self, key, value): """ time complexity O(log(N)) 查找zset里面这个value的score排名从0开始 """ return self._redis.zscore(key, value) @format_key_keys() def zunionstore(self, dest, keys, aggregate=None): """ time complexity O(N)+O(M log(M)), N 为给定有序集基数的总和, M 为结果集的基数。 求并集保存 """ return 
self._redis.zunionstore(dest, keys, aggregate) @format_key() def zscan(self, key, cursor=0, match=None, count=None, score_cast_func=float): """ time complexity O(1) 同SCAN """ return self._redis.zscan(key, cursor, match, count, score_cast_func) def zlexcount(self, key, minz, maxz): """ time complexity O(log(N)),其中 N 为有序集合包含的元素数量。 min -负无限 [闭空间不包括自己 (开空间包括自己 max +正无限 [a, (c """ return self._redis.zlexcount(key, minz, maxz) # { """===============================SortedSet-end=================================""" """===============================HyperLogLog-start===============================""" # } @format_key() def pfadd(self, key, *values): """ time complexity O(n) """ return self._redis.pfadd(key, *values) @format_args() def pfcount(self, *sources): """ time complexity O(1) 计算key的基数 """ return self._redis.pfcount(*sources) @format_args() def pfmerge(self, dest, *sources): """ time complexity O(n) 其中 N 为被合并的 HyperLogLog 数量,不过这个命令的常数复杂度比较高 合并HyperLogLog """ return self._redis.pfmerge(dest, *sources) # { """===============================HyperLogLog-end=================================""" """==================================GEO-start====================================""" # } @format_key() def geoadd(self, key, *values): """ time complexity O(log(N)) 每添加一个元素的复杂度为 O(log(N)) , 其中 N 为键里面包含的位置元素数量。 """ return self._redis.geoadd(key, *values) @format_key() def geopos(self, key, *values): """ time complexity O(log(N)) 从键里面返回所有给定位置元素的位置(经度和纬度)。 """ return self._redis.geopos(key, *values) @format_key() def geohash(self, key, *values): """ time complexity O(log(N)) 命令返回的 geohash 的位置与用户给定的位置元素的位置一一对应 """ return self._redis.geohash(key, *values) @format_key() def geodist(self, key, place1, place2, unit=None): """ time complexity O(log(N)) 返回两个给定位置之间的距离。 Argument: unit : m: 米,km: 千米,mi: 英里,ft: 英尺 """ return self._redis.geodist(key, place1, place2, unit) @format_key() def georadius(self, key, longitude, latitude, radius, unit=None, withdist=False, withcoord=False, withhash=False, 
count=None, sort=None, store=None, store_dist=None): """ time complexity O(N+log(M)), 其中 N 为指定半径范围内的位置元素数量, 而 M 则是被返回位置元素的数量。 以给定的经纬度为中心, 返回键包含的位置元素当中, 与中心的距离不超过给定最大距离的所有位置元素。 Argument: longitude: 经度 latitude: 纬度 radius: 距离 unit: 距离单位 withdist: 在返回位置元素的同时, 将位置元素与中心之间的距离也一并返回。 距离的单位和用户给定的范围单位保持一致。 withcoord: 将位置元素的经度和维度也一并返回 withhash: 以 52 位有符号整数的形式, 返回位置元素经过原始 geohash 编码的有序集合分值。 这个选项主要用于底层应用或者调试, 实际中的作用并不大。 sort: 根据中心的位置排序 ASC,DESC count: 取前多少个 store: 保存 store_dist: 存储地名和距离 Return: list(list) [['Foshan', 109.4922], ['Guangzhou', 105.8065]] """ return self._redis.georadius(key, longitude, latitude, radius, unit, withdist, withcoord, withhash, count, sort, store, store_dist) @format_key() def georadiusbymember(self, key, member, radius, unit=None, withdist=False, withcoord=False, withhash=False, count=None, sort=None, store=None, store_dist=None): """ time complexity O(N+log(M)), 其中 N 为指定半径范围内的位置元素数量, 而 M 则是被返回位置元素的数量。 以给定的经纬度为中心, 返回键包含的位置元素当中, 与中心的距离不超过给定最大距离的所有位置元素。 Argument: member: 位置元素 radius: 距离 unit: 距离单位 withdist: 在返回位置元素的同时, 将位置元素与中心之间的距离也一并返回。 距离的单位和用户给定的范围单位保持一致。 withcoord: 将位置元素的经度和维度也一并返回 withhash: 以 52 位有符号整数的形式, 返回位置元素经过原始 geohash 编码的有序集合分值。 这个选项主要用于底层应用或者调试, 实际中的作用并不大。 sort: 根据中心的位置排序 ASC,DESC count: 取前多少个 store: 保存 store_dist: 存储地名和距离 Return: list(list) [['Foshan', 109.4922], ['Guangzhou', 105.8065]] """ return self._redis.georadiusbymember(key, member, radius, unit, withdist, withcoord, withhash, count, sort, store, store_dist) # { """==================================GEO-end======================================"""
class RedisBackendTest(TestCase):
    """Integration tests for RedisBackend against a live local Redis.

    The backend stores source image names and their thumbnail variants in
    Redis hashes; each test verifies behaviour both through the backend API
    and by inspecting Redis directly, then cleans up the keys it created.
    """

    def setUp(self):
        self.backend = RedisBackend()
        self.redis = StrictRedis()  # direct client used to verify state

    def test_get_source_key(self):
        self.assertEqual(self.backend.get_source_key("a.jpg"),
                         "djthumbs-test:sources:a.jpg")

    def test_get_thumbnail_key(self):
        self.assertEqual(self.backend.get_thumbnail_key("a.jpg"),
                         "djthumbs-test:thumbnails:a.jpg")

    def test_add_delete_source(self):
        source_name = "test-thumbnail.jpg"
        source_key = self.backend.get_source_key(source_name)
        self.backend.add_source(source_name)
        self.assertTrue(self.redis.hexists(source_key, source_name))
        self.backend.delete_source(source_name)
        self.assertFalse(self.redis.hexists(source_key, source_name))

    def test_get_source(self):
        source_name = "test-thumbnail.jpg"
        source_key = self.backend.get_source_key(source_name)
        self.redis.hset(source_key, source_name, source_name)
        self.assertEqual(self.backend.get_source(source_name), source_name)
        # Delete Source
        self.redis.hdel(source_key, source_name)

    def test_add_delete_thumbnail(self):
        source_name = "test-thumbnail.jpg"
        size = "small"
        thumbnail_key = self.backend.get_thumbnail_key(source_name)
        self.backend.add_source(source_name)
        self.backend.add_thumbnail(source_name, size, "test-thumbnail_small.jpg")
        self.assertTrue(self.redis.hexists(thumbnail_key, size))
        self.backend.delete_thumbnail(source_name, size)
        self.assertFalse(self.redis.hexists(thumbnail_key, size))
        # Delete Source
        self.redis.hdel(self.backend.get_source_key(source_name), source_name)

    def test_get_thumbnail(self):
        source_name = "test-thumbnail.jpg"
        self.backend.add_source(source_name)
        self.backend.add_thumbnail(source_name, "small", "test-thumbnail_small.jpg")
        self.assertEqual(
            self.backend.get_thumbnail(source_name, "small"),
            ImageMeta(source_name, "test-thumbnail_small.jpg", "small"),
        )
        self.backend.add_thumbnail(source_name, "large", "test-thumbnail_large.jpg")
        expected = ["test-thumbnail_large.jpg", "test-thumbnail_small.jpg"]
        result = [image_meta.name
                  for image_meta in self.backend.get_thumbnails(source_name)]
        # BUGFIX: the original asserted result.sort() == expected.sort();
        # list.sort() returns None, so it compared None == None and could
        # never fail. Compare the sorted contents instead.
        self.assertEqual(sorted(result), expected)
        # Delete Source & Thumbnails
        thumbnail_key = self.backend.get_thumbnail_key(source_name)
        self.redis.hdel(self.backend.get_source_key(source_name), source_name)
        self.redis.hdel(thumbnail_key, "small")
        self.redis.hdel(thumbnail_key, "large")
class RedisJobStore(BaseJobStore):
    """
    Stores jobs in a Redis database. Any leftover keyword arguments are
    directly passed to redis's StrictRedis.

    Plugin alias: ``redis``

    :param int db: the database number to store jobs in
    :param str jobs_key: key to store jobs in
    :param str run_times_key: key to store the jobs' run times in
    :param int pickle_protocol: pickle protocol level to use (for
        serialization), defaults to the highest available
    """

    def __init__(self, db=0, jobs_key='apscheduler.jobs',
                 run_times_key='apscheduler.run_times',
                 pickle_protocol=pickle.HIGHEST_PROTOCOL, **connect_args):
        super(RedisJobStore, self).__init__()

        if db is None:
            raise ValueError('The "db" parameter must not be empty')
        if not jobs_key:
            raise ValueError('The "jobs_key" parameter must not be empty')
        if not run_times_key:
            raise ValueError('The "run_times_key" parameter must not be empty')

        self.pickle_protocol = pickle_protocol
        self.jobs_key = jobs_key
        self.run_times_key = run_times_key
        self.redis = StrictRedis(db=int(db), **connect_args)

    def lookup_job(self, job_id):
        job_state = self.redis.hget(self.jobs_key, job_id)
        return self._reconstitute_job(job_state) if job_state else None

    def get_due_jobs(self, now):
        timestamp = datetime_to_utc_timestamp(now)
        job_ids = self.redis.zrangebyscore(self.run_times_key, 0, timestamp)
        if job_ids:
            job_states = self.redis.hmget(self.jobs_key, *job_ids)
            return self._reconstitute_jobs(six.moves.zip(job_ids, job_states))
        return []

    def get_next_run_time(self):
        next_run_time = self.redis.zrange(self.run_times_key, 0, 0, withscores=True)
        if next_run_time:
            return utc_timestamp_to_datetime(next_run_time[0][1])

    def get_all_jobs(self):
        job_states = self.redis.hgetall(self.jobs_key)
        jobs = self._reconstitute_jobs(six.iteritems(job_states))
        # Jobs without a next_run_time are paused; sort them to the end.
        paused_sort_key = datetime(9999, 12, 31, tzinfo=utc)
        return sorted(jobs, key=lambda job: job.next_run_time or paused_sort_key)

    def add_job(self, job):
        if self.redis.hexists(self.jobs_key, job.id):
            raise ConflictingIdError(job.id)

        with self.redis.pipeline() as pipe:
            pipe.multi()
            pipe.hset(self.jobs_key, job.id,
                      pickle.dumps(job.__getstate__(), self.pickle_protocol))
            if job.next_run_time:
                # NOTE(review): positional (score, member) zadd is the
                # redis-py 2.x API; redis-py >= 3 expects a mapping —
                # confirm the pinned client version.
                pipe.zadd(self.run_times_key,
                          datetime_to_utc_timestamp(job.next_run_time), job.id)
            pipe.execute()

    def update_job(self, job):
        if not self.redis.hexists(self.jobs_key, job.id):
            raise JobLookupError(job.id)

        with self.redis.pipeline() as pipe:
            pipe.hset(self.jobs_key, job.id,
                      pickle.dumps(job.__getstate__(), self.pickle_protocol))
            if job.next_run_time:
                pipe.zadd(self.run_times_key,
                          datetime_to_utc_timestamp(job.next_run_time), job.id)
            else:
                pipe.zrem(self.run_times_key, job.id)
            pipe.execute()

    def remove_job(self, job_id):
        if not self.redis.hexists(self.jobs_key, job_id):
            raise JobLookupError(job_id)

        with self.redis.pipeline() as pipe:
            pipe.hdel(self.jobs_key, job_id)
            pipe.zrem(self.run_times_key, job_id)
            pipe.execute()

    def remove_all_jobs(self):
        with self.redis.pipeline() as pipe:
            pipe.delete(self.jobs_key)
            pipe.delete(self.run_times_key)
            pipe.execute()

    def shutdown(self):
        self.redis.connection_pool.disconnect()

    def _reconstitute_job(self, job_state):
        """Rebuild a Job instance from its pickled state dict."""
        job_state = pickle.loads(job_state)
        job = Job.__new__(Job)
        job.__setstate__(job_state)
        job._scheduler = self._scheduler
        job._jobstore_alias = self._alias
        return job

    def _reconstitute_jobs(self, job_states):
        """Rebuild jobs from (id, state) pairs, dropping unrestorable ones."""
        jobs = []
        failed_job_ids = []
        for job_id, job_state in job_states:
            try:
                jobs.append(self._reconstitute_job(job_state))
            # BUGFIX: was a bare ``except:`` which also swallowed
            # SystemExit and KeyboardInterrupt.
            except Exception:
                self._logger.exception('Unable to restore job "%s" -- removing it', job_id)
                failed_job_ids.append(job_id)

        # Remove all the jobs we failed to restore
        if failed_job_ids:
            with self.redis.pipeline() as pipe:
                pipe.hdel(self.jobs_key, *failed_job_ids)
                pipe.zrem(self.run_times_key, *failed_job_ids)
                pipe.execute()

        return jobs

    def __repr__(self):
        return '<%s>' % self.__class__.__name__
class DatabaseHandler(object):
    """Redis-backed storage for the bot: groups, RSS URL subscriptions and
    daily-liturgy subscriptions.

    Key layouts used throughout (all values live in hashes):
      * ``group:<chat_id>``
      * ``url:^<url>^``
      * ``user_url:<user_id>:chat_id:<chat_id>:^<url>^``
      * ``daily_liturgy:user_id:<user_id>:chat_id:<chat_id>``
    """

    def __init__(self, db):
        # ``password`` is a module-level secret defined elsewhere in the file.
        self.redis = StrictRedis(host='localhost',
                                 port=6379,
                                 password=password,
                                 charset='utf-8',
                                 decode_responses=True,
                                 db=db)

    def _find(self, search):
        """Return all key names matching ``search`` via cursor-based SCAN."""
        cursor = None
        names = []
        while cursor != 0:
            if cursor is None:
                cursor = 0  # first iteration: SCAN starts at cursor 0
            fined = self.redis.scan(cursor, str(search))
            cursor = fined[0]
            names.extend(fined[1])
        return sorted(set(names))

    def find_names(self, find):
        """Find key names containing ``find``."""
        search = '*' + find + '*'
        return self._find(search)

    def exist_name(self, name):
        """Whether key ``name`` exists."""
        return bool(self.redis.exists(name))

    def exist_key(self, name, key):
        """Whether hash ``name`` has field ``key``."""
        return self.redis.hexists(name, key)

    def get_value_name_key(self, name, key):
        """Value of field ``key`` in hash ``name``."""
        return self.redis.hget(name, key)

    def get_all_keys_for_name(self, name):
        """The whole hash ``name`` as a dict, or None when empty/missing."""
        keys = self.redis.hgetall(name)
        return keys if keys else None

    def get_keys_for_name(self, name, *args):
        """Values of the given fields of hash ``name``, or None when empty."""
        keys = self.redis.hmget(name, *args)
        return keys if keys else None

    def set_name_key(self, name, mapping: dict):
        """Write ``mapping`` into hash ``name``; True if the key now exists."""
        self.redis.hset(name=name, mapping=mapping)
        return bool(self.exist_name(name))

    def del_names(self, names: list):
        """Delete every key in ``names``; return the per-key delete counts."""
        return [self.redis.delete(name) for name in names]

    def exist_group(self, chat_id):
        """Whether a group hash exists for ``chat_id``."""
        name = 'group:' + str(chat_id)
        return self.exist_name(name)

    def update_owner(self, chat_id, user_id):
        """Rename every user_url key of ``chat_id`` so that its user_id
        component becomes ``user_id``."""
        names = self._find('user_url:*' + str(chat_id) + '*')
        for name in names:
            name_update = name.split(':')
            name_update[1] = str(user_id)
            # BUGFIX: parts were re-joined with ';' instead of ':', producing
            # keys that no later 'user_url:*' search could ever match.
            name_update = ':'.join(name_update)
            self.redis.rename(name, name_update)

    def update_group(self, chat_id, chat_name, chat_title, user_id,
                     update_owner=None):
        """Create or update the group hash; optionally reassign ownership."""
        name = 'group:' + str(chat_id)
        mapping = {
            'chat_adm': str(user_id),
            'chat_id': str(chat_id),
            'chat_lock': 'True',
            'chat_name': chat_name,
            'chat_quiet': 'True',
            'chat_title': str(chat_title),
            'chat_welcome': 'False',
            'chat_goodbye': 'False'
        }
        if update_owner:
            self.update_owner(chat_id, user_id)
        return self.set_name_key(name=name, mapping=mapping)

    def exist_url(self, url):
        """Whether the url hash exists."""
        name = 'url:^' + str(url) + '^'
        return self.exist_name(name)

    def update_url(self, url, last_update='2000-01-01 00:00:00+00:00',
                   last_url='http://www.exemplo.com'):
        """Register or update a url with last_url and last_update."""
        name = 'url:^' + str(url) + '^'
        mapping = {'last_update': str(last_update), 'last_url': last_url}
        return self.set_name_key(name=name, mapping=mapping)

    def exist_url_to_chat(self, user_id, chat_id, url):
        """Whether ``url`` is already registered for this user/chat pair."""
        name = 'user_url:' + str(user_id) + ':chat_id:' + str(
            chat_id) + ':^' + str(url) + '^'
        return self.exist_name(name)

    def set_url_to_chat(self, chat_id, chat_name, url, user_id):
        """Register ``url`` for a user or group; False if already present."""
        if not self.exist_url(url):
            self.update_url(url=url)
        if not self.exist_url_to_chat(user_id, chat_id, url):
            name = 'user_url:' + str(user_id) + ':chat_id:' + str(
                chat_id) + ':^' + str(url) + '^'
            mapping = {
                'chat_id': str(chat_id),
                'chat_name': chat_name,
                'user_id': str(user_id),
                'disable': 'False'
            }
            return self.set_name_key(name=name, mapping=mapping)
        return False

    def get_update_url(self, url):
        """Return {'last_update', 'last_url'} for ``url``, or False."""
        name = 'url:^' + str(url) + '^'
        if self.exist_name(name):
            keys = self.get_all_keys_for_name(name)
            last_update = keys.get('last_update')
            last_url = keys.get('last_url')
            return {'last_update': last_update, 'last_url': last_url}
        return False

    @staticmethod
    def extract_url_from_names(names):
        """Extract the ^-delimited urls out of key names; () when empty."""
        if names:
            uncompress_name = [name.split('^') for name in names]
            urls = sorted(set(['{}'.format(url[1]) for url in uncompress_name]))
            return urls
        return ()

    def get_chat_urls(self, user_id):
        """Return {user_id, chat_name, url, chat_id} dicts for every url
        registered by ``user_id``."""
        names = self._find('user_url:' + str(user_id) + ':*')
        chat_urls = []
        for name in names:
            keys = self.get_all_keys_for_name(name)
            chat_id = keys.get('chat_id')
            chat_name = keys.get('chat_name')
            user_id = keys.get('user_id')
            url = self.extract_url_from_names([name])[0]
            mapping = {
                'user_id': str(user_id),
                'chat_name': chat_name,
                'url': url,
                'chat_id': str(chat_id)
            }
            chat_urls.append(mapping)
        return chat_urls

    def get_urls_activated(self):
        """All urls whose subscription is active."""
        active_keys = self.get_name_urls_activated()
        return self.extract_url_from_names(active_keys)

    def get_name_urls_activated(self, url=None):
        """Key names of active subscriptions (optionally for one url)."""
        names = self._find('user_url*') if not url else self._find(
            'user_url*' + url + '*')
        return sorted(
            set([
                name for name in names
                if self.get_value_name_key(name, 'disable') == 'False'
            ]))

    def get_name_urls_deactivated(self):
        """Key names of disabled subscriptions."""
        names = self._find('user_url*')
        return sorted(
            set([
                name for name in names
                if self.get_value_name_key(name, 'disable') == 'True'
            ]))

    def set_user_daily_liturgy(self, chat_id, chat_name, user_id):
        """Subscribe a user or group to the daily liturgy."""
        name = 'daily_liturgy:user_id:' + str(user_id) + ':chat_id:' + str(
            chat_id)
        mapping = {
            'chat_id': str(chat_id),
            'chat_name': chat_name,
            'user_id': str(user_id),
            'disable': 'False',
            'last_send': str(DateHandler.get_datetime_now())
        }
        return self.set_name_key(name=name, mapping=mapping)

    def set_last_send_daily_liturgy(self, chat_id):
        """Stamp the daily-liturgy subscription of ``chat_id`` as sent now."""
        # BUGFIX: the original indexed [0] before checking for emptiness,
        # raising IndexError when no subscription exists.
        names = self._find('daily_liturgy*chat_id:' + str(chat_id))
        if not names:
            return False
        # BUGFIX: used DateHandler.datetime.now() while every sibling
        # method uses DateHandler.get_datetime_now().
        mapping = {'last_send': str(DateHandler.get_datetime_now())}
        return self.set_name_key(name=names[0], mapping=mapping)

    def get_chat_info_daily_liturgy(self, chat_id):
        """Return {chat_name, disable, last_send} for each subscription."""
        names = self._find('daily_liturgy:*chat_id:' + str(chat_id))
        chat_info = []
        for name in names:
            keys = self.get_all_keys_for_name(name)
            chat_info.append({
                'chat_name': keys.get('chat_name'),
                'disable': keys.get('disable'),
                'last_send': keys.get('last_send'),
            })
        return chat_info

    @staticmethod
    def extract_chat_id_from_names(names):
        """Extract the trailing chat_id component of key names; () if empty."""
        if names:
            uncompress_name = [name.split(':') for name in names]
            chat_id = sorted(
                set(['{}'.format(chat_id[-1]) for chat_id in uncompress_name]))
            return chat_id
        return ()

    def get_chat_id_activated(self):
        """chat_ids with an active daily-liturgy subscription."""
        active_keys = self.get_name_chat_id_activated()
        return self.extract_chat_id_from_names(active_keys)

    def get_chat_id_deactivated(self):
        """chat_ids with a disabled daily-liturgy subscription."""
        active_keys = self.get_name_chat_id_deactivated()
        return self.extract_chat_id_from_names(active_keys)

    def get_name_chat_id_activated(self):
        """Key names where 'disable' is not 'True'."""
        names = self._find('daily_liturgy*')
        return sorted(
            set([
                name for name in names
                if not self.get_value_name_key(name, 'disable') == 'True'
            ]))

    def get_name_chat_id_deactivated(self):
        """Key names where 'disable' == 'True'."""
        names = self._find('daily_liturgy*')
        return sorted(
            set([
                name for name in names
                if self.get_value_name_key(name, 'disable') == 'True'
            ]))

    def activated_all_chat_id(self):
        """Re-enable every daily-liturgy subscription."""
        for name in self._find('daily_liturgy*'):
            self.set_name_key(name, {'disable': 'False'})
        return True

    def activated_all_urls(self):
        """Re-enable every url subscription."""
        for name in self._find('user_url*'):
            self.set_name_key(name, {'disable': 'False'})
        return True

    def get_chat_id_for_chat_name(self, user_id, chat_name):
        """chat_id of the user's subscription named ``chat_name``, or None."""
        names = self._find('daily_liturgy:*' + str(user_id) + '*')
        for name in names:
            chat_name_db = self.get_value_name_key(name, 'chat_name')
            chat_id_db = self.get_value_name_key(name, 'chat_id')
            if chat_name_db == chat_name and chat_id_db:
                return chat_id_db
        return None

    def disable_chat_id_daily_liturgy(self, chat_id):
        """Disable the daily-liturgy subscriptions of ``chat_id``."""
        names = self._find('daily_liturgy:*chat_id:' + str(chat_id) + '*')
        mapping = {'disable': 'True'}
        return [self.set_name_key(name=name, mapping=mapping)
                for name in names] if names else []

    def disable_url_chat(self, chat_id):
        """Disable the url subscriptions of ``chat_id``."""
        names = self._find('user_url:*chat_id:' + str(chat_id) + '*')
        mapping = {'disable': 'True'}
        return [self.set_name_key(name=name, mapping=mapping)
                for name in names] if names else []

    def del_url_for_chat(self, chat_id, url):
        """Delete ``url`` for ``chat_id``; True on success, else None."""
        names = self._find('user_url:*' + str(chat_id) + '*' + url + '*')
        result = self.del_names(names)
        # BUGFIX: guard against an empty result before indexing [0].
        return True if result and result[0] == 1 else None

    def del_chat_id_daily_liturgy(self, chat_id):
        """Delete the daily-liturgy subscription; True on success, else None."""
        names = self._find('daily_liturgy*chat_id:' + str(chat_id) + '*')
        result = self.del_names(names)
        # BUGFIX: guard against an empty result before indexing [0].
        return True if result and result[0] == 1 else None

    def list_admins(self):
        """Return the bot-admin list stored at key 'admins'."""
        return self.redis.lrange('admins', 0, self.redis.llen('admins'))

    def backup(self):
        """Trigger a redis SAVE at most once per day.

        Returns True when a backup was taken, False when the time-of-day
        window has not been reached, None when the date has not changed.
        """
        now = DateHandler.get_datetime_now()
        last_backup = self.get_value_name_key('backup', 'last_backup')
        last_backup = DateHandler.parse_datetime(last_backup)
        date_last_backup = DateHandler.date(last_backup)
        hour_last_backup = DateHandler.time(last_backup)
        if date_last_backup < DateHandler.date(now):
            if hour_last_backup <= DateHandler.time(now):
                self.set_name_key('backup', mapping={'last_backup': str(now)})
                self.redis.save()
                return True
            else:
                return False
class Dict: def __init__(self, name=None, conn=None, db=None, **kwargs): if db is not None and conn is not None: raise ConnectionAmbiguity('You must provide a redis connection OR a\ database, not both') if db is None and conn is None: self.conn = StrictRedis(db=0, **kwargs) elif db is None and not conn is None: self.conn = conn elif db is not None and conn is None: self.conn = StrictRedis(db=db, **kwargs) if name is None: self.name = uuid4() else: self.name = name def keys(self): return [x.decode('utf-8') for x in self.conn.hkeys(self.name)] def delete(self, val): if isinstance(val, str): if self.conn.hexists(self.name, val): self.conn.hdel(self.name, val) else: raise KeyError() else: raise TypeError def __hash__(self): return hash(self.name, self.conn) def __getitem__(self, item): if isinstance(item, str): if self.conn.hexists(self.name, item): return self.conn.hget(self.name, item).decode('utf-8') else: raise KeyError('deleting non-existent key') else: raise TypeError('key and value must be utf-8 strings') def __setitem__(self, key, value): if isinstance(key, str) and isinstance(value, str): if key != '' and value != '': self.conn.hset(self.name, key, value) elif key == '' and value == '': raise TypeError('key and value strings are empty') elif key == '': raise EmptyKey('key is an empty string') elif value == '': raise EmptyValue('value is an empty string') else: raise TypeError('key and value must be utf-8 strings') def __repr__(self): return '<datarediset.Dict(conn={})>'.format(self.conn) def __contains__(self, item): if isinstance(item, str): return self.conn.hexists(self.name, item) else: raise TypeError def __len__(self): return len(self.get_keys)
class MYRedis(object): def __init__(self, env): self.r = StrictRedis(**redis_conf[env]) #pool = ConnectionPool(**redis_conf[env]) #self.r = StrictRedis(connection_pool=pool,decode_responses=True) self.pipe = self.r.pipeline() def incr(self, k): if self.r.exists(k): return self.r.incr(k) return self.r.set(k, 1, ex=3600 * 24 * 7) def do_incr(self,key): with self.r.pipeline() as pipe: pipe.multi() access_n = pipe.incr(key + ':access') success_n = pipe.incr(key + ':success') pipe.execute() def rpush(self, k, v): _time = self.get_time() if not self.r.exists(k): self.r.rpush(k, v) self.r.expire(k, _time) return return self.r.rpush(k, v) def lpush(self, k, v): _time = self.get_time() if not self.r.exists(k): self.r.lpush(k, v) self.r.expire(k, _time) return return self.r.lpush(k, v) def lrange(self,key): return self.r.lrange(key,0,-1) @staticmethod def get_time(): today = datetime.date.today() tomorrow = today + datetime.timedelta(days=1) tomorrow_int = int(time.mktime(tomorrow.timetuple())) second = tomorrow_int - int(time.time()) + 300 return second def lpop(self, k): return self.r.lpop(k) def llen(self, k): return self.r.llen(k) def get(self, k): # print("redis_get","###",k,self.r.get(k)) return self.r.get(k) def get_json(self, k): res = self.get(k) if res is None: return {} try: return json.loads(res.decode('utf-8')) except Exception: return {} def get_int(self, k): res = self.r.get(k) try: return int(res) except Exception: return 0 def set(self, k, v, ex=3600 * 24 * 7): return self.r.set(k, v,ex=ex) def set_json(self, k, data): assert isinstance(data, dict) return self.set(k, json.dumps(data)) def del_key(self, k): return self.r.delete(k) def hset(self,name, key,value): return self.r.hset(name, key,value) def hget(self,name,key): return self.r.hget(name,key) def hexists(self,name, key): return self.r.hexists(name, key) def __del__(self): self.r.connection_pool.disconnect()
class LibRedis():
    """Small helper around StrictRedis; users are sharded across the 16
    redis databases by ``user % 16``."""

    def __init__(self, user=0):
        db = user % 16
        self.redis = StrictRedis(host=CONF['redis']['host'],
                                 port=CONF['redis']['port'], db=db)

    def strSet(self, key, value):
        """Set a string key/value pair; return True on success."""
        rv = self.redis.set(key, value)
        logger.debug('strSet:%s', rv)
        return rv == 1

    def strGet(self, key):
        """Return the string value for ``key``, or None when missing."""
        rv = self.redis.get(key)
        logger.debug(rv)
        return rv

    def hashMSet(self, name, map):
        """Store dict ``map`` (e.g. {"zcx": "111111"}) in hash ``name``;
        return True on success."""
        rv = self.redis.hmset(name, map)
        logger.info("write dict to redis, rv(%s)", rv)
        return rv == 1

    def hashSet(self, name, key, value):
        """Set one field of hash ``name``; True if a new field was created."""
        rv = self.redis.hset(name, key, value)
        return rv == 1

    def hashGet(self, name, key):
        """Return the value of field ``key`` in hash ``name``, or None."""
        rv = self.redis.hget(name, key)
        logger.info(rv)
        return rv

    def hashGetAll(self, name):
        """Return the whole hash ``name`` as a dict (empty when missing)."""
        rv = self.redis.hgetall(name)
        return rv

    def hashExists(self, name, key):
        """Return whether hash ``name`` has field ``key``."""
        rv = self.redis.hexists(name, key)
        logger.info(rv)
        return rv == 1

    def hashDel(self, name, *keys):
        """Delete one or more fields from hash ``name``; return the number
        actually removed (0 when none existed)."""
        rv = self.redis.hdel(name, *keys)
        logger.info(" hashDel return: %d", rv)
        return rv

    def hashincr(self, name, key, amount=1):
        """Increment field ``key`` of hash ``name`` by ``amount``; return
        the value after the increment."""
        rv = self.redis.hincrby(name, key, amount)
        return rv

    def hashHlen(self, name):
        """Return the number of fields in hash ``name`` (0 when missing)."""
        return self.redis.hlen(name)

    def setAdd(self, name, value):
        """Add ``value`` to set ``name``; True if it was newly added."""
        rv = self.redis.sadd(name, value)
        logger.info(rv)
        return rv > 0

    def setCard(self, name):
        """Return the number of members in set ``name``."""
        num = self.redis.scard(name)
        logger.info("setCard return(%d)", num)
        return num

    def setSmembers(self, name):
        """Return all members of set ``name``."""
        rv = self.redis.smembers(name)
        logger.info(rv)
        return rv

    def setSpop(self, name):
        """Remove and return a random member of set ``name``."""
        rv = self.redis.spop(name)
        logger.info(rv)
        return rv

    def setSunionstore(self, dst, source):
        """Copy set ``source`` to ``dst``; return the size of ``dst``.

        BUGFIX: the original also unioned in a literal key named 'temp';
        if a set called 'temp' ever existed, its members leaked into
        ``dst``, contradicting the documented copy semantics.
        """
        rv = self.redis.sunionstore(dst, source)
        logger.info(rv)
        return rv
print(redis.hsetnx('h', 'nx_data1', '2.5')) # 根据键获取值 print(redis.hget('h', '1').decode('utf-8')) # 根据多个键获取多个值 print(redis.hmget('h', ['1', 'nx_data'])) # 将对应键的值增加 非整型报错 增量非整型也报错 默认为1 print(redis.hincrby('h', 'nx_data', amount=1)) # 将对应键的值增加 允许浮点型 print(redis.hincrbyfloat('h', 'nx_data1', amount=1.5)) # 是否存在值 print(redis.hexists('h', 'jj')) print(redis.hexists('h', '1')) # 删除 不存在就返回0 返回删除的数量 print(redis.hdel('h', ['1'])) # 哈希表的长度 print(redis.hlen('h')) # 获取所有的键名 print(redis.hkeys('h')) # 获取所有的值 print(redis.hvals('h')) # 获取所有键值对
zrevrange(name, start, end, withscores=False) 返回key为name的zset(按score从大到小排序)中的index从start到end的所有元素 redis.zrevrange('grade', 0, 3) [b'Bob', b'Mike', b'Amy', b'James'] zrangebyscore(name, min, max, start=None, num=None, withscores=False) 返回key为name的zset中score在给定区间的元素 redis.zrangebyscore('grade', 80, 95) [b'Amy', b'James'] zcount(name, min, max) 返回key为name的zset中score在给定区间的数量 redis.zcount('grade', 80, 95) 2 zcard(name) 返回key为name的zset的元素个数 redis.zcard('grade') 3 zremrangebyrank(name, min, max) 删除key为name的zset中排名在给定区间的元素 redis.zremrangebyrank('grade', 0, 0) 1,即删除的元素个数 zremrangebyscore(name, min, max) 删除key为name的zset中score在给定区间的元素 redis.zremrangebyscore('grade', 80, 90) 1,即删除的元素个数 \Hash操作 方法 作用 示例 示例结果 hset(name, key, value) 向key为name的hash中添加映射 hset('price', 'cake', 5) 1,即添加的映射个数 hsetnx(name, key, value) 向key为name的hash中添加映射,如果映射键名不存在 hsetnx('price', 'book', 6) 1,即添加的映射个数 hget(name, key) 返回key为name的hash中field对应的value redis.hget('price', 'cake') 5 hmget(name, keys, *args) 返回key为name的hash中各个键对应的value redis.hmget('price', ['apple', 'orange']) [b'3', b'7'] hmset(name, mapping) 向key为name的hash中批量添加映射 redis.hmset('price', {'banana': 2, 'pear': 6}) True hincrby(name, key, amount=1) 将key为name的hash中映射的value增加amount redis.hincrby('price', 'apple', 3) 6,修改后的值 hexists(name, key) key为name的hash中是否存在键名为key的映射 redis.hexists('price', 'banana') True hdel(name, *keys) 从key为name的hash中删除键名为key的映射 redis.hdel('price', 'banana') True hlen(name) 从key为name的hash中获取映射个数 redis.hlen('price') 6 hkeys(name) 从key为name的hash中获取所有映射键名 redis.hkeys('price') [b'cake', b'book', b'banana', b'pear'] hvals(name) 从key为name的hash中获取所有映射键值 redis.hvals('price') [b'5', b'6', b'2', b'6'] hgetall(name) 从key为name的hash中获取所有映射键值对 redis.hgetall('price') {b'cake': b'5', b'book': b'6', b'orange': b'7', b'pear': b'6'} \RedisDump # redis-load #将数据导入到数据库中 redis-load -h # 获取帮助信息 < redis_data.json redis-load -u redis://@localhost:6379 # 将json数据导入数据库中 #redis-dump #将数据库信息导出 redis-dump -h # 获取帮助信息
class RedisLRU(object):
    """LRU-style cache on top of Redis.

    Per namespace, values live in a hash ``cache_values_<ns>`` and a
    sorted set ``cache_keys_<ns>`` scored by last-access time; when the
    sorted set reaches the namespace size, the oldest entries are evicted.
    """

    def __init__(self, redis=None, **kwargs):
        """Use the given redis client, or build one from **kwargs."""
        if redis is not None:
            self._redis = redis
        else:
            self._redis = Redis(**kwargs)
        # namespace name -> maximum number of cached entries
        self.namespaces = {"default": 10000}

    def setup_namespace(self, namespace, size):
        """Set the LRU Size for a namespace. """
        self.namespaces[namespace] = int(size)

    def _serialize(self, s):
        # Pass-through; JSON serialization intentionally disabled.
        # return json.dumps(s)
        return s

    def _unserialize(self, s):
        # Pass-through; JSON deserialization intentionally disabled.
        # s = s.decode("utf-8")
        # return json.loads(s)
        return s

    def _size(self, namespace):
        """Configured capacity of *namespace* (KeyError if unknown)."""
        return self.namespaces[namespace]

    def _hit_store(self, namespace):
        """Name of the sorted set tracking access times for *namespace*."""
        if namespace not in self.namespaces:
            raise KeyError("invalid namespace")
        return "cache_keys_{}".format(namespace)

    def _value_store(self, namespace):
        """Name of the hash holding cached values for *namespace*."""
        if namespace not in self.namespaces:
            raise KeyError("invalid namespace")
        return "cache_values_{}".format(namespace)

    def _expire_old(self, namespace):
        """Evict the least-recently-used entries once the store is full."""
        hits = self._hit_store(namespace)
        size = self._size(namespace)
        count = self._redis.zcard(hits)
        if count >= size:
            values = self._value_store(namespace)
            # Lowest scores = oldest accesses; drop them from both stores.
            items = self._redis.zrange(hits, 0, count - size)
            # NOTE(review): logging evictions at ERROR level looks like
            # leftover debugging -- consider logger.debug.
            logger.error(items)
            self._redis.zremrangebyrank(hits, 0, count - size)
            self._redis.hdel(values, *items)

    def clear(self, namespace="default"):
        """Clear the Cache. """
        hits = self._hit_store(namespace)
        values = self._value_store(namespace)
        self._redis.delete(hits, values)

    def clearAll(self):
        """Clear all known namespaces. """
        # FIX: dict.iterkeys() is Python 2 only; iterate a snapshot instead.
        for k in list(self.namespaces):
            self.clear(k)

    def store(self, key, value, namespace="default"):
        """Store a key value pair in cache.

        New keys trigger an eviction pass first; existing keys are
        overwritten and their access time refreshed.
        """
        values = self._value_store(namespace)
        hits = self._hit_store(namespace)
        if not self._redis.hexists(values, key):
            # Only a brand-new key can push the store over capacity.
            self._expire_old(namespace)
        # Identical write path for new and existing keys (deduplicated
        # from two byte-identical if/else branches).
        self._redis.hset(values, key, self._serialize(value))
        self._redis.zadd(hits, time.time(), key)

    def get(self, key, namespace="default"):
        """Get a value from the cache. returns none if the key is not found. """
        values = self._value_store(namespace)
        value = self._redis.hget(values, key)
        if value:
            # Refresh the access time on every hit.
            hits = self._hit_store(namespace)
            self._redis.zadd(hits, time.time(), key)
            return self._unserialize(value)
        return None

    def expire(self, key, namespace="default"):
        """Expire (invalidate) a key from the cache. """
        values = self._value_store(namespace)
        if self._redis.hexists(values, key):
            hits = self._hit_store(namespace)
            self._redis.hdel(values, key)
            self._redis.zrem(hits, key)
# Candidate fields on a (Radar) entry that are turned into "<key>:<value>" labels.
potential_label_keys = ['classification', 'reproducible', 'status']
for key in potential_label_keys:
    # Only label non-empty fields; assumes entry values are strings -- TODO confirm.
    if key in entry and len(entry[key]) > 0:
        # Collapse any "duplicate of"/"dup of"/"dupe of" wording into one label value.
        if "duplicate of" in entry[key] or "dup of" in entry[key] or "dupe of" in entry[key]:
            label_value = "duplicate"
        else:
            label_value = entry[key]
        label = u"{}:{}".format(key, label_value.lower())
        # should_add_given_labels / labels / all_labels come from earlier in this file.
        if should_add_given_labels(label, all_labels):
            labels.add(label)
            all_labels.add(label)
data['labels'] = list(labels)
# RADARS_KEY hash maps a radar_id to the id of an already-created issue.
if r.hexists(RADARS_KEY, radar_id):
    # Update the Radar
    issue_id = r.hget(RADARS_KEY, radar_id)
    if 'resolved' in entry and len(entry['resolved']) > 0:
        # Resolved radars close the tracked issue.
        data['state'] = 'closed'
        comment_body = "Resolved: {resolved}\nModified: {modified}".format(**entry)
    else:
        comment_body = "Modified: {modified}".format(**entry)
    issue_url = issues_url + "/" + issue_id
    comment_url = issues_url + "/" + issue_id + "/comments"
    # Push the updated labels/state to the existing issue.
    requests.patch(issue_url, data=json.dumps(data), headers=HEADERS)
    # NOTE(review): the fragment is truncated here in the visible source.
    comment_data = {
        'body': comment_body
class LibRedis:
    """Convenience wrapper around StrictRedis.

    Every key is transparently prefixed with ``key_prefix``; methods
    decorated with ``wraps_set_expire`` (defined elsewhere in this file)
    refresh a default TTL after the operation. Byte replies are decoded
    to ``str`` where noted.
    """

    # Default prefix prepended to every key (overridable via __init__).
    key_prefix = 'RAWE_'
    # Underlying StrictRedis connection object.
    obj_redis = None
    # Default expire time: 3 days, in seconds.
    DEFAULT_EXPIRE = 259200

    def __init__(self, host='127.0.0.1', port=6379, db=0, prefix=None,
                 charset='utf-8'):
        """Connect to Redis and optionally override the key prefix.

        NOTE(review): ``return None`` inside __init__ does not abort
        construction -- with a falsy host/port the instance is simply
        left without a connection.
        """
        if not host or not port:
            return None
        if prefix:
            self.key_prefix = prefix.strip()
        # construct
        # FIX: honor the caller-supplied charset (was hard-coded 'utf-8').
        self.obj_redis = StrictRedis(
            host=host, port=port, db=db, charset=charset)

    def key_make(self, keyname=None):
        """Return *keyname* with the instance prefix applied, or None."""
        if not keyname:
            return None
        return self.key_prefix + str(keyname).strip()

    def set_expire(self, keyname=None):
        """Apply DEFAULT_EXPIRE to *keyname* (used by the decorator)."""
        if not keyname:
            return None
        return self.obj_redis.expire(self.key_make(keyname),
                                     self.DEFAULT_EXPIRE)

    # --------------------------------------------------------
    # String
    # --------------------------------------------------------
    @wraps_set_expire
    def set(self, keyname=None, value=None):
        """SET: store *value* under *keyname*, overwriting any old value."""
        if not keyname or value is None:
            return None
        keyname = self.key_make(keyname.strip())
        if isinstance(value, str):
            value = value.strip()
        return self.obj_redis.set(keyname, value)

    def get(self, keyname=None):
        """GET: return the decoded value, or None if the key is missing."""
        if not keyname:
            return None
        keyname = self.key_make(keyname.strip())
        result = self.obj_redis.get(keyname)
        return None if not result else bytes.decode(result)

    def delete(self, keyname=None):
        """DEL: remove the key; returns the number of keys deleted."""
        if not keyname:
            return None
        keyname = self.key_make(keyname.strip())
        return self.obj_redis.delete(keyname)

    @wraps_set_expire
    def append(self, keyname=None, value=None):
        """APPEND: append *value* to the string at *keyname*.

        Creates the key if missing. Returns the resulting string length.
        """
        if not keyname or value is None:
            return None
        keyname = self.key_make(keyname.strip())
        if isinstance(value, str):
            value = value.strip()
        else:
            value = str(value)
        return self.obj_redis.append(keyname, value)

    @wraps_set_expire
    def incr(self, keyname=None, expire=None):
        """INCR: increment by 1 (key initialised to 0 if missing).

        NOTE(review): the *expire* parameter is accepted but unused.
        """
        if not keyname:
            return None
        keyname = self.key_make(keyname.strip())
        return self.obj_redis.incr(keyname, 1)

    @wraps_set_expire
    def incrBy(self, keyname=None, amount=1):
        """INCRBY: increment by *amount* (clamped to >= 0; non-int -> 1)."""
        if not keyname or not amount:
            return None
        keyname = self.key_make(keyname.strip())
        if isinstance(amount, int):
            amount = max(0, amount)
        else:
            amount = 1
        return self.obj_redis.incrby(keyname, amount)

    @wraps_set_expire
    def decr(self, keyname=None):
        """DECR: decrement by 1 (key initialised to 0 if missing)."""
        if not keyname:
            return None
        keyname = self.key_make(keyname.strip())
        return self.obj_redis.decr(keyname, 1)

    @wraps_set_expire
    def decrBy(self, keyname=None, amount=1):
        """DECRBY: decrement by *amount*."""
        if not keyname or not amount:
            return None
        keyname = self.key_make(keyname.strip())
        amount = int(amount)
        return self.obj_redis.decr(keyname, amount)

    # --------------------------------------------------------
    # Hash: string field -> value mapping, good for storing objects.
    # A hash can hold up to 2**32 - 1 field/value pairs.
    # --------------------------------------------------------
    @wraps_set_expire
    def hSet(self, keyname=None, key=None, value=None):
        """HSET: set field *key* to *value*; 1 if new field, 0 if overwritten."""
        if not keyname or not key or value is None:
            return None
        keyname = self.key_make(keyname.strip())
        key = key.strip()
        return self.obj_redis.hset(keyname, key, value)

    @wraps_set_expire
    def hGet(self, keyname=None, key=None):
        """HGET: decoded value of field *key*, or None if absent."""
        if not keyname or not key:
            return None
        keyname = self.key_make(keyname.strip())
        key = key.strip()
        result = self.obj_redis.hget(keyname, key)
        if not result:
            return None
        # bytes to str
        return bytes.decode(result)

    @wraps_set_expire
    def hLen(self, keyname=None):
        """HLEN: number of fields (0 for a missing key)."""
        if not keyname:
            return None
        keyname = self.key_make(keyname.strip())
        return self.obj_redis.hlen(keyname)

    @wraps_set_expire
    def hKeys(self, keyname=None):
        """HKEYS: decoded list of field names, or None if empty/missing."""
        if not keyname:
            return None
        keyname = self.key_make(keyname.strip())
        result = self.obj_redis.hkeys(keyname)
        if not result:
            return None
        # bytes to str
        ret_list = list()
        for v in result:
            ret_list.append(bytes.decode(v))
        return ret_list

    @wraps_set_expire
    def hVals(self, keyname=None):
        """HVALS: decoded list of field values, or None if empty/missing."""
        if not keyname:
            return None
        keyname = self.key_make(keyname.strip())
        result = self.obj_redis.hvals(keyname)
        if not result:
            return None
        # bytes to str
        ret_list = list()
        for v in result:
            ret_list.append(bytes.decode(v))
        return ret_list

    @wraps_set_expire
    def hGetAll(self, keyname=None):
        """HGETALL: decoded dict of all field/value pairs, or None."""
        if not keyname:
            return None
        keyname = self.key_make(keyname.strip())
        result = self.obj_redis.hgetall(keyname)
        if not result:
            return None
        # bytes to str
        ret_dict = dict()
        for k, v in result.items():
            ret_dict[bytes.decode(k)] = bytes.decode(v)
        return ret_dict

    def hExists(self, keyname=None, key=None):
        """HEXISTS: True if field *key* exists, False otherwise."""
        if not keyname or key is None:
            return None
        keyname = self.key_make(keyname.strip())
        return self.obj_redis.hexists(keyname, key)

    def hDel(self, keyname=None, *keys):
        """HDEL: delete fields; returns count deleted (missing ignored)."""
        if not keyname:
            return None
        keyname = self.key_make(keyname.strip())
        return self.obj_redis.hdel(keyname, *keys)

    # --------------------------------------------------------
    # List: Left = head, Right = tail.
    # A list can hold up to 2**32 - 1 elements.
    # --------------------------------------------------------
    @wraps_set_expire
    def lPush(self, keyname=None, *values):
        """LPUSH: prepend values; returns the new list length."""
        if not keyname:
            return None
        keyname = self.key_make(keyname.strip())
        return self.obj_redis.lpush(keyname, *values)

    @wraps_set_expire
    def lPop(self, keyname=None):
        """LPOP: pop and return the head element (None if empty).

        NOTE(review): returns raw bytes, unlike rPop which decodes.
        """
        if not keyname:
            return None
        keyname = self.key_make(keyname.strip())
        return self.obj_redis.lpop(keyname)

    @wraps_set_expire
    def rPush(self, keyname=None, *values):
        """RPUSH: append values; returns the new list length."""
        if not keyname:
            return None
        keyname = self.key_make(keyname.strip())
        return self.obj_redis.rpush(keyname, *values)

    @wraps_set_expire
    def rPop(self, keyname=None):
        """RPOP: pop and return the decoded tail element (None if empty)."""
        if not keyname:
            return None
        keyname = self.key_make(keyname.strip())
        result = self.obj_redis.rpop(keyname)
        if not result:
            return None
        # bytes to str
        return bytes.decode(result)

    @wraps_set_expire
    def lLen(self, keyname=None):
        """LLEN: list length (0 for a missing key)."""
        if not keyname:
            return None
        keyname = self.key_make(keyname.strip())
        return self.obj_redis.llen(keyname)

    @wraps_set_expire
    def lTrim(self, keyname=None, start=0, end=-1):
        """LTRIM: keep only elements in [start, end]; returns True."""
        if not keyname:
            return None
        keyname = self.key_make(keyname.strip())
        return self.obj_redis.ltrim(keyname, start, end)

    @wraps_set_expire
    def lGetRange(self, keyname=None, start=0, end=-1):
        """LRANGE: decoded list of elements in [start, end], or None."""
        if not keyname:
            return None
        keyname = self.key_make(keyname.strip())
        result = self.obj_redis.lrange(keyname, start, end)
        if not result:
            return None
        # bytes to str
        ret_list = list()
        for v in result:
            ret_list.append(bytes.decode(v))
        return ret_list

    @wraps_set_expire
    def lRemove(self, keyname=None, value=None, count=1):
        """LREM: remove *count* occurrences of *value*.

        count > 0: head to tail; count < 0: tail to head; count = 0: all.
        Returns the number of removed elements.
        """
        if not keyname or value is None:
            return None
        keyname = self.key_make(keyname.strip())
        return self.obj_redis.lrem(keyname, count, value)

    # --------------------------------------------------------
    # Set: unordered collection of unique strings, O(1) ops.
    # A set can hold up to 2**32 - 1 members.
    # --------------------------------------------------------
    @wraps_set_expire
    def sAdd(self, keyname=None, *values):
        """SADD: add members (duplicates ignored)."""
        if not keyname:
            return None
        keyname = self.key_make(keyname.strip())
        return self.obj_redis.sadd(keyname, *values)

    @wraps_set_expire
    def sCard(self, keyname=None):
        """SCARD: member count (0 for a missing key)."""
        if not keyname:
            return None
        keyname = self.key_make(keyname.strip())
        return self.obj_redis.scard(keyname)

    def sDiff(self, keyname=None, *keys):
        """SDIFF: decoded set of members in *keyname* minus *keys*."""
        if not keyname:
            return None
        # FIX: apply the key prefix to the first set too -- previously the
        # unprefixed name was diffed against prefixed keys (always wrong),
        # inconsistent with sInter/sUnion.
        keyname = self.key_make(keyname.strip())
        other_keys = list()
        for k in keys:
            other_keys.append(self.key_make(k))
        result = self.obj_redis.sdiff(keyname, *other_keys)
        if not result:
            return None
        # bytes to str
        ret_set = set()
        for v in result:
            ret_set.add(bytes.decode(v))
        return ret_set

    @wraps_set_expire
    def sDiffStore(self, store_key=None, key=None, *keys):
        """SDIFFSTORE: store the diff in *store_key*; returns its size."""
        if not store_key or not key:
            return None
        store_key = self.key_make(store_key.strip())
        key = self.key_make(key.strip())
        other_keys = list()
        for k in keys:
            other_keys.append(self.key_make(k))
        return self.obj_redis.sdiffstore(store_key, key, *other_keys)

    def sInter(self, keyname=None, *keys):
        """SINTER: decoded intersection of the given sets, or None."""
        if not keyname:
            return None
        keyname = self.key_make(keyname.strip())
        other_keys = list()
        for k in keys:
            other_keys.append(self.key_make(k))
        result = self.obj_redis.sinter(keyname, *other_keys)
        if not result:
            return None
        # bytes to str
        ret_set = set()
        for v in result:
            ret_set.add(bytes.decode(v))
        return ret_set

    @wraps_set_expire
    def sInterStore(self, store_key=None, key=None, *keys):
        """SINTERSTORE: store the intersection in *store_key*; returns size."""
        if not store_key or not key:
            return None
        store_key = self.key_make(store_key.strip())
        key = self.key_make(key.strip())
        other_keys = list()
        for k in keys:
            other_keys.append(self.key_make(k))
        return self.obj_redis.sinterstore(store_key, key, *other_keys)

    def sUnion(self, keyname=None, *keys):
        """SUNION: decoded union of the given sets, or None."""
        if not keyname:
            return None
        keyname = self.key_make(keyname.strip())
        other_keys = list()
        for k in keys:
            other_keys.append(self.key_make(k))
        result = self.obj_redis.sunion(keyname, *other_keys)
        if not result:
            return None
        # bytes to str
        ret_set = set()
        for v in result:
            ret_set.add(bytes.decode(v))
        return ret_set

    @wraps_set_expire
    def sUnionStore(self, store_key=None, key=None, *keys):
        """SUNIONSTORE: store the union in *store_key*; returns its size."""
        if not store_key or not key:
            return None
        store_key = self.key_make(store_key.strip())
        key = self.key_make(key.strip())
        other_keys = list()
        for k in keys:
            other_keys.append(self.key_make(k))
        return self.obj_redis.sunionstore(store_key, key, *other_keys)

    @wraps_set_expire
    def sIsMember(self, keyname=None, value=None):
        """SISMEMBER: True if *value* is a member, else False."""
        if not keyname or value is None:
            return None
        keyname = self.key_make(keyname.strip())
        return self.obj_redis.sismember(keyname, value)

    @wraps_set_expire
    def sMembers(self, keyname=None):
        """SMEMBERS: decoded set of all members, or None if empty."""
        if not keyname:
            return None
        keyname = self.key_make(keyname.strip())
        result = self.obj_redis.smembers(keyname)
        if not result:
            return None
        # bytes to str
        ret_set = set()
        for v in result:
            ret_set.add(bytes.decode(v))
        return ret_set

    @wraps_set_expire
    def sRem(self, keyname=None, *values):
        """SREM: remove members; returns count removed (missing ignored)."""
        if not keyname:
            return None
        keyname = self.key_make(keyname.strip())
        return self.obj_redis.srem(keyname, *values)

    @wraps_set_expire
    def sPop(self, keyname=None):
        """SPOP: remove and return a decoded random member, or None."""
        if not keyname:
            return None
        keyname = self.key_make(keyname.strip())
        result = self.obj_redis.spop(keyname)
        # bytes to str
        return None if not result else bytes.decode(result)

    @wraps_set_expire
    def sRandMember(self, keyname=None, count=1):
        """SRANDMEMBER: decoded list of random members without removal.

        Positive count: distinct members (whole set if count >= size);
        negative count would allow repeats, but count is clamped to >= 0.
        """
        if not keyname:
            return None
        keyname = self.key_make(keyname.strip())
        if isinstance(count, int):
            count = max(0, count)
        else:
            count = 1
        result = self.obj_redis.srandmember(keyname, count)
        if not result:
            return None
        # bytes to str
        ret_list = list()
        for v in result:
            ret_list.append(bytes.decode(v))
        return ret_list

    # --------------------------------------------------------
    # Zset (sorted set): unique members ordered by score;
    # scores may repeat. Up to 2**32 - 1 members.
    # --------------------------------------------------------
    @wraps_set_expire
    def zAdd(self, keyname=None, **kwargs):
        """ZADD via legacy redis-py kwargs API: member1=score1, ...

        Returns the number of newly added members (updates not counted).
        """
        if not keyname:
            return None
        keyname = self.key_make(keyname.strip())
        return self.obj_redis.zadd(keyname, **kwargs)

    def zRangeByScore(self, keyname=None, min=None, max=None,
                      withscores=False):
        """ZRANGEBYSCORE: members with score in [min, max], ascending.

        Returns a decoded list, or a {member: score} dict when
        withscores is True; None if empty.
        """
        if not keyname:
            return None
        keyname = self.key_make(keyname.strip())
        result = self.obj_redis.zrangebyscore(
            keyname, min, max, withscores=withscores)
        if not result:
            return None
        # bytes to str
        if not withscores:
            # return list
            zret = list()
            for field in result:
                zret.append(bytes.decode(field))
        else:
            # return dict
            zret = list()
            for field, score in result:
                zret.append((bytes.decode(field), score))
            zret = dict(zret)
        return zret

    def zRevRangeByScore(self, keyname=None, max=None, min=None,
                         withscores=False):
        """ZREVRANGEBYSCORE: like zRangeByScore but descending by score."""
        if not keyname:
            return None
        keyname = self.key_make(keyname.strip())
        result = self.obj_redis.zrevrangebyscore(
            keyname, max, min, withscores=withscores)
        if not result:
            return None
        # bytes to str
        if not withscores:
            # return list
            zret = list()
            for field in result:
                zret.append(bytes.decode(field))
        else:
            # return dict
            zret = list()
            for field, score in result:
                zret.append((bytes.decode(field), score))
            zret = dict(zret)
        return zret

    def zRank(self, keyname=None, member=None):
        """ZRANK: ascending rank of *member*, or None if absent."""
        if not keyname or member is None:
            return None
        keyname = self.key_make(keyname.strip())
        return self.obj_redis.zrank(keyname, member)

    def zRevRank(self, keyname=None, member=None):
        """ZREVRANK: descending rank of *member*, or None if absent."""
        if not keyname or member is None:
            return None
        keyname = self.key_make(keyname.strip())
        return self.obj_redis.zrevrank(keyname, member)

    def zRange(self, keyname=None, start=None, end=None, withscores=False):
        """ZRANGE: members by position [start, end], ascending score.

        Returns a decoded list, or a {member: score} dict when
        withscores is True; None if empty.
        """
        if not keyname:
            return None
        keyname = self.key_make(keyname.strip())
        result = self.obj_redis.zrange(
            keyname, start, end, withscores=withscores)
        if not result:
            return None
        # bytes to str
        if not withscores:
            # return list
            zret = list()
            for field in result:
                zret.append(bytes.decode(field))
        else:
            # return dict
            zret = list()
            for field, score in result:
                zret.append((bytes.decode(field), score))
            zret = dict(zret)
        return zret

    def zRevrange(self, keyname=None, start=None, end=None,
                  withscores=False):
        """ZREVRANGE: members by position [start, end], descending score."""
        if not keyname:
            return None
        keyname = self.key_make(keyname.strip())
        result = self.obj_redis.zrevrange(
            keyname, start, end, withscores=withscores)
        if not result:
            return None
        # bytes to str
        if not withscores:
            # return list
            zret = list()
            for field in result:
                zret.append(bytes.decode(field))
        else:
            # return dict
            zret = list()
            for field, score in result:
                zret.append((bytes.decode(field), score))
            zret = dict(zret)
        return zret

    def zRem(self, keyname, *member):
        """ZREM: remove members; returns count removed (missing ignored)."""
        if not keyname:
            return None
        keyname = self.key_make(keyname.strip())
        return self.obj_redis.zrem(keyname, *member)

    def zRemRangeByRank(self, keyname=None, min=None, max=None):
        """ZREMRANGEBYRANK: remove members with rank in [min, max]."""
        if not keyname:
            return None
        keyname = self.key_make(keyname.strip())
        return self.obj_redis.zremrangebyrank(keyname, min, max)

    def zRemrangebyscore(self, keyname=None, min=None, max=None):
        """ZREMRANGEBYSCORE: remove members with score in [min, max]."""
        if not keyname:
            return None
        keyname = self.key_make(keyname.strip())
        return self.obj_redis.zremrangebyscore(keyname, min, max)

    def zCard(self, keyname=None):
        """ZCARD: member count (0 for a missing key)."""
        if not keyname:
            return None
        keyname = self.key_make(keyname.strip())
        return self.obj_redis.zcard(keyname)

    def zCount(self, keyname=None, min=None, max=None):
        """ZCOUNT: number of members with score in [min, max]."""
        if not keyname:
            return None
        keyname = self.key_make(keyname.strip())
        return self.obj_redis.zcount(keyname, min, max)
class RedisJobStore(BaseJobStore):
    """
    Stores jobs in a Redis database. Any leftover keyword arguments are directly passed to
    redis's :class:`~redis.StrictRedis`.

    Plugin alias: ``redis``

    :param int db: the database number to store jobs in
    :param str jobs_key: key to store jobs in
    :param str job_submissions_key: key to store job_submissions in
    :param str run_times_key: key to store the jobs' run times in
    :param int pickle_protocol: pickle protocol level to use (for serialization), defaults to the
        highest available
    """

    def __init__(self, db=0, jobs_key='apscheduler.jobs',
                 job_submissions_key='apscheduler.job_submissions',
                 run_times_key='apscheduler.run_times',
                 pickle_protocol=pickle.HIGHEST_PROTOCOL, **connect_args):
        super(RedisJobStore, self).__init__()

        # NOTE(review): job_submissions_key is not validated like the others.
        if db is None:
            raise ValueError('The "db" parameter must not be empty')
        if not jobs_key:
            raise ValueError('The "jobs_key" parameter must not be empty')
        if not run_times_key:
            raise ValueError('The "run_times_key" parameter must not be empty')

        self.pickle_protocol = pickle_protocol
        self.jobs_key = jobs_key
        self.job_submissions_key = job_submissions_key
        self.run_times_key = run_times_key
        self.redis = StrictRedis(db=int(db), **connect_args)

    def lookup_job(self, job_id):
        """Return the job with the given id, or None if not stored."""
        job_state = self.redis.hget(self.jobs_key, job_id)
        return self._reconstitute_job(job_state) if job_state else None

    def get_due_jobs(self, now):
        """Return all jobs whose next run time is at or before *now*."""
        timestamp = datetime_to_utc_timestamp(now)
        job_ids = self.redis.zrangebyscore(self.run_times_key, 0, timestamp)
        if job_ids:
            job_states = self.redis.hmget(self.jobs_key, *job_ids)
            return self._reconstitute_jobs(six.moves.zip(job_ids, job_states))
        return []

    def get_next_run_time(self):
        """Return the earliest scheduled run time, or None if no jobs."""
        next_run_time = self.redis.zrange(self.run_times_key, 0, 0,
                                          withscores=True)
        if next_run_time:
            return utc_timestamp_to_datetime(next_run_time[0][1])

    def add_job_submission(self, job, now):
        """Record a submission of *job* at time *now*; return its new id."""
        with self.redis.pipeline() as pipe:
            while 1:
                try:
                    job_submission = {
                        'state': 'submitted',
                        'func': job.func if isinstance(
                            job.func, six.string_types) else job.func.__name__,
                        'submitted_at': now,
                        'apscheduler_job_id': job.id
                    }
                    # Use length of hash to set ID of job_submissions
                    pipe.watch(self.job_submissions_key)
                    current_length = pipe.hlen(self.job_submissions_key)
                    job_submission_id = int(current_length) + 1
                    pipe.multi()
                    pipe.hset(self.job_submissions_key, str(job_submission_id),
                              pickle.dumps(job_submission, self.pickle_protocol))
                    pipe.execute()
                    break
                except WatchError:
                    # This should never happen due to the jobstore lock !
                    self._logger.exception(
                        "WatchError was raised in Redis jobstore! Multiple " +
                        "threads/workers are writing to the jobstore at 1 " +
                        "time! This shouldn't happen due to the jobstore lock")
                    raise
        return job_submission_id

    def update_job_submissions(self, conditions, **kwargs):
        """Merge **kwargs into every stored submission matching *conditions*."""
        # Get all jobs that satisfy conditions
        job_submissions_dict = self.redis.hgetall(self.job_submissions_key)
        with self.redis.pipeline() as pipe:
            for key in job_submissions_dict:
                job_sub = pickle.loads(job_submissions_dict[key])
                update_flag = True
                for column in conditions:
                    val = conditions[column]
                    if job_sub[column] != val:
                        update_flag = False
                if update_flag:
                    job_sub.update(kwargs)
                    pipe.hset(self.job_submissions_key, key,
                              pickle.dumps(job_sub, self.pickle_protocol))
            pipe.execute()

    def update_job_submission(self, job_submission_id, **kwargs):
        """Overwrite the stored submission with **kwargs.

        NOTE(review): unlike update_job_submissions this REPLACES the
        whole record rather than merging -- confirm that is intended.
        """
        with self.redis.pipeline() as pipe:
            pipe.hset(self.job_submissions_key, str(job_submission_id),
                      pickle.dumps(kwargs, self.pickle_protocol))
            pipe.execute()

    def get_job_submission(self, job_submission_id):
        """Return the submission dict (with 'id' added), or None."""
        pickled_job_submission = self.redis.hget(self.job_submissions_key,
                                                 str(job_submission_id))
        if not pickled_job_submission:
            return None
        job_sub = pickle.loads(pickled_job_submission)
        job_sub.update({'id': job_submission_id})
        return job_sub

    def get_job_submissions_with_states(self, states=None):
        """Return submissions whose state is in *states* (all if empty).

        FIX: the default was a mutable ``[]`` (shared across calls);
        ``None`` keeps the same behavior without the anti-pattern.
        """
        if states is None:
            states = []
        job_submissions = []
        job_submissions_dict = self.redis.hgetall(self.job_submissions_key)
        for key in job_submissions_dict:
            job_submission = pickle.loads(job_submissions_dict[key])
            if len(states) == 0 or job_submission['state'] in states:
                job_submission.update({"id": int(key)})
                job_submissions.append(job_submission)
        return job_submissions

    def get_all_jobs(self):
        """Return every stored job, sorted by next run time (paused last)."""
        job_states = self.redis.hgetall(self.jobs_key)
        jobs = self._reconstitute_jobs(six.iteritems(job_states))
        paused_sort_key = datetime(9999, 12, 31, tzinfo=utc)
        return sorted(jobs,
                      key=lambda job: job.next_run_time or paused_sort_key)

    def add_job(self, job):
        """Store a new job; raises ConflictingIdError on duplicate id."""
        if self.redis.hexists(self.jobs_key, job.id):
            raise ConflictingIdError(job.id)

        with self.redis.pipeline() as pipe:
            pipe.multi()
            pipe.hset(self.jobs_key, job.id,
                      pickle.dumps(job.__getstate__(), self.pickle_protocol))
            if job.next_run_time:
                pipe.zadd(self.run_times_key,
                          datetime_to_utc_timestamp(job.next_run_time), job.id)
            pipe.execute()

    def update_job(self, job):
        """Replace a stored job; raises JobLookupError if missing."""
        if not self.redis.hexists(self.jobs_key, job.id):
            raise JobLookupError(job.id)

        with self.redis.pipeline() as pipe:
            pipe.hset(self.jobs_key, job.id,
                      pickle.dumps(job.__getstate__(), self.pickle_protocol))
            if job.next_run_time:
                pipe.zadd(self.run_times_key,
                          datetime_to_utc_timestamp(job.next_run_time), job.id)
            else:
                pipe.zrem(self.run_times_key, job.id)
            pipe.execute()

    def remove_job(self, job_id):
        """Delete a stored job; raises JobLookupError if missing."""
        if not self.redis.hexists(self.jobs_key, job_id):
            raise JobLookupError(job_id)

        with self.redis.pipeline() as pipe:
            pipe.hdel(self.jobs_key, job_id)
            pipe.zrem(self.run_times_key, job_id)
            pipe.execute()

    def remove_all_jobs(self):
        """Delete every stored job and all run times."""
        with self.redis.pipeline() as pipe:
            pipe.delete(self.jobs_key)
            pipe.delete(self.run_times_key)
            pipe.execute()

    def remove_all_job_submissions(self):
        """Delete every stored job submission."""
        with self.redis.pipeline() as pipe:
            pipe.delete(self.job_submissions_key)
            pipe.execute()

    def shutdown(self):
        self.redis.connection_pool.disconnect()

    def _reconstitute_job(self, job_state):
        """Rebuild a Job object from its pickled state."""
        job_state = pickle.loads(job_state)
        job = Job.__new__(Job)
        job.__setstate__(job_state)
        job._scheduler = self._scheduler
        job._jobstore_alias = self._alias
        return job

    def _reconstitute_jobs(self, job_states):
        """Rebuild jobs from (id, state) pairs, pruning unrestorable ones."""
        jobs = []
        failed_job_ids = []
        for job_id, job_state in job_states:
            # FIX: was a bare ``except:`` which also swallowed
            # SystemExit/KeyboardInterrupt; Exception is the right net here.
            try:
                jobs.append(self._reconstitute_job(job_state))
            except Exception:
                self._logger.exception(
                    'Unable to restore job "%s" -- removing it', job_id)
                failed_job_ids.append(job_id)

        # Remove all the jobs we failed to restore
        if failed_job_ids:
            with self.redis.pipeline() as pipe:
                pipe.hdel(self.jobs_key, *failed_job_ids)
                pipe.zrem(self.run_times_key, *failed_job_ids)
                pipe.execute()

        return jobs

    def __repr__(self):
        return '<%s>' % self.__class__.__name__