def test_cache_delete_expired(cache: Cache, timer: Timer):
    """Verify that delete_expired() purges only keys whose TTL has elapsed."""
    ttl = 1
    expiring = list(range(5))
    persistent = list(range(5, 10))
    total = len(expiring) + len(persistent)

    for key in expiring:
        cache.set(key, key, ttl=ttl)
    for key in persistent:
        cache.set(key, key)
    assert len(cache) == total

    # Nothing has expired yet, so this call must be a no-op.
    cache.delete_expired()
    assert len(cache) == total

    # Advance the clock to the TTL boundary; the expiring keys are purged.
    timer.time = ttl
    cache.delete_expired()
    assert len(cache) == len(persistent)

    assert all(not cache.has(key) for key in expiring)
    assert all(cache.has(key) for key in persistent)
def test_cache_memoize_ttl(cache: Cache, timer: Timer):
    """Verify that memoize() applies a per-decorator TTL to cached results."""
    short_ttl = 5
    long_ttl = short_ttl + 1

    @cache.memoize(ttl=short_ttl)
    def func1(a):
        return a

    @cache.memoize(ttl=long_ttl)
    def func2(a):
        return a

    # Prime one cache entry per memoized function (insertion order matters
    # for the key unpacking below).
    func1(1)
    func2(1)
    assert len(cache) == 2
    short_key, long_key = tuple(cache.keys())

    # Just before the shorter TTL, both entries survive.
    timer.time = short_ttl - 1
    assert cache.has(short_key)
    assert cache.has(long_key)

    # At the shorter TTL, only the longer-lived entry remains.
    timer.time = short_ttl
    assert not cache.has(short_key)
    assert cache.has(long_key)

    # At the longer TTL, everything is expired.
    timer.time = long_ttl
    assert not cache.has(long_key)
def test_cache_has(cache: Cache):
    """Verify that has() and the `in` operator both report key membership."""
    key, value = "key", "value"

    # Unknown key: both membership checks must be negative.
    assert not cache.has(key)
    assert key not in cache

    cache.set(key, value)

    # After set(), both membership checks must be positive.
    assert cache.has(key)
    assert key in cache
def test_cache_delete(cache: Cache):
    """Verify that delete() removes a key and is a no-op on missing keys."""
    key, value = "key", "value"

    cache.set(key, value)
    assert cache.has(key)

    # First delete removes the entry.
    cache.delete(key)
    assert not cache.has(key)

    # Deleting an absent key must not raise and must leave it absent.
    cache.delete(key)
    assert not cache.has(key)
def test_cache_set_ttl_default(cache: Cache, timer: Timer):
    """Verify that set() falls back to the cache-wide default TTL."""
    default_ttl = 2
    cache.configure(ttl=default_ttl)

    cache.set("key", "value")
    assert cache.has("key")

    # One tick before expiry the key is still present...
    timer.time = default_ttl - 1
    assert cache.has("key")

    # ...and exactly at the TTL boundary it is gone.
    timer.time = default_ttl
    assert not cache.has("key")
def test_cache_set_ttl_override(cache: Cache, timer: Timer):
    """Verify that an explicit TTL passed to set() beats the default TTL."""
    default_ttl = 1
    cache.configure(ttl=default_ttl)

    cache.set("key1", "value1")                       # inherits default TTL
    cache.set("key2", "value2", ttl=default_ttl + 1)  # explicit, longer TTL

    # At the default TTL only the overridden key survives.
    timer.time = default_ttl
    assert not cache.has("key1")
    assert cache.has("key2")

    # At the overridden TTL the second key expires too.
    timer.time = default_ttl + 1
    assert not cache.has("key2")
class ScanQRCode(threading.Thread): def __init__(self, q): threading.Thread.__init__(self) self.running = True self.queue = q self.vs = None self.cache = Cache(ttl=5) def stopThread(self): self.running = False if not self.vs: self.vs.stop() def run(self): self.vs = VideoStream(usePiCamera=True).start() time.sleep(2) logging.info("scan qrcode thread is running...") while self.running: frame = self.vs.read() frame = imutils.resize(frame, width=400) barcodes = pyzbar.decode(frame) for barcode in barcodes: data = barcode.data if self.cache.has(data): continue self.cache.add(data, None) msg = Msg(msgType=MsgType.SCAN_QR_CODE, load=barcode.data) logging.info("scan qrcode thread put msg: " + str(msg.load)) self.queue.put(msg)
def test_cache_evict(cache: Cache):
    """Verify that set() evicts old keys to stay within maxsize."""
    maxsize = 5
    cache.configure(maxsize=maxsize)

    # Fill the cache exactly to capacity.
    for key in range(maxsize):
        cache.set(key, key)
    assert len(cache) == maxsize
    assert cache.has(0)

    # Each extra insert keeps the size constant by evicting the oldest key.
    for fresh, evicted in zip(range(-1, -maxsize - 1, -1), range(maxsize)):
        cache.set(fresh, fresh)
        assert len(cache) == maxsize
        assert not cache.has(evicted)
class Predictor_Ocr:
    """Worker that pops OCR tasks from a Redis list and classifies glyph images.

    Keras models are loaded lazily from disk and memoized in an in-process
    cache so each .h5 file is read at most once per process.
    """

    def __init__(self, ip, port, password):
        self.ip = ip
        self.port = port
        self.password = password
        self.redis_pool = redis.ConnectionPool(
            host=ip, port=port, db=0, password=password, encoding='utf-8')
        self.cache = Cache()    # model_key -> loaded Keras model
        self.cur = os.getcwd()  # base directory for model and elem-list files

    def get_model(self, model_key):
        """Return the cached model for `model_key`, loading it on first use."""
        # Idiom fix: `has()` returns a bool, so no `is True` comparison.
        if self.cache.has(model_key):
            return self.cache.get(model_key)
        path = "%s/%s/data/%s.h5" % (self.cur, model_key, model_key)
        model = load_model(path)
        self.cache.set(model_key, model)
        return model

    def work(self):
        """Poll the "aiocr" list forever; write results under rs_aiocr_* keys."""
        # Loop invariants hoisted: image geometry, candidate count, score
        # threshold, and — importantly — the Redis client, which the original
        # reconstructed on every 5 ms poll iteration.
        width = 64
        height = 64
        top_k = 5
        thr = 0.1
        red = redis.Redis(connection_pool=self.redis_pool)
        while True:
            task_str = red.lpop("aiocr")
            if task_str is not None:
                task = json.loads(task_str)
                create_user_id = task["create_user_id"]
                image_id = task["image_id"]
                polygon_id = task["polygon_id"]
                image = task["image"]
                algorithm_set = task["algorithm"]
                # Normalize the single image into a (1, 64, 64, 1) float batch.
                image_batch = np.array([image], dtype=np.float32).reshape(
                    -1, width, height, 1)
                image_batch = image_batch / 255.0
                out_list = []
                for algorithm in algorithm_set:
                    path = "%s/%s/data/elem_list.json" % (self.cur, algorithm)
                    with open(path, 'r') as f:
                        elem_list = json.load(f)
                    model = self.get_model(algorithm)
                    out = model.predict(image_batch)[0]
                    # Keep the top-k classes that clear the score threshold
                    # and map to a real element (elem_list entry > -1).
                    top_candidate = out.argsort()[::-1][0:top_k]
                    for item in top_candidate:
                        if out[item] > thr and elem_list[item] > -1:
                            out_list.append(elem_list[item])
                key = "%s_%s_%s_%s" % ("rs_aiocr", create_user_id,
                                       image_id, polygon_id)
                red.set(key, json.dumps(out_list))
            time.sleep(0.005)  # brief pause to avoid hammering Redis
# func.uncached(1, 2)
# assert cache.copy() == {1: "foobar", 2: ("foo", "bar", "baz")}

# Bulk insert; the view methods preserve insertion order.
cache.set_many({"a": 1, "b": 2, "c": 3})
assert list(cache.keys()) == ["a", "b", "c"]
assert list(cache.values()) == [1, 2, 3]
assert list(cache.items()) == [("a", 1), ("b", 2), ("c", 3)]

# Iterating a cache yields its keys, like a dict.
for key in cache:
    print(key, cache.get(key))
# 'a' 1
# 'b' 2
# 'c' 3

assert cache.has("a")
assert "a" in cache

# CacheManager configures several named caches from one settings mapping.
cacheman = CacheManager({
    "a": {
        "maxsize": 100
    },
    "b": {
        "maxsize": 200,
        "ttl": 900
    },
    "c": {}
})
cacheman["a"].set("key1", "value1")
# NOTE(review): the key stored above is "key1" but this lookup uses "key",
# which presumably returns the cache default (None) — confirm whether
# "key1" was intended here.
value = cacheman["a"].get("key")
class KeyValue:
    """Async key-value store backed by two stores: a plain dict for entries
    that never expire and a TTL cache for entries with an expiration time.

    Keys and values must be non-empty alphanumeric strings no longer than
    KEY_LENGTH / VALUE_LENGTH respectively; anything else raises KeyError.
    """

    def __init__(self):
        self.cache = {}  # non-expiring entries
        # ttl=0 disables the cache-wide default TTL; every entry stored here
        # carries its own per-key TTL.
        self.cache_ttl = Cache(maxsize=MAX_SIZE, ttl=0, timer=time.time,
                               default=None)

    async def put(self, key, value, expire_time=DEFAULT_TIME):
        """Store `value` under `key`; expire_time == 0 means "never expires".

        Raises KeyError when the key or value fails validation.
        Returns 1 on success.
        """
        if not self._checkKey(key):
            raise KeyError
        if not self._checkValue(value):
            raise KeyError
        if expire_time != 0:
            # Expiring entry: write to the TTL cache and drop any stale copy
            # from the non-expiring store.
            self.cache_ttl.set(key, value, ttl=expire_time)
            await self._delete_cache(key)
        else:
            self.cache[key] = value
            # BUG FIX: also drop any stale TTL entry so the two stores never
            # hold the same key at once; the original left the old expiring
            # copy behind after a non-expiring overwrite.
            await self._delete_cache_ttl(key)
        return 1

    async def retrieve(self, key):
        """Return the value stored under `key`, consulting both stores.

        Raises KeyError for invalid keys and for keys present in neither
        store (including entries whose TTL has elapsed).
        """
        if not self._checkKey(key):
            raise KeyError
        result = await self._retrieve_cache(key)
        result_ttl = await self._retrieve_cache_ttl(key)
        if result is False and result_ttl is False:
            raise KeyError
        elif result:
            return result
        else:
            return result_ttl

    async def delete(self, key):
        """Remove `key` from both stores. Raises KeyError for invalid keys."""
        if not self._checkKey(key):
            raise KeyError
        await self._delete_cache(key)
        await self._delete_cache_ttl(key)
        return 1

    # -- per-store retrieval (False signals "not present") ------------------

    async def _retrieve_cache(self, key):
        if not await self._contains_cache(key):
            return False
        return self.cache[key]

    async def _retrieve_cache_ttl(self, key):
        if not await self._contains_cache_ttl(key):
            return False
        return self.cache_ttl.get(key)

    # -- per-store deletion (idempotent; always returns 1) ------------------

    async def _delete_cache(self, key):
        if not await self._contains_cache(key):
            return 1
        del self.cache[key]
        return 1

    async def _delete_cache_ttl(self, key):
        if not await self._contains_cache_ttl(key):
            return 1
        del self.cache_ttl[key]
        return 1

    # -- validation: alphanumeric strings of bounded length -----------------

    def _checkKey(self, key):
        return (isinstance(key, str) and key.isalnum()
                and len(key) <= KEY_LENGTH)

    def _checkValue(self, value):
        return (isinstance(value, str) and value.isalnum()
                and len(value) <= VALUE_LENGTH)

    # -- per-store membership -----------------------------------------------

    async def _contains_cache(self, key):
        return key in self.cache

    async def _contains_cache_ttl(self, key):
        return self.cache_ttl.has(key)
cache.delete(1)
assert cache.get(1) is None

# Clear the entire cache.
print(len(cache))
cache.clear()
assert len(cache) == 0

# Bulk variants are provided for get, set, and delete.
cache.set_many({"a": 1, "b": 2, "c": 3})
assert cache.get_many(["a", "b", "c"])
print(len(cache))
assert cache.delete_many(["a", "b", "c"])
print(len(cache))

# Reconfigure an already-initialized cache object.
cache.configure(maxsize=1000, ttl=5 * 60)
cache.set_many({'a': 1, 'b': 2, 'c': 3})
assert list(cache.keys()) == ['a', 'b', 'c']
assert list(cache.values()) == [1, 2, 3]
assert list(cache.items()) == [('a', 1), ('b', 2), ('c', 3)]

# Iterate over all keys in the cache.
for key in cache:
    print(key, cache.get(key))
# 'a' 1
# 'b' 2
# 'c' 3

assert cache.has('a')
assert 'a' in cache
def main():
    """Main event loop for the smart-lock controller.

    Starts the QR-scan and random-string worker threads, then consumes
    their messages from a shared queue:
      * SCAN_QR_CODE — either binds the lock to a contract (bind mode) or
        verifies a signed challenge via gRPC and opens the door;
      * ETH_UPDATE   — refreshes the on-chain user map;
      * RandomStr    — caches a fresh challenge and publishes it (signed)
        to the backend.
    """
    gpio_ctrl = GPIOCtrl()
    # openDoor("dd", "djf", "dkkd")
    q = queue.Queue(100)
    scanQRCodeThread = ScanQRCode(q=q)
    scanQRCodeThread.start()
    randomStrThread = RandomStr(q=q)
    randomStrThread.start()
    web3Thread = None
    db = LockDB(DB_NAME)
    info = db.getInfo()
    cache = Cache(ttl=60 * 5)  # challenges stay valid for five minutes
    channel = grpc.insecure_channel("localhost:50000")
    stub = lock_pb2_grpc.LockStub(channel)
    # Resume watching an already-bound contract, if any.
    if info.get("contractAddr"):
        web3Thread = Web3Thread(address=info["contractAddr"], q=q)
        web3Thread.start()
    userMap = info.get("userMap", {})
    logging.info("main thread running...")
    while True:
        msg = q.get()
        if msg.msgType == MsgType.SCAN_QR_CODE:
            codeStr = msg.load
            codeInfo = None
            try:
                codeInfo = json.loads(str(codeStr, encoding="utf-8"))
            except BaseException as e:
                logging.info(e)
                continue
            # BUG FIX: the original tested `info` (the DB record, always a
            # dict) instead of the freshly parsed payload, so a non-object
            # JSON payload crashed on the .get() calls below.
            if not isinstance(codeInfo, dict):
                logging.info("continue qr code deal")
                continue
            logging.info(codeInfo)
            salt = codeInfo.get("salt")
            address = codeInfo.get("addr")
            sign = codeInfo.get("sign")
            if gpio_ctrl.is_bind_status and salt and address:
                # Bind mode: persist the new contract and restart the
                # chain-watcher thread against it.
                info["salt"] = salt
                info["contractAddr"] = address
                db.update(info)
                if web3Thread:
                    web3Thread.stopThread()
                    web3Thread.join()
                # q.clear()
                web3Thread = Web3Thread(address=address, q=q)
                web3Thread.start()
            elif sign and address:
                # Unlock mode: verify the signed challenge for a known user.
                logging.info(userMap)
                infoList = userMap.get(address)
                logging.info(infoList)
                if infoList:
                    logging.warning("here")
                    pubKey = infoList[1]
                    resp = ""
                    try:
                        resp = stub.decrypt(
                            lock_pb2.Request(pubKey=pubKey, sign=sign))
                    except Exception as e:
                        logging.info(e)
                        continue
                    scanRanStr = resp.ranStr
                    logging.info("decrypt str: " + scanRanStr)
                    # Only open if the decrypted challenge is one we issued
                    # recently (still present in the TTL cache).
                    if cache.has(scanRanStr):
                        openDoor(infoList[0], info.get("contractAddr", ""),
                                 info.get("salt", ""))
        elif msg.msgType == MsgType.ETH_UPDATE:
            userMap = decodeUserInfo(msg.load)
            info["userMap"] = userMap
            db.update(info)
        elif msg.msgType == MsgType.RandomStr:
            ranStr = msg.load
            if info.get("contractAddr"):
                cache.add(ranStr, None)
                # NOTE(review): this lookup only asserts that "salt" exists
                # (KeyError otherwise); the value itself is re-read with
                # .get() below — confirm whether the hard failure is wanted.
                salt = info["salt"]
                data = {}
                data["address"] = info.get("contractAddr", "")
                data["ranStr"] = ranStr
                data["salt"] = info.get("salt", "")
                # Deterministic signature: concatenate sorted k=v pairs and
                # MD5 the result.
                signStr = ""
                for k in sorted(data.keys()):
                    signStr = signStr + k + "=" + data[k] + ";"
                md5 = hashlib.md5()
                md5.update(signStr.encode(encoding="utf-8"))
                data["sign"] = md5.hexdigest()
                data.pop("salt")  # salt is secret; never sent over the wire
                requests.post(url=BASE_URL + "/api/lock/randomstr",
                              data=data, verify=False)