def clear(self):
    search = self.key_prefix + '*'
    for key in cache.iter_keys(search):
        cache.delete(key)
    for key in cache.keys('*.home.*'):
        cache.delete(key)
def get(self, request): """成交分笔明细""" db = self.mongo_conn.hotcow self.collection = db.trading_data tasks = [] if Base(TradingDay, **{ 'day': str(datetime.date.today()) }).findfilter(): sk_all = cache.iter_keys('cache_code_info_*') for i in sk_all: sk_info = cache.get(i) if sk_info and sk_info[ 'market_value'] and sk_info['market_value'] <= 500: if not Base( MyChoiceData, **{ 'code': sk_info['code'], 'trading_day': str(datetime.date.today()), }).findfilter(): tasks.append(self._constantly_deal( sk_info['exchange'])) asyncio.set_event_loop(asyncio.new_event_loop()) # 创建新的协程 loop = asyncio.get_event_loop() loop.run_until_complete(asyncio.wait(tasks)) loop.close() return Response({'MainFlows': 'data update node'})
def test_iter_keys(self): cache.set("foo1", 1) cache.set("foo2", 1) cache.set("foo3", 1) # Test simple result result = set(cache.iter_keys("foo*")) assert result == set(["foo1", "foo2", "foo3"]) # Test limited result result = list(cache.iter_keys("foo*", itersize=2)) assert len(result) == 3 # Test generator object result = cache.iter_keys("foo*") assert next(result) != None
def get(self, request):
    data = {}
    if 'q' in self.request.GET:
        company_name = self.request.GET['q'].upper()
        cols = list(cache.get(next(cache.iter_keys(f"*{company_name}*"))).keys())
        rows = []
        for row in cache.iter_keys(f"*{company_name}*"):
            r = []
            for col in cols:
                r.append(cache.get(row)[col])
            rows.append(r)
        data['cols'] = cols
        data['rows'] = rows
    return JsonResponse(data)
def get(self, request): """同步历史资金流向""" data = request.GET tasks = [] sk_all = cache.iter_keys('cache_code_info_*') if data and 'code' in data: for i in sk_all: code = str(i).split('_')[-1] url = f"{settings.QT_URL3}data/view/ggdx.php?t=2&r=0.8876465514316253" \ f"&q={code.replace('~', '')}" tasks.append( self._read_data(url=url, num_day='day', code=code.split('~')[-1])) else: code_list = [] for i in sk_all: code_list.append(str(i).split('_')[-1].replace('~', '')) times_number = 100 for num in range(0, len(code_list) // times_number + 1): url = f"{settings.QT_URL3}data/view/ggdx.php?t=3&d=5&q=" \ f"{','.join(code_list[num * times_number:times_number * (num + 1)])}" tasks.append(self._read_data(url=url, num_day='many_day')) asyncio.set_event_loop(asyncio.new_event_loop()) # 创建新的协程 loop = asyncio.get_event_loop() loop.run_until_complete(asyncio.wait(tasks)) loop.close() return Response({'MainFlows': 'data update node'})
def test_iter_keys(self): """Test the iter_keys cache operation""" cache.set("foo1", 1) cache.set("foo2", 1) cache.set("foo3", 1) # Test simple result result = set(cache.iter_keys("foo*")) self.assertEqual(result, {"foo1", "foo2", "foo3"}) # Test limited result result = list(cache.iter_keys("foo*", itersize=2)) self.assertEqual(len(result), 3) # Test generator object result = cache.iter_keys("foo*") self.assertIsNotNone(next(result))
def save(self, *args, **kwargs) -> None:
    super(BaseModel, self).save(*args, **kwargs)
    cache_dict: dict = self.flush_cache_keys() or self.CACHE_DICT
    if cache_dict['keys']:
        for key in cache_dict['keys']:
            cache.delete(key)
    if cache_dict['match']:
        # iter_keys yields the keys matching each pattern; delete each matched key,
        # not the generator object itself
        for pattern in cache_dict['match']:
            for key in cache.iter_keys(pattern):
                cache.delete(key)
def getKeywords(request):
    """
    Get a list of keyword suggestions. GET request; returns the keyword list directly.
    Request parameters:
        amount: number of keywords to return, default 5
        word: the keyword entered by the user
    """
    amount = request.GET.get('amount', '5')
    word = request.GET['word']
    if len(word) > 5 or int(amount) < 1:  # validate the input
        raise Exception
    keywords = []
    # Build the generator once; calling iter_keys inside the loop would restart
    # the scan and return the same first key every time.
    key_iter = cache.iter_keys(word + '*')
    for _ in range(int(amount)):
        try:
            keywords.append(next(key_iter))
        except StopIteration:
            break
    return JsonResponse(keywords, safe=False)
def test_iter_keys(self): cache = caches["default"] if isinstance(cache.client, ShardClient): self.skipTest("ShardClient doesn't support iter_keys") cache.set("foo1", 1) cache.set("foo2", 1) cache.set("foo3", 1) # Test simple result result = set(cache.iter_keys("foo*")) self.assertEqual(result, {"foo1", "foo2", "foo3"}) # Test limited result result = list(cache.iter_keys("foo*", itersize=2)) self.assertEqual(len(result), 3) # Test generator object result = cache.iter_keys("foo*") self.assertNotEqual(next(result), None)
def post(self, request, query):
    response = HttpResponse(content_type='text/csv')
    response['Content-Disposition'] = f'attachment; filename={query.lower()}.csv'
    writer = csv.writer(response)
    company_name = query.upper()
    cols = list(cache.get(next(cache.iter_keys(f"*{company_name}*"))).keys())
    writer.writerow(cols)
    rows = []
    for row in cache.iter_keys(f"*{company_name}*"):
        r = []
        for col in cols:
            r.append(cache.get(row)[col])
        rows.append(r)
        writer.writerow(r)
    return response
def test_iter_keys(self): cache = caches["default"] _params = cache._params _is_shard = _params["OPTIONS"]["CLIENT_CLASS"] == "django_redis.client.ShardClient" if _is_shard: return cache.set("foo1", 1) cache.set("foo2", 1) cache.set("foo3", 1) # Test simple result result = set(cache.iter_keys("foo*")) self.assertEqual(result, {"foo1", "foo2", "foo3"}) # Test limited result result = list(cache.iter_keys("foo*", itersize=2)) self.assertEqual(len(result), 3) # Test generator object result = cache.iter_keys("foo*") self.assertNotEqual(next(result), None)
def _forward_outbound(self, command_category=0, category=0, id=0, command=0, verification=''):
    """
    Backend -> frontend.
    :param flag:
    :return:
    """
    try:
        if self.category_id_list is None or self.terminal_id_list is None:
            self._websocket.send(json.dumps({'code': 400}))  # send a 400 status code
            return
        # Brute-force fetch of all keys; this could later be narrowed by model name.
        # Note that iter_keys returns a generator.
        cache_names = [i for i in cache.iter_keys("*")]
        # Open the config file to get the terminal info key-value pairs
        with open('./conf/category.conf') as f:
            terminals = eval(f.read())
        exist_category_id_list = []
        exist_terminal_id_list = []
        # Re-shape into (category_id, terminal_id) pairs for easy lookup;
        # set() removes duplicates
        category_terminal_set = set([(k, v) for k in self.category_id_list for v in self.terminal_id_list])
        # print("pairs", category_terminal_set)
        for category_id, terminal_id in category_terminal_set:
            category_id = int(category_id)
            terminal_id = int(terminal_id)
            for i in cache_names:  # iterate over the names in the cache
                for name in terminals.values():  # check whether the composed name exists in the cache
                    if (name + '%02d' % category_id + '%02d' % terminal_id) == i:
                        exist_category_id_list.append(category_id)
                        exist_terminal_id_list.append(terminal_id)
        data = json.dumps({
            "data": {
                "category_id_list": exist_category_id_list,
                "terminal_id_list": exist_terminal_id_list
            }
        })
        self._websocket.send(data)
    finally:
        # TODO: add a logger here
        # print("autoBridge response finished")
        pass
def test_iter_keys(self):
    cache = get_cache("default")
    _params = cache._params
    _is_shard = _params["OPTIONS"]["CLIENT_CLASS"] == "django_redis.client.ShardClient"

    if _is_shard:
        return

    cache.set("foo1", 1)
    cache.set("foo2", 1)
    cache.set("foo3", 1)

    # Test simple result
    result = set(cache.iter_keys("foo*"))
    self.assertEqual(result, set(["foo1", "foo2", "foo3"]))

    # Test limited result
    result = list(cache.iter_keys("foo*", itersize=2))
    self.assertEqual(len(result), 3)

    # Test generator object
    result = cache.iter_keys("foo*")
    self.assertNotEqual(next(result), None)
def get_pokemons_from_cache():
    pokemons_list = []
    name_list = []
    pokemons_in_cache = cache.iter_keys("pokemon_*")
    for pokemon_in_cache in pokemons_in_cache:
        name_list.append(str(pokemon_in_cache))
        # pokemons_list.append(cache.get(str(pokemon_in_cache)))
    temp_list = cache.get_many(name_list)
    for key, pokemon_data in temp_list.items():
        pokemon_data.update({'image': '/static/mapview/icons/{0}.png'.format(str(pokemon_data['pokemon_id']))})
        pokemons_list.append(pokemon_data)
    return pokemons_list
def get(self, request): """同步当天资金流向""" if Base(TradingDay, **{'day': datetime.date.today()}).findfilter(): tasks = [] sk_all = cache.iter_keys('cache_code_info_*') for i in sk_all: code = cache.get(i) tasks.append(self._ma_day(code['exchange'], )) asyncio.set_event_loop(asyncio.new_event_loop()) # 创建新的协程 loop = asyncio.get_event_loop() loop.run_until_complete(asyncio.wait(tasks)) loop.close() return Response({'MainFlows': 'data update node'})
def test_iter_keys(self):
    cache = caches['default']
    _params = cache._params
    _is_shard = _params['OPTIONS']['CLIENT_CLASS'] == 'django_redis.client.ShardClient'

    if _is_shard:
        return

    cache.set('foo1', 1)
    cache.set('foo2', 1)
    cache.set('foo3', 1)

    # Test simple result
    result = set(cache.iter_keys('foo*'))
    self.assertEqual(result, {'foo1', 'foo2', 'foo3'})

    # Test limited result
    result = list(cache.iter_keys('foo*', itersize=2))
    self.assertEqual(len(result), 3)

    # Test generator object
    result = cache.iter_keys('foo*')
    self.assertNotEqual(next(result), None)
def readRecentMsgFromCache(roomId, refreshTime):
    '''
    A method to read a list of messages from the cache since the last refreshTime
    '''
    global message_prefix
    it = cache.iter_keys(message_prefix % (roomId) + '*')
    # Iterate over all the messages in the cache
    result = []
    for key in it:
        metadata = cache.get(key)
        if long(metadata['time']) > long(refreshTime):
            # the message is newer than the last refresh time
            result.append(metadata)
    return result
def get(self, request): """同步历史交易""" tasks = [] sk_all = cache.iter_keys('cache_code_info_*') for i in sk_all: code = cache.get(i) tasks.append(self._close_day(code['sid'], code['exchange'])) if tasks: asyncio.set_event_loop(asyncio.new_event_loop()) # 创建新的协程 loop = asyncio.get_event_loop() loop.run_until_complete(asyncio.wait(tasks)) loop.close() return Response({'HistoryDeals': 'data update node'})
def readUsersFromCache(roomId):
    '''
    A method to read a list of users in the chatroom from the cache
    @param roomId: the id of the chatroom
    @return: a list of UserInfo objects for the users in the chatroom
    '''
    global user_prefix
    it = cache.iter_keys(user_prefix % (roomId) + '*')
    # Iterate over all the users in the chatroom
    result = []
    for key in it:
        username = cache.get(key)
        user = User.objects.get(username__exact=username)
        ui = UserInfo.objects.get(user=user)
        result.append(ui)
    return result
def get_top_girls():
    '''
    Get the top 100 girls by hit count
    '''
    cached = cache.get("cached_result")
    if cached:
        return cached
    name_list = [post for post in cache.iter_keys("girl_*")]
    result_post_dict = cache.get_many(name_list)
    sorted_post_list = sorted(result_post_dict.items(), key=lambda e: e[1][1], reverse=True)
    top_ids_list = [
        int(top_post[0].split('_')[1]) for top_post in sorted_post_list[:100]
    ]
    cache.set("cached_result", top_ids_list, timeout=90)
    return top_ids_list
def handle(self, *args, **options):
    to_create = []
    for key in cache.iter_keys("visit_*"):
        user_id, ad_id = self.parse_key(key)
        value = cache.get(key)
        if not value:
            continue
        number = value['number']
        modified = value['actual']
        if number == 1:
            if modified:
                obj = Visits(user_id=user_id, number=number, ad_id=ad_id)
                to_create.append(obj)
        else:
            if modified:
                Visits.objects.filter(user_id=user_id, ad_id=ad_id).update(number=number)
        cache.set(key, {'number': number, 'actual': False}, timeout=None)
    # If `key` was never bound, the loop did not run and the cache was empty
    try:
        key
    except NameError:
        self.stdout.write(self.style.SUCCESS('No visits to create or update'))
        return
    Visits.objects.bulk_create(to_create)
    self.stdout.write(self.style.SUCCESS('Successfully create visits'))
def execute_limit_order(order):
    # get all the corresponding keys in the cache
    s_b = 'sell'
    if order.s_b == 'sell':
        s_b = 'buy'
    product_code = order.product_code
    keys = cache.iter_keys(s_b + '-' + product_code + '-' + '*')
    for key in keys:
        value = cache.get(key)
        print(value.dict())
        # the seller's price <= buyer's price
        print(key)
        if value.s_b == 'sell' and value.price <= order.price and value.remain > 0:
            transaction_service.execute_limit_market_transaction(order, value)
        elif value.s_b == 'buy' and value.price >= order.price and value.remain > 0:
            transaction_service.execute_limit_market_transaction(order, value)
        if order.remain == 0:
            break
    # the new order hasn't been completed
    if order.remain != 0:
        cache.add(get_key(order), order, timeout=None)
    else:
        transaction_service.save_completed_order(order.id)
def purge(domain, languages):
    for key in cache.iter_keys('dcl_sites:domain_languages:%s:*' % domain):
        if key.split(':')[-1] not in languages:
            cache.delete(key)
def release_all_locks_of_user(user_id: str) -> None:
    for key in cache.iter_keys('lock:*'):
        if str(cache.get(key)) == user_id:
            cache.delete_pattern(key)
def purge(domains):
    for key in cache.iter_keys('dcl_sites:domain:*'):
        if key not in domains:
            cache.delete(key)
def sync_database():
    for session_key in cache.iter_keys("fav_item_id_*"):
        item_id = session_key.split('_')[-1]
        exist_records = LofterModel.objects.filter(id=int(item_id))[0]
        exist_records.fav_click = int(cache.get(session_key))
        exist_records.save()
def handle(self, **options):
    count = 0
    for key in cache.iter_keys("*"):
        count += 1
    self.stdout.write("Cleared {} cache keys".format(format(count, ",")))
    cache.clear()
def purge(domain, urls):
    for key in cache.iter_keys('dcl_sites:domain_url:%s:*' % domain):
        if key.split(':')[-1] not in urls:
            cache.delete(key)