class KeyValueDatabase:
    """Hashed key/value cache backed by a Redis database.

    Every operation addresses Redis by the md5 digest of the caller's key,
    so raw keys never appear in Redis.
    """

    def __init__(self, database=cache_db):
        # `database` selects the Redis logical db; defaults to the cache db.
        self.db = Redis(host=redis_host, db=database)

    def get(self, key):
        """Return the stored value for `key`, or None if absent.

        The raw Redis value is passed through safe_eval so values stored as
        Python literals come back as objects; if evaluation fails, the raw
        value is returned unchanged.
        """
        key = md5(key).hexdigest()
        result = self.db.get(key)
        try:
            return safe_eval(result)
        # BUGFIX: was a bare `except:`, which also swallowed
        # KeyboardInterrupt/SystemExit; narrow to Exception.
        except Exception:
            return result

    def set(self, key, value):
        """Store `value` under `key`; returns the Redis reply.

        (The old docstring documented a `timeout` parameter that does not
        exist — removed.)
        """
        key = md5(key).hexdigest()
        return self.db.set(key, value)

    def remove(self, key):
        """Delete the entry for `key` (no return value)."""
        key = md5(key).hexdigest()
        self.db.delete(key)

    def flush(self):
        """Drop every key in this Redis database."""
        return self.db.flushdb()
class Result:
    """Result store: md5-hashed keys in the RESULT Redis database."""

    def __init__(self):
        self.db = Redis(host=HOST, db=RESULT, port=PORT)

    def _hashed(self, key):
        # Every public operation addresses Redis via the md5 digest of the key.
        return md5(key).hexdigest()

    def set(self, key, value):
        """Store `value` under `key`; returns the Redis reply."""
        return self.db.set(self._hashed(key), value)

    def get(self, key):
        """Fetch the value stored under `key` (None if absent)."""
        return self.db.get(self._hashed(key))

    def remove(self, key):
        """Delete `key`; returns the number of keys removed."""
        return self.db.delete(self._hashed(key))
async def listen_coin_minute_task(loop):
    """Worker loop: drain the "coin_all_minute:queue" Redis queue forever.

    Pops serialized coin items and feeds them to run_paraser_item; when the
    queue drains, flushes accumulated parse errors, closes the shared HTTP
    session, and idles before polling again.
    """
    import asyncio  # local import: leaves the (unseen) file import block untouched

    err_data_list = []
    has_data = 0
    session = None
    await main(loop)
    while True:
        redis_obj = Redis.getInstance()
        pre_item = redis_obj.customer("coin_all_minute:queue")
        if pre_item:
            if not session:
                # Lazily open one long-lived session for the whole drain.
                session = aiohttp.ClientSession(conn_timeout=1800)
            has_data = 1
            # Queue payloads may use single quotes; normalize to valid JSON.
            pre_item = json.loads(pre_item.decode().replace("'", '"'))
            collect_info("listen_coin_minute_task pre_item is %s" % str(pre_item))
            err_data_list = await run_paraser_item(pre_item, -1, err_data_list, session)
        else:
            if has_data:
                num = redis_obj.getListLen("coin_all_minute:queue")
                if num:
                    # Items arrived between pop and length check; keep draining.
                    continue
                has_data = 0
                await print_error_item(-1, err_data_list)
                if session:
                    await session.close()
                    session = None
                err_data_list = []
            # BUGFIX: time.sleep(5) blocked the entire event loop inside this
            # coroutine; use the non-blocking asyncio sleep instead.
            await asyncio.sleep(5)
async def getReportUpdateTime(key="update_report", item=""):
    """Return the "update_at" timestamp recorded for `item` under `key`.

    Reads the report map from Redis and returns "" when the key is missing
    or the item has no recorded timestamp.
    """
    report = Redis.getInstance().get(key)
    if report is None:
        return ""
    if not isinstance(report, dict):
        # Raw Redis value is bytes holding a JSON document; decode it.
        report = json.loads(report.decode())
    return report.get(item, {}).get("update_at", "")
async def init_db(loop):
    """Create the DB pool and seed Redis from the bundled data.json.

    Loads "hotCoins" and "update_report" from data/dbData/data.json and
    stores each back into Redis as a JSON string.
    """
    await orm.create_pool(loop=loop, **configs.db)
    seed_path = os.path.join(root_path, "data/dbData/data.json")
    with open(seed_path, "r") as fh:
        seed = json.load(fh)
    cache = Redis.getInstance()
    # Order matches the original: hotCoins first, then update_report.
    for name in ("hotCoins", "update_report"):
        cache.set(name, json.dumps(seed.get(name)))
class BetInfo:
    """Bet-info store: md5-hashed entries plus a "keys" registry set."""

    def __init__(self):
        self.db = Redis(host=HOST, db=BET_INFO, port=PORT)

    @staticmethod
    def _digest(key):
        # Entries are addressed by the md5 digest of the caller's key.
        return md5(key).hexdigest()

    def set(self, key, value):
        """Store `value` under `key`; returns the Redis reply."""
        return self.db.set(self._digest(key), value)

    def get(self, key):
        """Fetch the value stored under `key` (None if absent)."""
        return self.db.get(self._digest(key))

    def remove(self, key):
        """Delete `key`; returns the number of keys removed."""
        return self.db.delete(self._digest(key))

    def add_key(self, keyname):
        """Record `keyname` (un-hashed) in the "keys" registry set."""
        return self.db.sadd('keys', keyname)

    def list_keys(self):
        """Return the set of registered key names."""
        return self.db.smembers('keys')
async def get_coin_name_list(request):
    """Return {"error", "data", "message"} carrying the coin name list.

    Serves from the Redis "coin_name_list" key when present; otherwise
    queries the DAO and primes the cache for subsequent calls.
    """
    redis_obj = Redis.getInstance()
    cached = redis_obj.get("coin_name_list")
    if cached:
        coins = json.loads(cached.decode())
    else:
        coins = await dao.findHotCoinList(all=1)
        redis_obj.set("coin_name_list", json.dumps(coins))
    return {"error": 0, "data": coins, "message": ""}
async def update_folio():
    """Drain "folio_all_items:queue", recalculating each queued portfolio.

    Pops folio ids from the Redis queue and recomputes each portfolio
    (full recalculation with ratios) until the queue is empty, pausing
    briefly between items.
    """
    import asyncio  # local import: leaves the (unseen) file import block untouched

    while True:
        redis_obj = Redis.getInstance()
        pre_item = redis_obj.customer("folio_all_items:queue")
        if not pre_item:
            break
        pre_item = json.loads(pre_item.decode())
        await calculate_portfolio(pre_item["id"], folio=None, re_cal=True, is_ratio=True)
        # BUGFIX: time.sleep(0.1) blocked the event loop between items;
        # yield to the loop with the non-blocking asyncio sleep instead.
        await asyncio.sleep(0.1)
class Log:
    """Request log in Redis: per-request records plus daily/total counters."""

    def __init__(self):
        self.db = Redis(host=HOST, db=LOG, port=PORT)

    def insert(self, address):
        """Record one request from `address`; always returns True.

        Increments the global counter, pushes a unique per-request token
        onto today's list, and stores the human-readable record under it.
        """
        self.db.incr("TotalRequest", 1)
        # Token is md5(address + timestamp) so repeat visits stay distinct.
        key = md5(address + str(datetime.now())).hexdigest()
        self.db.lpush(str(date.today()), key)
        self.db.set(key, "%s at %s" % (address, str(datetime.now())))
        return True

    def total_request(self, key):
        """Return the all-time request counter (`key` is unused; kept for callers)."""
        return self.db.get("TotalRequest")

    def total_request_today(self):
        """Return how many requests were logged today."""
        return self.db.llen(str(date.today()))

    def total_active_today(self):
        """Return the number of distinct request tokens seen today."""
        # lrange's end index is inclusive; 0..llen covers the whole list.
        # FIX: local was named `all` (shadowed the builtin) and wrapped the
        # set in a redundant list() before len().
        entries = self.db.lrange(str(date.today()), 0, self.total_request_today())
        return len(set(entries))
async def token2user(token):
    """Resolve a session token to its user dict, or None.

    Looks the token up in Redis, decodes the JSON payload when needed,
    and refreshes the token's TTL to 24 hours on every successful hit.
    Any failure is logged and reported as None.
    """
    if not token:
        return None
    try:
        store = Redis.getInstance()
        raw = store.get(token)
        if raw is None:
            return None
        if isinstance(raw, dict):
            user = raw
        else:
            user = json.loads(raw.decode())
        # Sliding expiry: each authenticated request keeps the token alive.
        store.set_expire(token, 3600 * 24)
        return user
    except Exception as exc:
        error("token2user exception is: %s" % str(exc))
        return None
async def listen_folio_24_hour_task(loop):
    """Worker loop: drain "folio_24_hour:queue" and refresh 24h histories.

    Pops queued folio ids and recomputes each folio's 24-hour history;
    idles when the queue stays empty.
    """
    import asyncio  # local import: leaves the (unseen) file import block untouched

    has_data = 1
    await main(loop)
    while True:
        redis_obj = Redis.getInstance()
        pre_item = redis_obj.customer("folio_24_hour:queue")
        if pre_item:
            has_data = 1
            pre_item = json.loads(pre_item.decode())
            await updateHour24History(pre_item["id"], folio=None)
        else:
            if has_data:
                num = redis_obj.getListLen("folio_24_hour:queue")
                if num:
                    # Items arrived between pop and length check; keep draining.
                    continue
                has_data = 0
            # BUGFIX: time.sleep(5) blocked the entire event loop inside this
            # coroutine; use the non-blocking asyncio sleep instead.
            await asyncio.sleep(5)
async def update_redis():
    """Stamp every report section with the current time in "update_report".

    Writes {section: {"update_at": <epoch seconds as str>}} for all
    portfolio and coin report sections into the Redis "update_report" key.
    """
    stamp = str(int(time.time()))
    # Section names in the original insertion order (the serialized JSON
    # therefore matches the previous literal byte-for-byte).
    sections = (
        "portfolio_ratios", "portfolio_optimization", "portfolio_basic",
        "portfolio_ai", "portfolio_style", "portfolio_benchmark",
        "portfolio_risk", "portfolio_versus", "coins_ai", "coins_basic",
        "coins_ratios", "coins_versus",
    )
    update_report = {name: {"update_at": stamp} for name in sections}
    Redis.getInstance().set("update_report", json.dumps(update_report))
async def main(loop, day_num):
    """Fetch ticker pages, parse them, and fan work out to Redis queues.

    `day_num` selects the target queue(s):
      0  -> one message per folio on "folio_24_hour:queue"
      1  -> one message per coin on "coin_all_day:queue" plus one per folio
            on "folio_all_items:queue"
      -1 -> one message per coin on "coin_all_minute:queue"
    """
    await orm.create_plug_pool(loop=loop, **configs.db)
    session = aiohttp.ClientSession(conn_timeout=1800)
    try:
        # Ticker API pages in blocks of 100 (start=1, 101, 201, ...).
        data_lists = []
        for page in range(10):
            result_data = await fetch_async(ticker_url % (page * 100 + 1), session, toJson=True)
            if result_data:
                data_lists.extend(result_data["data"].values())
        bit_names = {str(val["symbol"]): val for val in data_lists}
        all_value = await bs4_paraser(bit_names)
        collect_info("init_request_down main all_value is %s" % str(all_value))
        redis_obj = Redis.getInstance()
        if day_num == 0:
            for folio_val in await get_folio():
                redis_obj.producer(
                    "folio_24_hour:queue",
                    json.dumps({"id": folio_val.id}, ensure_ascii=False, default=__default))
        if day_num == 1:
            for val in all_value:
                redis_obj.producer("coin_all_day:queue", json.dumps(val))
            for folio_val in await get_folio():
                redis_obj.producer(
                    "folio_all_items:queue",
                    json.dumps({"id": folio_val.id}, ensure_ascii=False, default=__default))
        if day_num == -1:
            for val in all_value:
                redis_obj.producer("coin_all_minute:queue", json.dumps(val))
    finally:
        # BUGFIX: the session previously leaked if any await above raised;
        # always close it.
        await session.close()
async def frequency(remote_ip, path_url):
    """Rate-limit check for one (client IP, URL path) pair.

    Returns 1 when the caller is requesting too fast, 0 when the request
    is within limits, and None on unexpected errors. State lives in a
    Redis hash keyed by `remote_ip` with one field per `path_url`.
    """
    try:
        fast_rep = 0  # 1 => request should be treated as too frequent
        redis_obj = Redis.getInstance()
        data_val = redis_obj.getHash(remote_ip, path_url)
        try:
            # Stored state: {"now_time": last-hit ms, "num": hits in the
            # current window, "pre_time": window-start ms}.
            data_val = json.loads(data_val.decode())
            val = data_val.get("now_time", "") if isinstance(
                data_val, dict) else data_val
            num = data_val.get("num", "") if isinstance(data_val, dict) else 1
            now_time = int(time.time() * 1000)
            pre_time = data_val.get("pre_time", "") if isinstance(
                data_val, dict) else now_time
        except:
            # First hit or unparsable state: start a fresh window.
            # NOTE(review): bare except also swallows KeyboardInterrupt —
            # consider narrowing to Exception.
            now_time = int(time.time() * 1000)
            pre_time = now_time
            val = ""
            num = 1
        request_info("frequency val:%s" % str(val))
        if not val:
            # No previous state: record this hit with a 1-hour expiry on
            # the whole per-IP hash.
            data = {"now_time": now_time, "num": 1, "pre_time": now_time}
            redis_obj.setHash(remote_ip, path_url, json.dumps(data))
            redis_obj.setHashExpire(remote_ip, 3600)
        else:
            # Two limits: consecutive hits under 200 ms apart, or more
            # than 60 hits inside a rolling 60-second window.
            if now_time - int(val) < 200:
                fast_rep = 1
            if num >= 60 and now_time - pre_time <= 60000:
                fast_rep = 1
                num += 1
            elif now_time - pre_time > 60000:
                # Window elapsed: reset counter and window start.
                pre_time = now_time
                num = 1
            else:
                num += 1
            data = {"now_time": now_time, "num": num, "pre_time": pre_time}
            redis_obj.setHash(remote_ip, path_url, json.dumps(data))
            redis_obj.setHashExpire(remote_ip, 3600)
        return fast_rep
    except Exception as e:
        error("frequency exception is: %s" % str(e))
        return None
def incr_score(cls, proxy_str):
    """Bump `proxy_str`'s score by 1 in the proxies sorted set.

    NOTE(review): two-argument zincrby matches the redis-py 2.x signature
    zincrby(name, value, amount=1); redis-py 3.x reordered it to
    (name, amount, value) — confirm the installed client version.
    """
    return Redis.connect().zincrby(cls.CACHE_KEY_PROXIES, proxy_str)
def __init__(self):
    # Screen bodies live in the SCREEN db; suggestion metadata in CACHE.
    self.db = Redis(host=HOST, db=SCREEN, port=PORT)
    self.cache = Redis(host=HOST, db=CACHE, port=PORT)
class Screen:
    """Screen definitions in Redis: content in SCREEN db, metadata in CACHE.

    Screen bodies are stored under md5-hashed ids; the hashed ids are also
    tracked in the "list" set, and a small metadata dict (screen_id,
    form_title) is mirrored into the cache db for suggestions.
    """

    def __init__(self):
        self.db = Redis(host=HOST, db=SCREEN, port=PORT)
        self.cache = Redis(host=HOST, db=CACHE, port=PORT)

    def set(self, key, value):
        """Store `value` under the md5 of `key`; returns the Redis reply."""
        hashed = md5(key).hexdigest()
        return self.db.set(hashed, value)

    def get(self, key):
        """Fetch the value stored under `key` (None if absent)."""
        hashed = md5(key).hexdigest()
        return self.db.get(hashed)

    def remove(self, key):
        """Drop `key` everywhere: the "list" set, the cache, and the store.

        NOTE(review): add_screen indexes the *hashed* id in "list", but this
        srem uses the raw key — verify which form the set actually holds.
        """
        self.db.srem("list", key)
        hashed = md5(key).hexdigest()
        self.cache.delete(hashed)
        return self.db.delete(hashed)

    def add_screen(self, screen_id, form_title, content):
        """Register a screen: store its content, index it, cache its metadata."""
        hashed = md5(screen_id).hexdigest()
        self.db.set(hashed, content)
        self.db.sadd("list", hashed)
        meta = {"screen_id": screen_id, "form_title": form_title}
        self.cache.set(hashed, str(meta))
        return True

    def get_suggest(self):
        """Return every cached metadata dict (parsed back from its repr)."""
        return [literal_eval(self.cache.get(k)) for k in self.cache.keys()]

    def get_cache(self, screen_id):
        """Return the cached metadata dict for `screen_id`."""
        hashed = md5(screen_id).hexdigest()
        return literal_eval(self.cache.get(hashed))

    def add_to_list(self, key):
        """Add an (already hashed) key to the "list" index set."""
        return self.db.sadd("list", key)

    def get_list(self):
        """Return all indexed keys as a list."""
        return list(self.db.smembers("list"))

    def flush(self):
        """Wipe the SCREEN database entirely (the cache db is untouched)."""
        return self.db.flushdb()
def __init__(self):
    # Request-log storage lives in the LOG Redis database.
    self.db = Redis(host=HOST, db=LOG, port=PORT)
def update_score(cls, proxy_str):
    """Set `proxy_str`'s score to 1 in the proxies sorted set.

    NOTE(review): positional (score, member) matches redis-py 2.x zadd;
    redis-py 3.x requires a mapping argument — confirm the client version.
    """
    return Redis.connect().zadd(cls.CACHE_KEY_PROXIES, 1, proxy_str)
# Source MySQL connection; the databases to replicate come from the
# comma-separated mysql_src_item['db'] list.
SRCMYSQL = Mysql(
    host=mysql_src_item['host'],
    user=mysql_src_item['user'],
    port=mysql_src_item['port'],
    password=mysql_src_item['password'],
    charactor=mysql_src_item['charactor'],
    dblist=mysql_src_item['db'].split(','),
)
# Destination MySQL connection.
# NOTE(review): dblist reuses mysql_src_item['db'] rather than
# mysql_dst_item['db'] — confirm the destination is meant to mirror the
# source database list (possible copy-paste bug).
DSTMYSQL = Mysql(
    host=mysql_dst_item['host'],
    user=mysql_dst_item['user'],
    port=mysql_dst_item['port'],
    password=mysql_dst_item['password'],
    charactor=mysql_dst_item['charactor'],
    dblist=mysql_src_item['db'].split(','),
)
redisins = Redis(
    host=redis_item['host'],
    port=redis_item['port'],
    password=redis_item['password'],
)
# Initialize data: snapshot the source schema (tables and columns),
# honoring the configured include/exclude table lists.
db_table_column_info = SRCMYSQL.get_db_table_column_info(
    excludeTable=mysql_src_item['exclude'].split(','),
    includeTable=mysql_src_item['include'].split(','))
db_table_column_info = json.loads(db_table_column_info)
def __init__(self):
    # Bet-info storage lives in the BET_INFO Redis database.
    self.db = Redis(host=HOST, db=BET_INFO, port=PORT)
def add(self):
    """Register this proxy with score 0 in the proxies sorted set.

    Only proxies passing proxy_valid() are added; otherwise returns None
    implicitly. NOTE(review): positional (score, member) matches the
    redis-py 2.x zadd signature — confirm the client version.
    """
    if self.proxy_valid():
        return Redis.connect().zadd(self.CACHE_KEY_PROXIES, 0, self.proxy_str)
def all(cls):
    """Return every proxy in the sorted set as a list, lowest score first."""
    return list(Redis.connect().zrange(cls.CACHE_KEY_PROXIES, 0, -1))
def count(cls):
    """Return the number of proxies in the sorted set."""
    return Redis.connect().zcard(cls.CACHE_KEY_PROXIES)
def del_all(cls):
    """Delete the entire proxies sorted-set key; returns keys removed (0/1)."""
    return Redis.connect().delete(cls.CACHE_KEY_PROXIES)
def __init__(self, database=cache_db):
    # `database` selects the Redis logical db; defaults to the cache db.
    self.db = Redis(host=redis_host, db=database)
def remove_invalid(cls):
    """Drop proxies scored in [-1, 0.1] — failed or unproven entries."""
    return Redis.connect().zremrangebyscore(cls.CACHE_KEY_PROXIES, -1, 0.1)
def remove_values(cls, value):
    """Remove the first occurrence of `value` from the proxies key.

    NOTE(review): lrem is a Redis *list* operation, but the sibling
    accessors treat CACHE_KEY_PROXIES as a sorted set (zadd/zrange/...);
    lrem on a zset raises WRONGTYPE — confirm this is intended (zrem may
    have been meant).
    """
    return Redis.connect().lrem(cls.CACHE_KEY_PROXIES, 1, value)
def all_valid(cls):
    """Return proxies scored between 0.1 and 1000 (inclusive) as a list."""
    return list(Redis.connect().zrangebyscore(cls.CACHE_KEY_PROXIES, 0.1, 1000))