def top(self):
    """Return (member, title, frequency) triples, most frequent first.

    Members come from the ``musicacommonset`` set; per-member counters and
    titles are bulk-fetched with MGET. A member whose title key is not yet
    populated gets a "<member> (loading)" placeholder title.
    """
    members = [raw.decode() for raw in redis.smembers("musicacommonset")]

    # Build both key lists up front so each lookup is a single MGET round trip.
    count_keys = ["musicacommon.%s" % m for m in members]
    title_keys = ["musicatitle.%s" % m for m in members]

    counts = [int(c) for c in redis.mget(*count_keys)]

    titles = []
    for member, raw_title in zip(members, redis.mget(*title_keys)):
        # Missing title value -> placeholder until the loader fills it in.
        titles.append(raw_title.decode() if raw_title else "%s (loading)" % member)

    # Sort descending by frequency (index 2 of each triple).
    return sorted(zip(members, titles, counts), key=lambda row: row[2], reverse=True)
def loop(restaurantlist):
    """Refresh each restaurant's cached menu when its cache entry is stale.

    A cache entry is refreshed when any of these hold:
    - the stored timestamp is from a different day than today;
    - a menu is cached but older than the restaurant's TTL;
    - no menu is cached and the timestamp is older than the retry wait.
    """
    now = dt.today()
    wait = waittime(now)

    for r in restaurantlist:
        menu_key = f"{r.menu['id']}:menu"
        ts_key = f"{r.menu['id']}:timestamp"
        menu, timestamp = redis.mget(menu_key, ts_key)

        if timestamp:
            timestamp = dt.fromisoformat(timestamp.decode("utf-8"))
        else:
            # No stored timestamp: treat the entry as infinitely old (epoch).
            timestamp = dt.utcfromtimestamp(0)

        age = now - timestamp
        stale = (
            timestamp.date() != now.date()
            or (bool(menu) and age > r.menu['ttl'])
            or (not menu and age > wait)
        )

        if stale:
            print(f"Updating «{r.menu['name']}»")
            start = perf_counter()
            update(r.menu['id'], r.menu['get'], now)
            elapsed = perf_counter() - start
            print(f"Updating «{r.menu['name']}» took {elapsed} seconds")
def loop(restaurantlist, must_update=False):
    """Trigger menu updates for stale restaurant cache entries.

    Unless *must_update* forces a run, nothing happens after 12:45 local time.
    An update fires when the stored timestamp is not from today, is older than
    the place's TTL, or the cached menu is empty and older than the retry wait.
    """
    now = dt.today()
    wait = waittime(now)

    # Lunch menus are pointless to refresh after 12:45 unless forced.
    if not must_update and now.time() > time(12, 45):
        return

    for place in restaurantlist:
        menu, raw_ts = redis.mget(f"{place.id}:menu", f"{place.id}:timestamp")

        if raw_ts:
            timestamp = dt.strptime(raw_ts.decode("utf-8"), DATEFORMAT)
        else:
            # Missing timestamp: treat entry as infinitely old (epoch).
            timestamp = dt.utcfromtimestamp(0)

        age = now - timestamp
        no_menu = menu is None or menu == b"[]"

        if (
            timestamp.date() != now.date()
            or age > place.ttl
            or (no_menu and age > wait)
        ):
            do_update(place, now)
def gAi_red_executer(redis, gnum, inum):
    """Count fetched items that share at least one genre with a random genre set.

    Draws *gnum* random genre names and *inum* random 8-digit keys, bulk-fetches
    the keys' pickled values, and returns how many decoded items have a genre
    overlapping the drawn set.

    :param redis: Redis client (only ``mget`` is used)
    :param gnum: number of random genres to draw
    :param inum: number of random item keys to fetch
    :return: count of fetched items with at least one matching genre
    """
    genres_set = {randomname(2) for _ in range(gnum)}
    keys = [str(random.randint(0, 9999999)).zfill(8) for _ in range(inum)]
    vals = redis.mget(keys)

    count = 0
    for val in vals:
        # MGET returns None for missing keys; the original crashed in
        # pickle.loads(None) on those, which is near-certain with random keys.
        if val is None:
            continue
        # NOTE(review): pickle.loads on data read from Redis is unsafe if the
        # store can ever contain untrusted input — consider JSON instead.
        record = pickle.loads(val)
        if genres_set & set(record['genre']):
            count += 1
    return count
def process_logs(redis, path, callback):
    """Process log files under *path*, persisting progress to Redis.

    The name of the file being processed and the byte offset reached are kept
    in ``progress:file`` / ``progress:position`` so a crashed run can resume
    where it left off: fully processed files are skipped, and a partially
    processed file is re-opened and seeked to the stored offset.

    :param redis: Redis connection (``mget`` + ``pipeline`` are used)
    :param path: directory containing the log files
    :param callback: ``callback(pipe, line)`` invoked for every log line
    :return: None
    """
    current_file, offeset = redis.mget("progress:file", "progress:position")

    # Fresh run: no progress stored yet. Normalize to str so the
    # `fname < current_file` comparison below is always str-vs-str
    # (the original compared str against bytes/None, a TypeError on Python 3).
    if current_file is None:
        current_file = ""
    elif isinstance(current_file, bytes):
        current_file = current_file.decode("utf-8")

    pipe = redis.pipeline()

    # Closure to avoid repeating the progress-write in two places.
    def update_progress():
        pipe.mset({
            "progress:file": fname,
            "progress:position": offeset,
        })
        pipe.execute()

    # Visit the log files in sorted (chronological-by-name) order.
    for fname in sorted(os.listdir(path)):
        # Skip files that a previous run already finished.
        if fname < current_file:
            continue

        # `with` guarantees the handle is closed even if callback raises.
        with open(os.path.join(path, fname), 'rb') as inp:
            if fname == current_file:
                # Resuming a partially processed file: normalize the stored
                # offset to int once, then skip the already-handled bytes.
                offeset = int(offeset, 10)
                inp.seek(offeset)
            else:
                offeset = 0
            # Every later file is unprocessed; stop skipping/seeking.
            current_file = ""

            for lno, line in enumerate(inp):
                callback(pipe, line)
                # Advance by the line length only. The original did
                # `offeset += int(offeset) + len(line)`, which doubles the
                # offset on every line and breaks resume positions.
                offeset += len(line)
                # Checkpoint every 1000 lines so a crash loses little work.
                if not (lno + 1) % 1000:
                    update_progress()
            # Final checkpoint when the file is fully processed.
            update_progress()
def strings_redis():
    """Interactive demo of Redis string commands against a local server.

    Connects to 127.0.0.1:6379 db 0 and prints the results of INFO, MONITOR,
    SET/GET, KEYS pattern matching, MSET/MGET and GETRANGE, then deletes all
    keys matching ``prefix:*``.
    """
    import redis

    # decode_responses=True (with charset) returns str instead of b'...' bytes.
    conn = redis.Redis(host='127.0.0.1', port=6379, db=0,
                       charset="utf-8", decode_responses=True)

    separator = "-------------"

    print(separator)
    print("STRINGS")
    print(separator)

    # Server statistics.
    print(conn.info())
    print(separator)

    # Monitor object for watching server commands.
    print(conn.monitor())
    print(separator)

    # SET overwrites: the second value wins.
    conn.set("name", "javier")
    conn.set("name", "jaime")
    print("key: ", conn.get("name"))
    print(separator)

    # KEYS pattern matching.
    print("all keys: ", conn.keys())
    print("keys with a 'name...': ", conn.keys("name*"))
    print("keys with a 'e': ", conn.keys("*e*"))
    print(separator)

    conn.set("name", "javier")
    # NOTE(review): this dict literal repeats the key "name", so only
    # {"name": "david"} reaches MSET — kept as-is to preserve behavior.
    conn.mset({"name": "peter", "name": "david"})
    print("name: ", conn.mget("name"))
    print(separator)

    # GETRANGE: substring of the stored value by byte offsets.
    print("range : ", conn.getrange("name", 0, 3))

    # Delete every key under the prefix: namespace.
    for key in conn.scan_iter("prefix:*"):
        conn.delete(key)
def item_red_executer(redis, inum):
    """Bulk-fetch *inum* random 8-digit keys and return the result count.

    :param redis: Redis client (only ``mget`` is used)
    :param inum: number of random keys to request
    :return: length of the MGET reply (one slot per requested key)
    """
    random_keys = [str(random.randint(0, 9999999)).zfill(8) for _ in range(inum)]
    fetched = redis.mget(random_keys)
    return len(fetched)
def running_showers():
    """Fetch the stored state of both shower keys in one MGET round trip."""
    states = redis.mget('shower1', 'shower2')
    return states
def get_many_features(key_prefix, ids, type=None):
    """Bulk-load pickled feature blobs for *ids* under *key_prefix*.

    Keys are formed as ``<key_prefix>:<id>``; ids whose value is missing or
    empty are omitted from the result.

    :param key_prefix: namespace prefix for the Redis keys
    :param ids: iterable of identifiers
    :param type: accepted for interface compatibility; not used here
    :return: dict mapping id -> unpickled feature
    """
    keys = ["%s:%s" % (key_prefix, feature_id) for feature_id in ids]
    blobs = redis.mget(*keys)

    result = {}
    for feature_id, blob in zip(ids, blobs):
        if blob:
            result[feature_id] = pickle.loads(blob)
    return result
def get_by_ids(ids):
    """Load Proxy objects for the given byte-string ids.

    Each id is decoded, mapped to its Redis key via Proxy._get_redis_key,
    bulk-fetched with MGET, and each JSON payload is wrapped in a Proxy.
    """
    redis_keys = [Proxy._get_redis_key(raw_id.decode('utf-8')) for raw_id in ids]
    payloads = redis.mget(redis_keys)
    return [Proxy(json.loads(payload)) for payload in payloads]
# print 'set goods id index success....[%s]'%(goods_type) # pipe.execute() ############################################################### ### exchange_table更新 ### 新增1个字段 exchange_type ### 2018-01-03 ############################################################### total = redis.llen(FISH_EXCHANGE_LIST) exchange_ids = redis.lrange(FISH_EXCHANGE_LIST, 0, -1) exchange_id_keys = [ FISH_EXCHANGE_TABLE % (exchange_id) for exchange_id in exchange_ids ] exchange_details = [ exchange_detail for exchange_detail in redis.mget(exchange_id_keys) ] exchange_info = [] for exchange_detail in exchange_details: exchange_detail = eval(exchange_detail) exchange_detail['exchange_type'] = redis.hget( FISH_REWARD_TABLE % (exchange_detail['exchange_reward_id']), 'reward_type') pipe.set(FISH_EXCHANGE_TABLE % (exchange_detail['exchange_id']), exchange_detail) print 'id [%s] setType success...' % (exchange_detail['exchange_id']) pipe.execute() # pipe.execute()
# Script fragment (Python 2: uses itertools.izip/izip_longest) that folds
# Redis-held download counters matching `search` into the release_files table.
# `redis`, `search`, `root`, `config`, `store` and `string` come from outside
# this chunk.
# Skip patterns that were already merged into the database.
if redis.sismember("downloads:integrated", search):
    print("Already Integrated '%s'" % search)
    sys.exit(0)

# Fetch all of the keys
keys = redis.keys(search)
if not keys:
    print("No keys match '%s'" % search)
    sys.exit(0)

# Fetch all of the download counts (in batches of 200)
counts = []
# Grouper idiom: zipping 200 references to one iterator yields 200-key
# chunks, padded with None on the last chunk; the padding is filtered out.
for batch in izip_longest(*[iter(keys)] * 200):
    batch = [x for x in batch if x is not None]
    counts.extend(redis.mget(*batch))

# Combine the keys with the counts
# Lazy (count, filename) pairs; the filename is the last ':'-separated
# segment of each key.
downloads = izip(
    (int(y) for y in counts),
    (x.split(":")[-1] for x in keys),
)

# Update the database
c = config.Config(os.path.join(root, "config.ini"))
# NOTE(review): this rebinds the module name `store` to a Store instance,
# shadowing the module for the rest of the script.
store = store.Store(c)
cursor = store.get_cursor()
# Only filenames made entirely of printable characters are applied.
cursor.executemany(
    "UPDATE release_files SET downloads = downloads + %s WHERE filename = %s",
    (d for d in downloads if not set(d[1]) - set(string.printable)),
)
"""Count how many values in the local Redis db 0 equal "Japan"."""
import redis

# Keep the connection under its own name instead of shadowing the module.
client = redis.Redis(host="localhost", port=6379, db=0)

# NOTE(review): KEYS * scans the whole keyspace and blocks the server;
# fine for a one-off script, use scan_iter for production-sized datasets.
keys = client.keys("*")

count = 0
# MGET with an empty key list raises a ResponseError, so guard the
# empty-database case (the original crashed there).
areas = client.mget(keys) if keys else []
for area in areas:
    # A key can vanish between KEYS and MGET, leaving a None slot.
    if area is not None and area.decode() == "Japan":
        count += 1
print(count)