def get_run_info(username=None):
    """Return scoreboard and recent-game info.

    With *username*: a 20-entry scoreboard window centred near the player,
    that player's last 40 game ids and total game count, plus the player's
    1-based rank and the window offset. Without: the global top 40 and the
    global recent-games list.

    Returns (highscores, last_games, num_games[, rank+1, highscore_first]).
    Each highscore entry is [user, -score, inactive]; scores are presumably
    stored negated so ZRANGE ascends — TODO confirm.
    """
    if username:
        rank = redis.zrank('scoreboard', username)
        highscore_first = max(0, rank - 10)
        p = redis.pipeline()
        p.zrange('scoreboard', highscore_first, highscore_first + 20, withscores=True)
        p.lrange('player:%s:games' % username, -40, -1)
        p.llen('player:%s:games' % username)
        raw_highscores, last_game_ids, num_games = p.execute()
    else:
        rank = None
        p = redis.pipeline()
        p.zrange('scoreboard', 0, 39, withscores=True)
        p.lrange('games', -40, -1)
        p.llen('games')
        raw_highscores, last_game_ids, num_games = p.execute()
    # PERF FIX: the original built and executed one pipeline per scoreboard
    # entry (an N+1 round-trip pattern). Queue every LINDEX into a single
    # pipeline instead.
    p = redis.pipeline()
    for score_user, _ in raw_highscores:
        p.lindex('player:%s:games' % score_user, -1)
    latest_per_user = p.execute()
    highscores = []
    for (score_user, score), last_game in zip(raw_highscores, latest_per_user):
        # A player is "inactive" when their newest game lags the most recent
        # game id by more than INACTIVE_COUNT.
        inactive = (int(last_game_ids[-1]) - int(last_game) > INACTIVE_COUNT)
        highscores.append([score_user, -score, inactive])
    last_games = make_game_list(list(reversed(last_game_ids)))
    if username:
        return highscores, last_games, num_games, (rank + 1), highscore_first
    else:
        return highscores, last_games, num_games
def addRoomCard2Member(redis, transNo):
    """Credit purchased room cards to a member for a paid order.

    Looks up the order identified by *transNo*, dispatches type-2 goods to a
    specialised handler, otherwise credits purchased + bonus cards to the
    member's per-agent card counter, records recharge totals and revenue
    split, and appends a daily usage log entry.

    Returns False when the order does not exist; otherwise None.
    """
    curTime = datetime.now()
    orderTable = ORDER_TABLE % (transNo)
    if not redis.exists(orderTable):
        # BUG FIX: original referenced the undefined name ``params`` here,
        # raising NameError instead of logging the missing order.
        log_util.debug('[%s][wechatPay][error] orderNo[%s] is not exists.' % (curTime, transNo))
        return False
    goodid, memberAccount = redis.hmget(orderTable, ('num', 'account'))
    rType = redis.hget(GOODS_TABLE % goodid, 'type')
    rType = int(rType) if rType else None
    if rType == 2:
        # Type-2 goods have their own crediting flow.
        addRoomCard2Member4Type2(redis, curTime, orderTable, memberAccount)
        return
    cardNums, present_card = redis.hmget(orderTable, ('roomCards', 'presentCards'))
    if not present_card:
        present_card = 0
    try:
        present_card = int(present_card)
    except (TypeError, ValueError):
        # Non-numeric bonus field — treat as no bonus cards.
        present_card = 0
    # Resolve account -> user hash -> agent group, as in the legacy system.
    account2user_table = FORMAT_ACCOUNT2USER_TABLE % (memberAccount)
    userTable = redis.get(account2user_table)
    groupId = redis.hget(userTable, 'parentAg')
    # Member id is embedded in the user-table key ("<prefix>:<id>:...").
    id = userTable.split(':')[1]
    pipe = redis.pipeline()
    pipe.incrby(USER4AGENT_CARD % (groupId, id), (int(cardNums) + present_card))
    # Track the lifetime recharge total (bonus cards excluded).
    if not redis.exists(USER4AGENT_RECHARGE % (groupId, id)):
        pipe.set(USER4AGENT_RECHARGE % (groupId, id), 0)
    pipe.incrby(USER4AGENT_RECHARGE % (groupId, id), int(cardNums))
    CardMoney = getCardMoney(redis, groupId)
    log_util.debug('[%s][wechatPay] recharge CardMoney[%s]' % (curTime, CardMoney))
    # Compute the agent revenue split.
    countRateOfAgent(redis, groupId, int(cardNums), CardMoney)
    log_util.debug(
        '[%s][wechatPay] recharge roomcards[%s] to account[%s] success' %
        (curTime, cardNums, memberAccount))
    roomCards = pipe.execute()[0]
    # Daily usage log: "added;reason-code;balance-after", kept with a TTL.
    pipe = redis.pipeline()
    ymd = datetime.now().strftime("%Y-%m-%d")
    useDatas = [int(cardNums), 4, roomCards]
    useStr = ';'.join(map(str, useDatas))
    pipe.lpush(PLAYER_DAY_USE_CARD % (id, ymd), useStr)
    pipe.expire(PLAYER_DAY_USE_CARD % (id, ymd), SAVE_PLAYER_DAY_USE_CARD_TIME)
    pipe.execute()
def save_prefix_index(self,redis=None,pinyin_match=True):
    # Index every alias of this instance for prefix autocompletion: each
    # prefix of each index word is zadd'ed into the completion zset, and the
    # full word is stored with a "*" terminator marker.
    # NOTE(review): only the full-pinyin string gets a membership set via
    # sadd; the first-letter string is prefix-indexed but never sadd'ed —
    # confirm that is intentional.
    key = mk_complete_key(self.type)
    pipeline = redis.pipeline()
    for alias in self.aliases:
        words = []
        words.append(alias)
        pipeline.sadd(mk_sets_key(self.type,alias),self.id)
        if pinyin_match:
            pinyin_full = split_pinyin(alias)
            pinyin_first = []
            for py in pinyin_full:
                if len(py) > 0:
                    pinyin_first.append(py[0])
            pinyinStr = "".join(pinyin_full)
            words.append(pinyinStr)
            words.append("".join(pinyin_first))
            pipeline.sadd(mk_sets_key(self.type,pinyinStr),self.id)
            # Explicitly drop references to the (possibly large) intermediates.
            pinyin_full = None
            pinyin_first = None
            pinyinStr = None
        for word in words:
            # All proper prefixes of the word, then the word itself marked
            # with "*" as a completion terminator.
            for xlen in range(1,len(word)):
                prefix = word[0:xlen]
                pipeline.zadd(key,0,prefix)
            pipeline.zadd(key,0,word+"*")
    pipeline.execute()
def save(self,redis=None):
    """Persist this instance: JSON blob in a type hash, condition sets,
    per-word alias sets, then (optionally) the prefix index.

    NOTE(review): an alias that yields no index words triggers ``return``
    below, abandoning the entire save before the pipeline executes —
    confirm this is intended rather than ``continue``.
    """
    if redis == None:
        return
    if self.title == None or self.title.strip() == "":
        return
    data = {'title':self.title,'id':self.id}
    for key,value in self.exts.items():
        data[key] = value
    pipeline = redis.pipeline()
    """set instance to hashset """
    pipeline.hset(self.type,self.id,json.dumps(data))
    """save condition fields in sorted set"""
    for field in self.conditions:
        pipeline.sadd(mk_condition_key(self.type,field,data[field]),self.id)
    """save aliases"""
    for alias in self.aliases:
        words = Index.split_words_for_index(alias)
        if len(words) == 0:
            return
        for word in words:
            pipeline.sadd(mk_sets_key(self.type,word),self.id)
    pipeline.execute()
    """save prefix"""
    if self.prefix_index_enable:
        self.save_prefix_index(redis)
def getYearData(year):
    """Aggregate per-month error/live counts for a 4-digit *year* string.

    Returns {"MM": {"error": n, "live": n, "link": ...}} or an error dict on
    bad input. Relies on daily keys named "ids:date:YYYYMMDD".
    """
    if len(year) != 4:
        return {"error": "Invalid parameter"}
    redis = brconfig.getRedis()
    # FIX: the original created a transactional pipeline that was never used.
    keyList = redis.keys("ids:date:%s????" % year)
    monthList = []
    for key in keyList:
        # chars 9..14 of "ids:date:YYYYMMDD" == "YYYYMM"; assumes keys come
        # back as str (decode_responses) — TODO confirm client config.
        monthList.append(key[9:15])
    monthList = list(set(monthList))  # de-duplicate months
    yearData = {}
    for month in monthList:
        monthData = getMonthData(month)
        errorCount = 0
        liveCount = 0
        for item in monthData:
            errorCount += monthData[item]["error"]
            liveCount += monthData[item]["live"]
        yearData[month[4:6]] = {
            "error": errorCount,
            "live": liveCount,
            "link": "summary?month=%s" % month
        }
    return yearData
def generateSet(products,tmpSetName):
    '''
    Generate a temporary union set for the given products list.
    '''
    # Python 2 module (print statements). Unions the per-product id sets into
    # *tmpSetName*; returns the set name, or None when the input list is
    # empty or the union produced no members.
    print "generateSet()"
    if len(products)==0:
        print "Parameter(products) is empty!"
        return None
    redis = brconfig.getRedis()
    pipe = redis.pipeline(transaction=True)
    sets=[]
    for p in products:
        sets.append("ids:i:android.os.Build.PRODUCT:%s"%p)
    pipe.sunionstore(tmpSetName,sets)
    ret=pipe.execute()
    if ret[0]==0:
        # SUNIONSTORE reported zero members — delete the (empty) key.
        redis.delete(tmpSetName)
        print "Generated an empty set!"
        return None
    else:
        return tmpSetName
def generateSet(products, tmpSetName): ''' Generate a temporary union set for the given products list. ''' print "generateSet()" if len(products) == 0: print "Parameter(products) is empty!" return None redis = brconfig.getRedis() pipe = redis.pipeline(transaction=True) sets = [] for p in products: sets.append("ids:i:android.os.Build.PRODUCT:%s" % p) pipe.sunionstore(tmpSetName, sets) ret = pipe.execute() if ret[0] == 0: redis.delete(tmpSetName) print "Generated an empty set!" return None else: return tmpSetName
def reorder(self, uuid, dir):
    """Move the queue entry identified by *uuid* one position.

    *dir* >= 0 moves the entry toward the tail ("forward"), negative moves
    it toward the head. Returns "ok" on success, "faila" for a non-numeric
    direction, "failb" when the uuid is absent or ambiguous, "failc" when
    the move would fall off either end of the queue.
    """
    try:
        forward = int(dir) >= 0
    except ValueError:
        return "faila"
    rel = 1 if forward else -1
    with redis.pipeline() as pipe:
        while True:
            try:
                pipe.watch("musicaqueue")
                cur_queue = pipe.lrange("musicaqueue", 0, -1)
                found = [
                    ent for ent in cur_queue
                    if json.loads(ent.decode())["uuid"] == uuid
                ]
                if len(found) != 1:
                    return "failb"
                cur_index = cur_queue.index(found[0])
                # BUG FIX: the tail-boundary test compared against
                # len(found) - 1 (always 0), so moving the first entry
                # forward was wrongly rejected and moving the last entry
                # forward raised IndexError. Compare against the queue
                # length instead.
                if (cur_index == 0 and not forward) or (
                        cur_index == len(cur_queue) - 1 and forward):
                    return "failc"
                pipe.multi()
                # Swap the entry with its neighbour atomically.
                pipe.lset("musicaqueue", cur_index, cur_queue[cur_index + rel])
                pipe.lset("musicaqueue", cur_index + rel, cur_queue[cur_index])
                pipe.execute()
                break
            except WatchError:
                # Queue changed under us — retry the whole read/swap.
                continue
    return "ok"
def verfiyRcvDatas(redis, params):
    """Validate a payment callback and mark its order as successful.

    (Name kept as-is — "verfiy" typo is part of the public interface.)
    Moves the order from the pending set to the succeeded set, stores the
    payment details, and removes any TTL. Returns True on success, False
    when the order is unknown or the update fails.
    """
    curTime = datetime.now()
    orderTable = ORDER_TABLE % (params['out_trade_no'])
    if not redis.exists(orderTable):
        log_util.debug('[%s][wechatPay][error] orderNo[%s] is not exists.' %
                       (curTime, params['out_trade_no']))
        return False
    updateInfo = {
        'money': params['total_fee'],
        'endTime': params['time_end'],
        'currency': params['fee_type'],
        'orderNum': params['transaction_id'],
        'type': 'successful',
    }
    pipe = redis.pipeline()
    try:
        log_util.debug('[%s][wechatPay][info] update orderInfo[%s] success.'\
                       %(curTime,updateInfo))
        pipe.hmset(orderTable, updateInfo)
        pipe.srem(PENDING_ORDER, orderTable)
        pipe.sadd(SUCCEED_ORDER, orderTable)
        # Completed orders are kept permanently.
        pipe.persist(orderTable)
        pipe.execute()
    except Exception:
        # FIX: was a bare ``except:`` which also swallowed SystemExit /
        # KeyboardInterrupt; still best-effort, but only for real errors.
        log_util.debug('[%s][wechatPay][error] update orderInfo[%s] error.' %
                       (curTime, updateInfo))
        return False
    return True
def getTimeFilteredSet(start,end,tmpSetName):
    # Union the daily "ids:date:YYYYMMDD" sets covering [start, end] (unix
    # seconds) into *tmpSetName*; start==0 means 2012-01-01, end==0 means now.
    # Returns the set name, or None when no days were collected.
    start=int(start)
    end=int(end)
    if start==0:
        start=int(datetime.date(2012,1,1).strftime("%s"))
    if end==0:
        end=int(time.time())
    aday=3600*24
    sets=[]
    redis = brconfig.getRedis()
    pipe = redis.pipeline(transaction=True)
    i=0
    day=start
    while(day<=end):
        # NOTE(review): ``day`` is advanced *after* the loop test, so the set
        # for one day past ``end`` is also appended — confirm whether that
        # inclusive-overshoot is intended.
        day=start+i*aday
        datestr=date.fromtimestamp(day).strftime("%Y%m%d")
        sets.append("ids:date:%s"%datestr)
        i+=1
    if len(sets)>0:
        pipe.sunionstore(tmpSetName,sets)
        pipe.execute()
        return tmpSetName
    else:
        return None
def save(self, redis=None):
    """Persist this instance into redis.

    Writes the JSON blob into the per-type hash, registers condition-field
    sets and per-word alias sets, then builds the prefix index if enabled.
    Mirrors the original behaviour exactly: if any alias yields no index
    words the whole save is abandoned before the pipeline executes.
    """
    if redis == None:
        return
    if self.title == None or self.title.strip() == "":
        return
    payload = {'title': self.title, 'id': self.id}
    payload.update(self.exts)
    pipe = redis.pipeline()
    # Whole object stored as JSON, keyed by id inside the type hash.
    pipe.hset(self.type, self.id, json.dumps(payload))
    # One membership set per condition field value.
    for field in self.conditions:
        pipe.sadd(mk_condition_key(self.type, field, payload[field]), self.id)
    # One membership set per alias word.
    for alias in self.aliases:
        words = Index.split_words_for_index(alias)
        if len(words) == 0:
            return  # abandon the entire save (original behaviour)
        for word in words:
            pipe.sadd(mk_sets_key(self.type, word), self.id)
    pipe.execute()
    if self.prefix_index_enable:
        self.save_prefix_index(redis)
def listItem(redis, itemid, sellerid, price):
    """List an item for sale on the market.

    Adds the item to the market sorted set while WATCHing the seller's
    inventory, so the item is guaranteed to still be in the inventory when
    the transaction commits.

    :param redis: redis client
    :param itemid: item identifier
    :param sellerid: seller identifier
    :param price: asking price
    :return: 1 on success, 2 when retries time out (5s of contention),
        3 when the item is not in the seller's inventory
    """
    inventory = "test:inventory:%s" % str(sellerid)
    item = "%s.%s" % (str(itemid), str(sellerid))
    end = time.time() + 5
    p = redis.pipeline()
    while time.time() < end:
        try:
            # Watch the seller's inventory for concurrent changes.
            p.watch(inventory)
            # After WATCH the pipeline is in immediate mode, so this
            # sismember executes right away; bail out (and stop watching)
            # if the item is gone.
            if not p.sismember(inventory, itemid):
                p.unwatch()
                return 3
            # Queue the listing: add to market, remove from inventory.
            p.multi()
            p.zadd("test:market:", {item: price})
            p.srem(inventory, itemid)
            # If execute() raises no WatchError the transaction committed
            # and the WATCH has ended.
            p.execute()
            return 1
        except WatchError:
            # Inventory changed under us — retry.
            pass
    return 2
def create_chat(redis, sender, recipients, message, chat_id=None):
    """Create a chat group and send its first message.

    :param redis: redis client
    :param sender: sending user
    :param recipients: list of receiving users (not mutated)
    :param message: first message body
    :param chat_id: optional explicit group id; otherwise generated
    :return: result of send_message()
    """
    # Allocate a fresh group id unless one was supplied.
    chat_id = chat_id or redis.incr(CHAT_GEN_IDS)
    # BUG FIX: the original appended the sender to the caller's *recipients*
    # list in place, surprising callers that reuse the list. Work on a copy.
    members = list(recipients) + [sender]
    # All members start with seen-score 0 (no messages read yet).
    membersd = {m: 0 for m in members}
    key_chat_group = CHAT_GRUOP.format(chat_id=chat_id)
    p = redis.pipeline()
    # Register every participant in the group zset.
    p.zadd(key_chat_group, membersd)
    for m in members:
        # Initialise each member's per-user "seen" zset for this chat.
        p.zadd(CHAT_USER_SEEN.format(user=m), {chat_id: 0})
    p.execute()
    return send_message(redis, chat_id, sender, message)
def test_pipe(redis):
    # Verifies the @redis_server decorator also accepts a pipeline object
    # (not just a plain client) for its declared 'client' argument.
    @redis_server(redis_objs=['client'])
    def pipe(client):
        return 1337
    redis = redis.pipeline()
    assert pipe(redis) == 1337
def pip(self, line):
    """Execute a file of redis commands through one non-transactional pipeline.

    Usage: ``pip <path>``. Each non-empty, non-# line of the file is split
    into "<command> <args...>" and dispatched onto the pipeline; unsupported
    commands are reported and skipped.

    NOTE(review): the existence check joins all tokens after the command but
    open() uses only the second token, so paths containing spaces behave
    inconsistently — kept as-is pending confirmation.
    """
    cmdLines = line.split()
    if (len(cmdLines) < 2):
        print("Express error,such as: pip path")
        return
    exists = os.path.exists(" ".join(cmdLines[1:]))
    if not exists:
        print("File not found.")
        return
    redis = self.redis
    pipeline = redis.pipeline(transaction=False)
    with open(cmdLines[1], 'rt') as f:
        for (num, line) in enumerate(f):
            line = line.strip()
            # BUG FIX: the original tested ``len(line) < 0`` (never true),
            # so blank lines fell through and crashed on cmdLines[0] below.
            if (len(line) == 0):
                continue
            cmdLines = line.split()
            if (cmdLines[0].startswith('#')):
                continue
            print(cmdLines)
            try:
                # Two-token lines: command + single joined argument.
                if (len(cmdLines) == 2):
                    operator.methodcaller(cmdLines[0], " ".join(cmdLines[1:]))(pipeline)
                # Longer lines: command, key, then the rest joined as value.
                if (len(cmdLines) >= 3):
                    operator.methodcaller(cmdLines[0], cmdLines[1], " ".join(cmdLines[2:]))(pipeline)
            except AttributeError as e:
                print('Unsupported operation,this cmd {} will be ignored. error info is:{}'.format(cmdLines, e))
    pipeline.execute()
def genErrorSetByTime():
    # Rebuild the global error set ("ids:e" = all bugreport ids minus the
    # CALL_DROP ones) and intersect it with each day's id set going back to
    # 2012-01-01, materialising per-day "ids:error:YYYYMMDD" sets.
    # Python 2 module: ``current/aday`` relies on integer division.
    redis=getRedis()
    pipe = redis.pipeline(transaction=True)
    #ids
    aday=3600*24
    current=int(time.time())
    todaystart=(current/aday)*aday#Get today start
    enday=int(datetime.date(2012,1,1).strftime("%s"))
    start=current
    end=current
    errorSet="ids:e"
    redis.sdiffstore(errorSet,['ids:b','ids:b:CALL_DROP'])
    i=0
    while(start>enday):
        # Walk one day back per iteration from today's midnight.
        start=todaystart-i*aday
        end=start+aday
        timestr=datetime.date.fromtimestamp(start).strftime("%Y%m%d")
        ret=redis.exists("ids:date:%s"%timestr)
        if ret:
            # Only materialize the intersection for days that have data.
            pipe.sinterstore("ids:error:%s"%timestr,[errorSet,"ids:date:%s"%timestr])
            pipe.execute()
        i+=1
def info_release_product(token):
    # Map each platform to the subset of its products that *token* may
    # access. Python 2 module (print statements). Returns {"error": ...}
    # on failure, otherwise {platform: [products]} (platforms with no
    # accessible products are omitted).
    redis = brconfig.getRedis()
    pipe = redis.pipeline(transaction=True)
    #Get accessible product list
    result=getAccessibleProducts(token)
    if result==None:
        print "No accessible products!"
        return {"error":"No accessible products!"}
    if ('error' in result):
        print "Error in result:%s"%result['error']
        return result
    products=result
    platforms=redis.sort('set:platform:names',alpha=True)
    for platform in platforms:
        pipe.smembers('set:%s:products'%platform)
    ret=pipe.execute()
    results={}
    i=0
    for platform in platforms:
        # ret[i] lines up with platforms because smembers was queued in the
        # same order as this iteration.
        l=list(set(products)&set(ret[i]))
        if (len(l)>0):
            results[platform]=l
        i+=1
    return results
def test_process_flow(self, x=10):
    # Exercises a history/result-merging flow on the test zsets seeded by
    # prepare_process_flow: merge result ids with history (keeping only ids
    # NOT already in history via score 0 + zremrangebyscore), truncate to the
    # top *x*, then fold the survivors back into history stamped with
    # ``time_stamp`` and cap history at 800 entries.
    # Python 2 module (``except Exception, e`` syntax, print statement).
    time_stamp = 1523514396
    try:
        redis = self.get_redis()
        pipe = redis.pipeline()
        key_history = 'history-set-test'
        key_result_ids = 'result-id-set-test'
        key_result_ids_temp = 'result-id-set-test-temp'
        key_result_objs = 'result-obj-hash-test'
        # WATCH both inputs, then queue the whole merge as one transaction.
        pipe.watch(key_result_ids, key_history)
        pipe.multi()
        # history weight 0 + MIN aggregate zeroes out ids already seen.
        pipe.zunionstore(key_result_ids_temp, {key_result_ids:1, key_history:0}, aggregate='MIN')
        pipe.zremrangebyscore(key_result_ids_temp, 0, 0)
        # Keep only the first x entries.
        pipe.zremrangebyrank(key_result_ids_temp, x, -1)
        pipe.zrangebyscore(key_result_ids_temp, '-inf', '+inf')
        # Fold survivors into history with the timestamp as score; MAX keeps
        # the newest stamp for ids already present.
        pipe.zunionstore(key_history, {key_result_ids_temp:time_stamp, key_history:1}, aggregate='MAX')
        pipe.zremrangebyrank(key_history, 800, -1)
        results = pipe.execute()
        pipe.unwatch()
        #results = redis.zrange(key_result_ids, 0, 10, withscores=True)
        #results = redis.zrange(key_history, -10, -1, withscores=True)
        #results = redis.zrange(key_result_ids_temp, 0, x - 1, withscores=True)
        print results
    #except redis.exceptions.WatchError:
    except Exception, e:
        # NOTE(review): all errors (including WatchError) are printed and
        # swallowed — no retry/discard policy has been decided yet.
        print e
        pass
def save_prefix_index(self, redis=None, pinyin_match=True):
    """Index every alias (and optionally its pinyin forms) for completion.

    Each index word contributes all of its proper prefixes plus the word
    with a "*" terminator to the completion zset; the alias and its full
    pinyin string also get id-membership sets.
    """
    complete_key = mk_complete_key(self.type)
    pipe = redis.pipeline()
    for alias in self.aliases:
        index_words = [alias]
        pipe.sadd(mk_sets_key(self.type, alias), self.id)
        if pinyin_match:
            syllables = split_pinyin(alias)
            full_pinyin = "".join(syllables)
            initials = "".join(s[0] for s in syllables if len(s) > 0)
            index_words.append(full_pinyin)
            index_words.append(initials)
            pipe.sadd(mk_sets_key(self.type, full_pinyin), self.id)
        for w in index_words:
            for cut in range(1, len(w)):
                pipe.zadd(complete_key, 0, w[0:cut])
            pipe.zadd(complete_key, 0, w + "*")
    pipe.execute()
def purchase_item(redis, buyerid, itemid, sellerid, lprice):
    """Buy *itemid* from *sellerid* at the expected price *lprice*.

    WATCHes the market and the buyer so the purchase only commits if neither
    changed. Returns 1 on success, 2 on timeout (5s of contention), 3 when
    the item/funds are missing, the price changed, or funds are insufficient.
    """
    buyer = "test:users:%s" % str(buyerid)
    seller = "test:users:%s" % str(sellerid)
    item = "%s.%s" % (str(itemid), str(sellerid))
    inventory = "test:inventory:%s" % str(buyerid)
    end = time.time() + 5
    p = redis.pipeline()
    while time.time() < end:
        try:
            # Watch the market (item still listed?) and the buyer (funds).
            p.watch("test:market:", buyer)
            price = p.zscore("test:market:", item)
            raw_funds = p.hget(buyer, "funds")
            # BUG FIX: int(None) raised TypeError when the buyer hash or
            # field was missing, and a delisted item (price None) slipped
            # past the comparison. Treat either as "cannot buy", matching
            # the lock-based variant of this function.
            funds = int(raw_funds) if raw_funds is not None else None
            if (price is None or funds is None or funds < 0
                    or price != lprice or price > funds):
                p.unwatch()
                return 3
            # Transfer funds to the seller and the item to the buyer.
            p.multi()
            p.hincrby(seller, "funds", int(price))
            p.hincrby(buyer, "funds", -int(price))
            p.sadd(inventory, itemid)
            p.zrem("test:market:", item)
            p.execute()
            return 1
        except WatchError:
            # Market or buyer changed concurrently — retry.
            pass
    return 2
def leave_chat(redis, chat_id, user):
    """Remove *user* from chat *chat_id*, cleaning up as needed.

    If the user was the last member, the whole chat (message zset and id
    counter) is deleted. Otherwise, messages already read by every remaining
    member are trimmed from the message zset.
    """
    key_chat_group = CHAT_GRUOP.format(chat_id=chat_id)
    key_user_seen = CHAT_USER_SEEN.format(user=user)
    key_chat_message_ids = CHAT_MESSAGE_GEN_IDS.format(chat_id=chat_id)
    key_chat_message = CHAT_MESSAGE.format(chat_id=chat_id)
    p = redis.pipeline()
    p.zrem(key_chat_group, user)
    p.zrem(key_user_seen, chat_id)
    # Remaining member count decides between teardown and trim.
    p.zcard(key_chat_group)
    if not p.execute()[-1]:
        # Last member left — remove the chat entirely.
        p.delete(key_chat_message)
        p.delete(key_chat_message_ids)
        p.execute()
    else:
        # The lowest seen-score among remaining members marks the newest
        # message everyone has read.
        oldest = redis.zrange(key_chat_group, 0, 0, withscores=True)
        # BUG FIX: the original trimmed key_chat_group (the member zset),
        # deleting members instead of read messages. Trim the message zset.
        redis.zremrangebyscore(key_chat_message, 0, oldest[0][1])
def set_hash(self, model, retries=None):
    '''
    Set the hash for an object...

    Atomically (WATCH/MULTI) replaces the stored hash pointer for *model*:
    deletes the key named after the previous hash value, stores the new
    hash under '<obj_key>:hash', and writes the reverse mapping
    hash -> obj_key.

    NOTE(review): ``retries`` is accepted but never used — the loop retries
    indefinitely on WatchError.
    '''
    obj_key = self.build_key(model)
    hash_key = '{key}:hash'.format(key=obj_key).encode('utf-8')
    model_hash = model.hash
    with self as redis:
        with redis.pipeline() as pipe:
            while 1:
                try:
                    pipe.watch(hash_key)
                    # Immediate-mode read while under WATCH.
                    old_hash = pipe.get(hash_key)
                    pipe.multi()
                    if old_hash:
                        # Drop the stale reverse-mapping key.
                        pipe.delete(old_hash)
                    pipe.set(hash_key, model_hash)
                    pipe.set(model_hash, obj_key.encode('utf-8'))
                    pipe.execute()
                    break
                except WatchError:
                    # hash_key changed concurrently — redo the sequence.
                    continue
def info_release_product(token): redis = brconfig.getRedis() pipe = redis.pipeline(transaction=True) #Get accessible product list result = getAccessibleProducts(token) if result == None: print "No accessible products!" return {"error": "No accessible products!"} if ('error' in result): print "Error in result:%s" % result['error'] return result products = result platforms = redis.sort('set:platform:names', alpha=True) for platform in platforms: pipe.smembers('set:%s:products' % platform) ret = pipe.execute() results = {} i = 0 for platform in platforms: l = list(set(products) & set(ret[i])) if (len(l) > 0): results[platform] = l i += 1 return results
def submit():
    """Flag submission endpoint.

    GET renders the submission form; POST decodes the flag, records the
    score in the Google sheet, marks the result in redis, and redirects to
    the main page. Decode failures re-render the form with the error.
    """
    if request.method == "GET":
        return render_template("submit.html")
    flag = request.form["flag"]
    try:
        challenge, date = decode_flag(flag)
    except ValueError as ex:
        return render_template("submit.html", error_message=ex.args[0])
    user_id = int(session["gitlab"]["id"])
    user = gitlab_api.users.get(user_id)
    login = user.username
    repo_url = GITLAB_URL + "/" + GITLAB_GROUP + "/" + login
    sheet = gdoc.get_sheet()
    deadline_extended = gdoc.is_deadline_extended_for_login(sheet, login)
    task_score = get_task_score(challenge, deadline_extended)
    gdoc.put_score_in_gdoc(sheet, challenge, login, task_score, user.name,
                           repo_url)
    result_key = "result:{}:{}".format(login, challenge)
    pipe = redis.pipeline()
    pipe.hset(result_key, "ok", "1")
    pipe.hset(result_key, "flag", flag)
    pipe.execute()
    return redirect(url_for("main_page"))
def getYearData(year):
    """Aggregate per-month error/live counts for a 4-digit *year* string.

    Returns {"MM": {"error": n, "live": n, "link": ...}} built from the
    daily "ids:date:YYYYMMDD" keys, or an error dict on bad input.
    """
    if len(year) != 4:
        return {"error": "Invalid parameter"}
    redis = brconfig.getRedis()
    # FIX: removed a transactional pipeline that was created but never used.
    keyList = redis.keys("ids:date:%s????" % year)
    monthList = []
    for key in keyList:
        # chars 9..14 of "ids:date:YYYYMMDD" == "YYYYMM"; assumes str keys
        # (decode_responses) — TODO confirm client config.
        monthList.append(key[9:15])
    monthList = list(set(monthList))  # remove duplicates
    yearData = {}
    for month in monthList:
        monthData = getMonthData(month)
        errorCount = 0
        liveCount = 0
        for item in monthData:
            errorCount += monthData[item]["error"]
            liveCount += monthData[item]["live"]
        yearData[month[4:6]] = {
            "error": errorCount,
            "live": liveCount,
            "link": "summary?month=%s" % month
        }
    return yearData
def getTimeFilteredSet(start, end, tmpSetName):
    """Union daily "ids:date:YYYYMMDD" sets covering [start, end] into
    *tmpSetName*.

    *start*/*end* are unix seconds; 0 means 2012-01-01 / now respectively.
    Returns the set name, or None when no days were covered.
    """
    start = int(start)
    end = int(end)
    if start == 0:
        start = int(datetime.date(2012, 1, 1).strftime("%s"))
    if end == 0:
        end = int(time.time())
    aday = 3600 * 24
    sets = []
    redis = brconfig.getRedis()
    pipe = redis.pipeline(transaction=True)
    # BUG FIX: the original recomputed ``day`` after the loop test, so the
    # set for one day past ``end`` was also unioned in. Step after use and
    # let the test gate each append.
    day = start
    while (day <= end):
        datestr = date.fromtimestamp(day).strftime("%Y%m%d")
        sets.append("ids:date:%s" % datestr)
        day += aday
    if len(sets) > 0:
        pipe.sunionstore(tmpSetName, sets)
        pipe.execute()
        return tmpSetName
    else:
        return None
def getMonthData(month):
    """Per-day error/live counts for a 'YYYYMM' month string.

    Returns {"DD": {"error": n, "live": n, "link": ...}} or an error dict
    on bad input. Days are discovered from the "ids:error:YYYYMMDD" keys.
    """
    if len(month) != 6:
        return {"error": "Invalid parameter"}
    redis = brconfig.getRedis()
    pipe = redis.pipeline(transaction=True)
    pipe.keys("ids:error:%s??" % month)
    pipe.keys("ids:live:%s??" % month)
    errorList, liveList = pipe.execute()
    dayList = []
    for key in errorList:
        # Queue SCARD pairs: error set, then the matching live set
        # (key[10:18] == "YYYYMMDD", key[16:18] == "DD").
        pipe.scard(key)
        pipe.scard("ids:live:%s" % key[10:18])
        dayList.append(key[16:18])
    result = pipe.execute()
    data = {}
    # FIX: use floor division so this also runs under Python 3, where "/"
    # yields a float and range() would raise TypeError (identical in py2).
    retLen = len(result) // 2
    for i in range(retLen):
        data[dayList[i]] = {
            "error": result[i * 2],
            "live": result[i * 2 + 1],
            "link": "errors?date=%s%s" % (month, dayList[i])
        }
    return data
def player_latest_game(username):
    """Return JSON carrying the id of *username*'s most recent game."""
    pipe = redis.pipeline()
    pipe.lindex('player:%s:games' % username, -1)
    (latest,) = pipe.execute()
    return jsonify(last=latest)
def purchase_item(redis, buyerid, itemid, sellerid, lprice):
    """Lock-based variant of the purchase flow.

    Acquires a lock on the market, verifies price and funds, then transfers
    money and the item in one transactional pipeline. Returns 1 on success,
    2 when the lock cannot be acquired, 3 on price/funds mismatch, 4 on an
    unexpected error during the transfer.
    """
    buyer = "test:users:%s" % str(buyerid)
    seller = "test:users:%s" % str(sellerid)
    item = "%s.%s" % (str(itemid), str(sellerid))
    inventory = "test:inventory:%s" % str(buyerid)
    market = "test:market:"
    # Serialise all market access behind one lock.
    ident = acquire_lock(redis, market, acquire_timeout=10)
    if not ident:
        return 2
    p = redis.pipeline(True)
    try:
        p.zscore(market, item)
        p.hget(buyer, "funds")
        price, funds = p.execute()
        # Missing listing/funds, changed price, or insufficient balance.
        if price is None or funds < 0 or price != lprice or price > funds:
            return 3
        p.hincrby(seller, "funds", int(price))
        p.hincrby(buyer, "funds", -int(price))
        p.sadd(inventory, itemid)
        p.zrem("test:market:", item)
        p.execute()
        return 1
    except Exception:
        # FIX: was a bare ``except:`` which also swallowed SystemExit /
        # KeyboardInterrupt; keep the best-effort error code for real errors.
        return 4
    finally:
        release_lock(redis, market, ident)
def prepare_process_flow(self, num=1000):
    # Seed the test keys used by test_process_flow: wipe the three keys,
    # fill the result-id zset with ``num`` tiny ascending scores, and the
    # result hash with ``num`` ~3.2KB string payloads.
    # Python 2 module (``except Exception, e`` syntax, print statement).
    # NOTE(review): ``time_stamp`` is unused here — confirm it can go.
    time_stamp = 1523514396
    v = 'f382460a7c7c3597ef34b139ecda7e59'
    value = ''
    for i in range(0, 100):
        value += v
    try:
        redis = self.get_redis()
        pipe = redis.pipeline()
        key_history = 'history-set-test'
        key_result_ids = 'result-id-set-test'
        key_result_objs = 'result-obj-hash-test'
        pipe.delete(key_history)
        pipe.delete(key_result_ids)
        pipe.delete(key_result_objs)
        #for i in range(0, num/2):
        #    pipe.zadd(key_history, '10000000000', str(2 * i))
        for i in range(0, num):
            # Legacy redis-py zadd(name, score, member) argument order;
            # scores are distinct and ascending but all just above 1.
            pipe.zadd(key_result_ids, (float)(i) / 1000000000000.0 + 1, i)
        for i in range(0, num):
            pipe.hset(key_result_objs, str(i), value)
        pipe.execute()
    except Exception, e:
        # NOTE(review): best-effort — all errors are printed and swallowed.
        print e
def game_info(game_id):
    """Return JSON describing a game: players, ranks, log path, status."""
    p = redis.pipeline()
    p.hget('game:%s' % game_id, 'p1')
    p.hget('game:%s' % game_id, 'p2')
    p.hget('game:%s' % game_id, 'elodiff')
    player1, player2, elodiff = p.execute()

    def _rank_of(player):
        # 1-based scoreboard rank, or -1 when the player is unranked.
        r = redis.zrank('scoreboard', player)
        return -1 if r is None else r + 1

    # FIX: deduplicated the copy-pasted rank logic for both players.
    rank1 = _rank_of(player1)
    rank2 = _rank_of(player2)
    # FIX: floor division keeps the log path correct under Python 3 as well
    # ("/" would produce a float and break the %08d format); identical in py2.
    game_log_name = "log/%08d/%04d.json" % (game_id // 1000, game_id % 1000)
    return jsonify(
        game_id=game_id,
        game_log_name=game_log_name,
        player1=player1,
        player2=player2,
        rank1=rank1,
        rank2=rank2,
        # A stored elodiff marks the game as finished.
        finished=elodiff is not None,
        elodiff=(float(elodiff) if elodiff else None),
    )
def rate(user_id, value, film_id=None, actor_id=None, director_id=None, type=1, overwrite=True, check_if_exists=False):
    """Store (or clear, when *value* is falsy) a user's rating.

    Writes the rating under the user hash, a global per-target ratings hash,
    and a direct per-user/per-target key. Returns the previous value when
    existence was checked and overwrite declined, else None.
    """
    user_key = "user:%s:ratings" % user_id
    key = _rating_key(film_id, actor_id, director_id, type)
    assert user_id
    if not overwrite or check_if_exists:
        exists = redis.get("user:%s:rating:%s" % (user_id, key)) is not None
        if not overwrite and exists:
            return exists
    else:
        exists = None
    with redis.pipeline() as pipe:
        if value:
            if type == 1 and film_id:
                pipe.hset(user_key, film_id, value)
            pipe.sadd("users", user_id)
            pipe.hset("ratings:%s" % key, user_id, value)
            pipe.set("user:%s:rating:%s" % (user_id, key), value)
        else:
            if type == 1:
                pipe.hdel(user_key, film_id)
            # BUG FIX: the delete path used "ratings" + key (no colon),
            # which never matched the "ratings:%s" hash written above, so
            # cleared ratings lingered in the global hash.
            pipe.hdel("ratings:%s" % key, user_id)
            pipe.delete("user:%s:rating:%s" % (user_id, key))
        pipe.execute()
    return exists
def update_redis_key(redis, key, delta):
    """Atomically add *delta* to the integer value stored at *key*.

    Uses WATCH/MULTI so concurrent writers cannot cause a lost update; a
    missing key is treated as 0.

    redis (redis.client.StrictRedis): an established connection.
    key (string): the key to update.
    delta (int): the amount to add.
    """
    with redis.pipeline() as pipe:
        while True:
            try:
                pipe.watch(key)
                stored = pipe.get(key)  # immediate-mode read under WATCH
                base = int(stored) if stored is not None else 0
                pipe.multi()
                pipe.set(key, base + delta)
                pipe.execute()
                break
            except WatchError:
                # Someone else wrote the key — redo the read-modify-write.
                continue
def acquire_semaphore(redis, semname, limit, timeout=10):
    """Try to take one of *limit* slots in a timed counting semaphore.

    Returns a holder identifier on success, None when the semaphore is full.
    This is an *unfair* semaphore: slot ordering depends on each client's
    own clock, so clients with slow clocks can be starved of slots they
    should have received.
    """
    zset_key = "semaphore:" + semname
    holder = str(uuid.uuid4())
    now = time.time()
    pipe = redis.pipeline(True)
    # Evict holders whose timestamps have expired.
    pipe.zremrangebyscore(zset_key, '-inf', now - timeout)
    # Claim a slot and check where we landed.
    pipe.zadd(zset_key, {holder: now})
    pipe.zrank(zset_key, holder)
    if pipe.execute()[-1] < limit:
        return holder
    # Ranked past the limit — withdraw the claim.
    redis.zrem(zset_key, holder)
    return None
def save(self):
    """Persist this host's hash entry, register it in the 'hosts' set, then
    refresh the external configuration."""
    with redis.pipeline() as pipe:
        pipe.hmset('host:' + self.name, {'host': self.host})
        pipe.sadd('hosts', self.name)
        pipe.execute()
    self._update_ext_config()
def search_titles(query, en_pipe, rd_conn, model_key):
    # Nearest-neighbour title search: embed *query* with the NLP pipeline,
    # load a pickled sklearn-style neighbours model from redis, then fetch
    # each neighbour's stored value (by index key) via one pipeline.
    # NOTE(review): ``rd_conn`` is accepted but unused — the module-level
    # ``redis`` client is used instead; confirm which is intended.
    # SECURITY: pickle.loads on data read from redis executes arbitrary
    # code if the redis instance is not fully trusted.
    redis_pipe = redis.pipeline()
    query_vector = en_pipe(query).vector.reshape(1, -1)
    n_neigbors = pickle.loads(redis.get(model_key))
    _, indices = n_neigbors.kneighbors(query_vector)
    del n_neigbors  # release the (potentially large) model immediately
    for index in indices[0]:
        redis_pipe.get(bytes(str(index), encoding='utf-8'))
    return redis_pipe.execute()
def do_except(uid_tid_lid): (redis.pipeline().lpush(EDM_WEB_MAIL_IMPORT_ERROR_QUEUE, uid_tid_lid).hincrby(COMMON_VAR_COUNT_HASH, 'edm_web_mail_import_couont', -1).execute()) # redis.rpush(EDM_WEB_MAIL_IMPORT_ERROR_QUEUE, uid_tid_lid) # redis.hincrby(COMMON_VAR_COUNT_HASH, 'edm_web_mail_import_couont', -1) close_connect() sys.exit(1)
def _sadd_multi(self, key, values, domain):
    """SADD every value under the resolved set name in one pipeline, notify
    listeners, and return the per-command results list."""
    name = self._make_name(key, domain)
    client = self._get_redis(name)
    pipe = client.pipeline()
    for item in values:
        pipe.sadd(name, self._data_to_string(item))
    results = pipe.execute()
    self._notify(key, values, domain, self.SADD_MULTI)
    return results
def __init__(self, key_prefix, limit, per, send_x_headers):
    """Fixed-window rate-limit bucket: one INCR per hit on a key scoped to
    the current window, expired shortly after the window resets."""
    window_end = (int(time.time()) // per) * per + per
    self.reset = window_end
    self.key = key_prefix + str(window_end)
    self.limit = limit
    self.per = per
    self.send_x_headers = send_x_headers
    pipe = redis.pipeline()
    pipe.incr(self.key)
    pipe.expireat(self.key, window_end + self.expiration_window)
    # Cap the reported count at the limit.
    self.current = min(pipe.execute()[0], limit)
def __init__(self, redis=None, prefix='bayes:', correction=0.1, tokenizer=None):
    """Bayes classifier backed by redis.

    :param redis: redis client; a default ``redis.Redis()`` is created when
        not supplied
    :param prefix: key prefix for classifier data
    :param correction: smoothing factor
    :param tokenizer: tokenizer callable (defaults to english_tokenizer)
    """
    self.prefix = prefix
    self.correction = correction
    self.tokenizer = tokenizer or english_tokenizer
    if not redis:
        # Local import so the library is only required for the default client.
        import redis as redis_module
        redis = redis_module.Redis()
    # BUG FIX: the original assigned self.redis before creating the default
    # client, leaving self.redis = None while self.rpipe used a fresh
    # client. Assign after the fallback so both refer to the same client.
    self.redis = redis
    self.rpipe = redis.pipeline()
def __setitem__(self, id, data):
    """Replace item *id*'s payload, stamping both its mtime and the
    collection mtime; KeyError when *id* is not a member."""
    client = self.redis
    if not client.sismember(self.key, id):
        raise KeyError(id)
    stamp = str(time.time())
    pipe = client.pipeline()
    pipe.set(id, data)
    pipe.set('%s.mtime' % id, stamp)
    pipe.set(self.mtime_key, stamp)
    pipe.execute()
def recent_gists(start=None):
    """Return the JSON-decoded payloads of up to 31 recent gists.

    *start* is the 0-based offset into the reverse-chronological index
    (defaults to 0); entries with a missing payload are skipped.
    """
    start = 0 if start is None else start
    # FIX: the old ``start + 30 if start is not None else -1`` conditional
    # was dead — start is never None at this point.
    end = start + 30
    pipe = redis.pipeline()
    for gist_id in redis.zrevrange('index', start, end):
        pipe.hmget("gist:#" + str(gist_id), 'payload')
    # BUG FIX: HMGET always returns a one-element list (never None), so the
    # old ``x is not None`` filter let missing payloads through and
    # json.loads(None) raised. Filter on the payload element itself.
    return map(lambda x: json.loads(x[0]),
               filter(lambda x: x[0] is not None, pipe.execute()))
def mark_online(user_id):
    """Record *user_id* as active in the current minute's online-users set
    and refresh its activity timestamp; both keys expire automatically."""
    timestamp = int(time.time())
    ttl_at = timestamp + (app.config['ONLINE_LAST_MINUTES'] * 60) + 10
    minute_set = 'online-users/%d' % (timestamp // 60)
    activity_key = 'user-activity/%s' % user_id
    pipe = redis.pipeline()
    pipe.sadd(minute_set, user_id)
    pipe.set(activity_key, timestamp)
    pipe.expireat(minute_set, ttl_at)
    pipe.expireat(activity_key, ttl_at)
    pipe.execute()
def mark_online(user_id):  # Mark a user as online.
    now = int(time.time())  # current unix timestamp
    expires = now + (app.config['ONLINE_LAST_MINUTES'] * 60) + 10  # expiry unix timestamp
    all_users_key = 'online-users/%d' % (now // 60)  # per-minute set name
    user_key = 'user-activity/%s' % user_id
    p = redis.pipeline()
    p.sadd(all_users_key, user_id)  # add the user id to the current minute's set
    p.set(user_key, now)  # record the user's last-seen time
    p.expireat(all_users_key, expires)  # expire the minute set at the timestamp
    p.expireat(user_key, expires)
    p.execute()
def log(host, data):
    """Store ansible facts for *host* as fields of a redis hash.

    Only dict payloads carrying an 'ansible_facts' mapping are stored;
    everything else is ignored.
    """
    # isinstance also accepts dict subclasses (the old type()==dict did not).
    if isinstance(data, dict):
        facts = data.get('ansible_facts')
        # BUG FIX: a dict without 'ansible_facts' previously crashed with
        # TypeError when iterating None.
        if facts is None:
            return
        redis_pipe = redis.pipeline()
        for fact in facts:
            # Stores all fact values, not only strings (see disabled filter
            # below, kept from the original).
            #if isinstance(facts[fact], basestring):
            redis_pipe.hset(host, fact, facts[fact])
        #redis_pipe.expire(host, FACT_EXPIRATION)
        redis_pipe.execute()
def mark_online(user_ip, cookie_id):
    """Track an anonymous visitor by IP and cookie.

    Adds the IP to the current minute's online set, the cookie to that
    minute's per-IP set, and refreshes the cookie's activity timestamp;
    all three keys expire automatically.
    """
    ts = int(time.time())
    deadline = ts + (ONLINE_LAST_MINUTES * 60) + 10
    minute = ts // 60
    online_key = 'online-users/%d' % minute
    per_ip_key = 'ip-users/%d/%s' % (minute, user_ip)
    activity_key = 'user-activity/%s' % cookie_id
    pipe = redis.pipeline()
    pipe.sadd(online_key, user_ip)
    pipe.sadd(per_ip_key, cookie_id)
    pipe.set(activity_key, ts)
    pipe.expireat(online_key, deadline)
    pipe.expireat(per_ip_key, deadline)
    pipe.expireat(activity_key, deadline)
    pipe.execute()
def clearPagingCache():
    """Drop expired paging tokens and their temporary result sets.

    Tokens live in the 'paging_token' hash mapping token -> expiry time;
    expired entries and their "tmp_paging_<token>" sets are deleted in one
    transactional pipeline.
    """
    key = 'paging_token'
    redis = getRedis()
    pipe = redis.pipeline(transaction=True)
    now = time.time()
    # PERF FIX: one HGETALL instead of HKEYS plus one HGET per token
    # (an N+1 round-trip pattern).
    for token, expireTime in redis.hgetall(key).items():
        if int(expireTime) < now:
            pipe.hdel(key, token)
            pipe.delete("tmp_paging_%s" % token)
    pipe.execute()
def __getitem__(self, id):
    """Fetch item *id* (payload plus mtime); KeyError when not a member."""
    client = self.redis
    if not client.sismember(self.key, id):
        raise KeyError(id)
    pipe = client.pipeline()
    pipe.get(id)
    pipe.get('%s.mtime' % id)
    payload, stamp = pipe.execute()
    item = Item(payload)
    item.mtime = float(stamp)
    item.__parent__ = self
    item.__name__ = id
    return item
def getUserData(phonenumber=None, imsi=None):
    # Per-day error/live counts for one user (looked up by phone number or
    # IMSI) over the last 30 days. Returns None when the user has no index
    # key. Python 2 module: print statement, integer division.
    if phonenumber==None and imsi==None:
        return None
    redis=brconfig.getRedis()
    #redis=getRedis()
    pipe=redis.pipeline(transaction=True)
    keyName=None
    backupKeyName=None
    if phonenumber != None:
        user=phonenumber
        queryUrl="errors?phoneNumber=%s&date="%user
        keyName="ids:i:phoneNumber:%s"%phonenumber
        backupKeyName="ids:i:phoneNumber:+86%s"%phonenumber
    elif imsi!=None:
        user=imsi
        queryUrl="errors?imsi=%s&date="%user
        keyName="ids:i:phoneNumber:IMSI:%s"%imsi
    else:
        return None
    ret=redis.exists(keyName)
    if not ret:
        # Fall back to the +86-prefixed phone key. NOTE(review): on the IMSI
        # path backupKeyName is still None, so this probes the literal key
        # "None" — confirm IMSI misses are meant to just return None.
        keyName=backupKeyName
        ret=redis.exists(keyName)
        if not ret:
            return None
    aday=3600*24
    current=int(time.time())
    current=(current/aday)*aday#Get today start
    #enday=current-aday*30
    start=current
    end=current
    #i=0
    print keyName
    data={}
    for i in range(30):
        # Intersect the user's id set with each day's error/live sets.
        start=current-i*aday
        timestr=date.fromtimestamp(start).strftime("%Y%m%d")
        errorList=redis.sinter(keyName,"ids:error:%s"%timestr)
        liveList=redis.sinter(keyName,"ids:live:%s"%timestr)
        data[timestr]={"error":len(errorList),"live":len(liveList),"link":queryUrl+timestr}
    return data
def count_prefix(prefix):
    # Count keys matching *prefix* (a redis glob pattern) with a server-side
    # Lua script; returns 0 when no redis connection is available.
    # NOTE: the script uses KEYS, which is O(total keyspace) and blocks the
    # server — acceptable for admin/debug use, avoid on hot paths.
    redis = get_redis()
    if redis:
        text = """
        local n = 0
        for _,k in ipairs(redis.call('keys', ARGV[1])) do
            n = n + 1
        end
        return n
        """
        script = redis.register_script(text)
        pipe = redis.pipeline()
        # Queue the script through a pipeline; execute() yields a
        # one-element result list.
        script(args=[prefix], client=pipe)
        r = pipe.execute()
        return r[0]
    return 0
def remove(redis=None, options={}):
    """Delete an indexed instance and all of its search-index entries.

    *options* must carry 'type' and 'id'. (The mutable-default signature is
    kept for interface compatibility; the dict is never mutated.)
    """
    if redis == None or 'type' not in options.keys() or \
       'id' not in options.keys():
        return
    instance_type = options['type']
    instance_id = options['id']
    raw = redis.hget(instance_type, instance_id)
    if raw == None:
        return
    title = json.loads(raw)['title']
    redis.hdel(instance_type, instance_id)
    index_words = Index.split_words_for_index(title)
    pipe = redis.pipeline()
    for word in index_words:
        pipe.srem(mk_sets_key(instance_type, word), instance_id)
    pipe.delete(mk_score_key(instance_type, instance_id))
    pipe.srem(mk_sets_key(instance_type, title), instance_id)
    pipe.execute()
def isAccessible(token, record_id):
    '''
    Membership test: is *record_id* in the token's accessible set?

    Not a good implementation — it builds (and deletes) a throwaway
    accessible set on every call.
    '''
    temp_name = 'tmp_%s' % str(uuid.uuid4())
    temp_name = getAccessibleSet(token, temp_name)
    if temp_name == None:
        return False
    redis = brconfig.getRedis()
    pipe = redis.pipeline(transaction=True)
    pipe.sismember(temp_name, record_id)
    pipe.delete(temp_name)
    results = pipe.execute()
    if results[0]:
        return True
    return False
def list_item(conn, sellerid, itemid, price):
    """List an item on the market, WATCHing the seller's inventory.

    Returns True on success, None when the item is not in the inventory,
    False when 5 seconds of contention retries are exhausted.
    """
    inventory = 'inventory:%s' % sellerid   # e.g. inventory:17
    item = '%s.%s' % (itemid, sellerid)     # e.g. ItemA.3
    end = time.time() + 5
    # BUG FIX: the pipeline was created from the module-level ``redis`` name
    # (the library module, which has no .pipeline()) instead of the ``conn``
    # argument, so the passed-in connection was ignored.
    pipe = conn.pipeline()
    while time.time() < end:  # retry on contention for up to 5 seconds
        try:
            # Watch the seller's inventory for concurrent changes.
            pipe.watch(inventory)
            if not pipe.sismember(inventory, itemid):
                # UNWATCH is valid after WATCH and before MULTI.
                pipe.unwatch()
                return None
            pipe.multi()
            pipe.zadd('market:', item, price)
            pipe.srem(inventory, itemid)
            pipe.execute()
            return True
        except redis.exceptions.WatchError:
            pass
    return False
def items(self):
    """Return sorted (id, Item) pairs for every member of the collection.

    WARNING: expensive for large collections — fetches every payload and
    mtime (two GETs per member) through one pipeline.
    """
    client = self.redis
    member_ids = client.smembers(self.key)
    if not member_ids:
        return []
    member_ids = sorted(member_ids)
    pipe = client.pipeline()
    for member in member_ids:
        pipe.get(member)
        pipe.get('%s.mtime' % member)
    flat = pipe.execute()
    result = []
    for pos, member in enumerate(member_ids):
        item = Item(flat[2 * pos])
        item.mtime = float(flat[2 * pos + 1])
        item.__parent__ = self
        item.__name__ = member
        result.append((member, item))
    return result
def getAccessibleSet(token,tmpSetName,ids=None): ''' Generate the accessible set for the given token, and return the temperorary set name. The set should be remove after use. But if for the optimization reason, the remove can be delay. ''' products=getAccessibleProducts(token) if ('error' in products): print "getAccessibleSet encounter error:%s"%products return products setName=generateSet(products,tmpSetName) if setName==None: print "Generate set fail!" return None if ids==None: return setName else: redis=brconfig.getRedis() pipe=redis.pipeline(transaction=True) tmpIds="tmp_%s" % str(uuid.uuid4()) tmpResult="tmp_%s" % str(uuid.uuid4()) for id in ids: pipe.sadd(tmpIds,id) pipe.sinterstore(tmpResult,setName,tmpIds) ret=pipe.execute() if (ret[-1]==0): pipe.delete(tmpResult) tmpResult=None pipe.delete(setName) pipe.delete(tmpIds) pipe.execute() return tmpResult
def _schedule(self):
    # Main scheduling loop: drain the task queue, fan each task out to the
    # redis instance that owns its key, then notify interested listeners.
    # Runs forever; one outer iteration handles one drained batch.
    while 1:
        tasks = {}    # redis client -> list of (name, key, value, domain, type)
        notices = []  # (redis client, index into that client's results, task)
        while 1:
            # Each queued task is a (key, value, domain, type) tuple;
            # keep the whole tuple around as `task` for the notices.
            key, value, domain, type = task = self._queue.get()
            name = self._make_name(key, domain)
            # Route the task to the redis instance responsible for `name`.
            redis = self._get_redis(name)
            try:
                tasks[redis].append((name, key, value, domain, type))
            except KeyError:
                tasks[redis] = [(name, key, value, domain, type)]
            # Also fetch the per-name listener set ('@' prefix) so we know
            # whom to notify; remember where its result will land.
            tasks[redis].append(('@' + name, None, None, None, self.SGET))
            notices.append((redis, len(tasks[redis]) - 1, task))
            # The per-domain listener set ('#' prefix) may live on a
            # different redis instance.
            redis = self._get_redis(domain)
            try:
                tasks[redis].append(('#' + domain, None, None, None, self.SGET))
            except KeyError:
                tasks[redis] = [('#' + domain, None, None, None, self.SGET)]
            notices.append((redis, len(tasks[redis]) - 1, task))
            if self._queue.empty():
                break
        # Execute each instance's batch in a single pipeline round trip;
        # result order matches the order tasks were queued above.
        results = {}
        for redis, redis_tasks in tasks.items():
            pipe = redis.pipeline()
            for name, key, value, domain, type in redis_tasks:
                # Dispatch on task type to queue the matching redis command.
                self._processer[type](pipe, name, value)
            results[redis] = pipe.execute()
        # Group tasks by listener so each listener receives a single
        # notification carrying all of its tasks.
        task_bundle = {}
        for redis, location, task in notices:
            listeners = [self._string_to_data(listener)
                         for listener in results[redis][location]]
            for listener, listen_myself in listeners:
                # Skip ourselves unless we explicitly listen to our own events.
                if listener == self._myself.get_id() and not listen_myself:
                    continue
                try:
                    task_bundle[listener].append(task)
                except KeyError:
                    task_bundle[listener] = [task]
        for listener, tasks in task_bundle.items():
            self._notify_listeners(listener, tasks)
def getMonthData(month):
    '''
    Return per-day error/live counts for one month.

    @param month: month string in "YYYYMM" form (must be 6 characters).
    @return dict mapping day-of-month ("DD") to
            {"error": <error count>, "live": <live count>, "link": <drilldown url>},
            or {"error": "Invalid parameter"} for a malformed month.
    '''
    if len(month) != 6:
        return {"error": "Invalid parameter"}
    redis = brconfig.getRedis()
    pipe = redis.pipeline(transaction=True)
    # NOTE: KEYS is O(N) over the keyspace -- acceptable for an
    # admin-style monthly report, avoid on hot paths.
    pipe.keys("ids:error:%s??" % month)
    pipe.keys("ids:live:%s??" % month)
    errorList, liveList = pipe.execute()
    dayList = []
    for key in errorList:
        # key looks like "ids:error:YYYYMMDD": [10:18] is the full date,
        # [16:18] the day-of-month.
        pipe.scard(key)
        pipe.scard("ids:live:%s" % key[10:18])
        dayList.append(key[16:18])
    result = pipe.execute()
    data = {}
    # Replies come back as (error, live) pairs, one pair per day.
    # BUG FIX: floor division -- identical on py2 ints, and keeps
    # range() working if the file is ever run under py3.
    retLen = len(result) // 2
    for i in range(retLen):
        data[dayList[i]] = {
            "error": result[i * 2],
            "live": result[i * 2 + 1],
            "link": "errors?date=%s%s" % (month, dayList[i]),
        }
    return data
def product_summary(token, platform='4.0.4', callDropMode=False):
    '''
    Give a summary for all the accessible products.

    The summary data is either the error rate (default) or the call drop
    rate, computed per product for up to the 5 most recent revisions.

    @param token: access token used to resolve the accessible product list
    @param platform: android platform version, like: 2.3.3, 2.3.7, 4.0.3 or 4.0.4
    @param callDropMode: work mode flag; False -> error rate, True -> call drop rate
    @return list of per-product summary dicts, or a dict containing an
            "error" key when the product list cannot be resolved
    '''
    # TODO: To optimize, error count, live time, call drop and call count
    # can be computed before query.
    # Get accessible product list
    result = getAccessibleProducts(token)
    print "result:%s" % result
    if result == None:
        print "No accessible products!"
        return {"error": "No accessible products!"}
    if ('error' in result):
        print "Error in result:%s" % result['error']
        return result
    products = result
    MAX_REVISION_COUNT = 5
    redis = brconfig.getRedis()
    pipe = redis.pipeline(transaction=True)
    errorSet = 'ids:e'  # all errors without call drop
    liveTimeSet = 'ids:s:com.borqs.bugreporter:LIVE_TIME'
    callDropSet = 'ids:b:CALL_DROP'
    callCountSet = 'ids:s:com.borqs.bugreporter:CALL_COUNT'
    tmp = 'tmp_%s' % uuid.uuid4()  # scratch set name, deleted after phase 1
    # Restrict the accessible products to those present on this platform.
    platProducts = redis.smembers('set:%s:products' % platform)
    products = set(products) & set(platProducts)
    products = list(products)
    products.sort()
    # Pick numerator (countSet) and denominator (baseSet) key sets for the
    # requested mode.
    if callDropMode:
        mode = "calldrop"
        sumKey = "drop"
        countSet = callDropSet
        baseSet = callCountSet
    else:
        mode = "error"
        sumKey = "error"
        countSet = errorSet
        baseSet = liveTimeSet
    revisions = {}
    pLength = len(products)
    if pLength == 0:
        return []
    # Phase 1: for each product take up to MAX_REVISION_COUNT latest
    # revisions and queue two commands per revision:
    #   SINTERSTORE -> count of errors/drops for that revision
    #   SINTER      -> the ids whose base values we need in phase 2
    for product in products:
        revisionList = redis.sort('set:%s:%s:%s:revisions' % (platform, product, mode), alpha=True, desc=True)  # why alpha=True?
        # TODO: revisionList has at least one item, otherwise the product
        # name will not be listed here.
        sub = []
        length = min(MAX_REVISION_COUNT, len(revisionList))
        for j in range(length):
            sub.append(revisionList[j])
            pipe.sinterstore(tmp, ['ids:%s:%s:%s' % (platform, product, revisionList[j]), countSet])
            pipe.sinter(['ids:%s:%s:%s' % (platform, product, revisionList[j]), baseSet])
        revisions[product] = sub
    # The trailing DELETE reply is never indexed below, so the k-based
    # pairwise walk over `ret` stays aligned.
    pipe.delete(tmp)
    ret = pipe.execute()
    # Get the count data; And save the part result to a temporary result set.
    # TODO: Why use temporary result?
    # Phase 2: consume results pairwise (ret[k], ret[k+1]) per revision and
    # queue the lookups for the base values (livetime or call count).
    summary = {}
    k = 0
    for product in products:
        sub = revisions[product]
        total = 0
        subSummary = {}
        subSummary["product"] = product
        subSummary["mode"] = sumKey
        subSummary["sublist"] = {}
        length = min(MAX_REVISION_COUNT, len(sub))
        for j in range(length):
            revision = sub[j]
            subSummary["sublist"][revision] = {}
            subSummary["sublist"][revision]["revision"] = revision
            errorOrDropCount = ret[k]
            subSummary["sublist"][revision]["count"] = int(errorOrDropCount)
            total += errorOrDropCount
            if callDropMode:
                listLink = "/api/brquery/query/error?android.os.Build.VERSION.RELEASE=%s&sandroid.os.Build.PRODUCT=%s&ro.build.revision=%s&e_type=CALL_DROP" % (platform, product, revision)
            else:
                listLink = "/api/brquery/query/error?android.os.Build.VERSION.RELEASE=%s&android.os.Build.PRODUCT=%s&ro.build.revision=%s" % (platform, product, revision)
            subSummary["sublist"][revision]["link"] = listLink
            callOrLiveIdSet = list(ret[k + 1])
            if len(callOrLiveIdSet) == 0:
                # Keep the phase-3 result indexing aligned: queue a command
                # that produces a falsy reply when there is nothing to fetch.
                pipe.scard("ThisKeyWillNeverExist")  # always return 0
            else:
                # NOTE(review): HMGET may return None entries for missing
                # fields; int(None) in phase 3 would raise -- confirm the
                # 's:values' hash always holds every id.
                pipe.hmget('s:values', callOrLiveIdSet)
            k += 2
        subSummary["count"] = total
        subSummary["link"] = "/api/brquery/query/rate?groupby=ro.build.revision&android.os.Build.VERSION.RELEASE=%s&android.os.Build.PRODUCT=%s&mode=%s" % (platform, product, sumKey)
        summary[product] = subSummary
    ret = pipe.execute()
    # Get the base data(livetime or callcount), and compute rate.
    # Phase 3: one reply per revision this time; sum the values, convert
    # livetime seconds to hours in error mode, and derive the rate.
    k = 0
    for product in products:
        sub = revisions[product]
        total = 0
        length = min(MAX_REVISION_COUNT, len(sub))
        for j in range(length):
            revision = sub[j]
            valueSet = ret[k]
            count = 0
            # valueSet is either an HMGET value list or the SCARD 0 queued
            # above; 0 is falsy, so the loop is skipped for empty id sets.
            if valueSet:
                for value in valueSet:
                    count += int(value)
            total += count
            if not callDropMode:
                count = count / 3600  # livetime seconds -> hours
            summary[product]["sublist"][revision]['base'] = count
            if count == 0:
                summary[product]["sublist"][revision]['rate'] = 'N/A'
            else:
                summary[product]["sublist"][revision]['rate'] = '%s%%' % (summary[product]["sublist"][revision]['count'] * 100 / count)
            k += 1
        if not callDropMode:
            total = total / 3600  # livetime seconds -> hours
        summary[product]['base'] = total
        if total == 0:
            summary[product]['rate'] = 'N/A'
        else:
            summary[product]['rate'] = '%s%%' % (summary[product]['count'] * 100 / total)
    # Nothing queued since the last execute(); this flush is a no-op.
    pipe.execute()
    # Format the result
    result = []
    for product in products:
        subSummary = {}
        subList = []
        subSummary["product"] = summary[product]["product"]
        subSummary["mode"] = summary[product]["mode"]
        subSummary["count"] = summary[product]["count"]
        subSummary["base"] = summary[product]["base"]
        subSummary["rate"] = summary[product]["rate"]
        subSummary["link"] = summary[product]["link"]
        for revision in summary[product]["sublist"]:
            subList.append(summary[product]["sublist"][revision])
        subSummary["sublist"] = subList
        result.append(subSummary)
    return result