def addRoomCard2Member(redis, transNo):
    """ Add room cards to a member account """
    curTime = datetime.now()
    orderTable = ORDER_TABLE % (transNo)
    if not redis.exists(orderTable):
        log_util.debug('[%s][wechatPay][error] orderNo[%s] is not exists.' % (curTime, transNo))
        return False
    goodid, memberAccount = redis.hmget(orderTable, ('num', 'account'))
    rType = redis.hget(GOODS_TABLE % goodid, 'type')
    rType = int(rType) if rType else None
    if rType == 2:
        addRoomCard2Member4Type2(redis, curTime, orderTable, memberAccount)
        return
    cardNums, present_card = redis.hmget(orderTable, ('roomCards', 'presentCards'))
    if not present_card:
        present_card = 0
    try:
        present_card = int(present_card)
    except:
        present_card = 0
    #chargeNums = TYPE_2_CARS[rType]
    # Resolve the user table from the account, same as the old system
    account2user_table = FORMAT_ACCOUNT2USER_TABLE % (memberAccount)
    userTable = redis.get(account2user_table)
    groupId = redis.hget(userTable, 'parentAg')
    # Member ID
    id = userTable.split(':')[1]
    pipe = redis.pipeline()
    pipe.incrby(USER4AGENT_CARD % (groupId, id), (int(cardNums) + present_card))
    # Record the total amount of recharged cards
    if not redis.exists(USER4AGENT_RECHARGE % (groupId, id)):
        pipe.set(USER4AGENT_RECHARGE % (groupId, id), 0)
    pipe.incrby(USER4AGENT_RECHARGE % (groupId, id), int(cardNums))
    CardMoney = getCardMoney(redis, groupId)
    log_util.debug('[%s][wechatPay] recharge CardMoney[%s]' % (curTime, CardMoney))
    # Calculate the commission split amount
    countRateOfAgent(redis, groupId, int(cardNums), CardMoney)
    log_util.debug('[%s][wechatPay] recharge roomcards[%s] to account[%s] success' % (curTime, cardNums, memberAccount))
    roomCards = pipe.execute()[0]
    pipe = redis.pipeline()
    ymd = datetime.now().strftime("%Y-%m-%d")
    useDatas = [int(cardNums), 4, roomCards]
    useStr = ';'.join(map(str, useDatas))
    pipe.lpush(PLAYER_DAY_USE_CARD % (id, ymd), useStr)
    pipe.expire(PLAYER_DAY_USE_CARD % (id, ymd), SAVE_PLAYER_DAY_USE_CARD_TIME)
    pipe.execute()

def getUserData(phonenumber=None, imsi=None):
    if phonenumber == None and imsi == None:
        return None
    redis = brconfig.getRedis()
    #redis=getRedis()
    pipe = redis.pipeline(transaction=True)
    keyName = None
    backupKeyName = None
    if phonenumber != None:
        user = phonenumber
        queryUrl = "errors?phoneNumber=%s&date=" % user
        keyName = "ids:i:phoneNumber:%s" % phonenumber
        backupKeyName = "ids:i:phoneNumber:+86%s" % phonenumber
    elif imsi != None:
        user = imsi
        queryUrl = "errors?imsi=%s&date=" % user
        keyName = "ids:i:phoneNumber:IMSI:%s" % imsi
    else:
        return None
    ret = redis.exists(keyName)
    if not ret:
        keyName = backupKeyName
        ret = redis.exists(keyName)
        if not ret:
            return None
    aday = 3600 * 24
    current = int(time.time())
    current = (current / aday) * aday  # Get today start
    #enday=current-aday*30
    start = current
    end = current
    #i=0
    print keyName
    data = {}
    for i in range(30):
        start = current - i * aday
        timestr = date.fromtimestamp(start).strftime("%Y%m%d")
        errorList = redis.sinter(keyName, "ids:error:%s" % timestr)
        liveList = redis.sinter(keyName, "ids:live:%s" % timestr)
        data[timestr] = {
            "error": len(errorList),
            "live": len(liveList),
            "link": queryUrl + timestr
        }
    return data

def calc_deltas(level, month, newdf, redis, ac):
    '''
    Work out deltas (not used in app - just experiment for future possible ETL to database)
    '''
    if redis.exists("Cases." + level + "." + month):
        previousdf = ac.deserialize(redis.get("Cases." + level + "." + month))
    else:
        previousdf = EMPTY_DF
    previousdf.reset_index(inplace=True)
    newdf.reset_index(inplace=True)
    # Drop all zero rows
    previousdf = previousdf.loc[(previousdf['Cases'] + previousdf['Tests'] +
                                 previousdf['Hospital Cases'] +
                                 previousdf['Deaths within 28 Days of Positive Test']) > 0]
    newdf = newdf.loc[(newdf['Cases'] + newdf['Tests'] + newdf['Hospital Cases'] +
                       newdf['Deaths within 28 Days of Positive Test']) > 0]
    diff_df = pd.merge(newdf, previousdf, how='outer', indicator='Exist')
    print(diff_df.head())
    print(previousdf.memory_usage().sum())
    print(diff_df.memory_usage().sum())
    deltas = diff_df.loc[diff_df['Exist'] == 'left_only']
    redis.set("Deltas." + level + "." + month,
              ac.serialize(deltas).to_buffer().to_pybytes())

def getNiuniuTileTypeList(redis, start_date, end_date):
    """ Get the Niuniu (bull-bull) hand-type statistics for a date range """
    try:
        startDate = datetime.strptime(start_date, '%Y-%m-%d')
        endDate = datetime.strptime(end_date, '%Y-%m-%d')
    except:
        weekDelTime = timedelta(7)
        weekBefore = datetime.now() - weekDelTime
        startDate = weekBefore
        endDate = datetime.now()
    deltaTime = timedelta(1)
    res = []
    while startDate <= endDate:
        dateStr = startDate.strftime('%Y-%m-%d')
        if redis.exists(NIUNIU_GAME_OPERATE_BY_DAY_TABLE % dateStr):
            info = redis.hgetall(NIUNIU_GAME_OPERATE_BY_DAY_TABLE % dateStr)
            info['date'] = dateStr
            info['op'] = []
            total = 0
            for k, v in info.iteritems():
                if k[:5] == 'bull_':
                    total += int(v)
            info['total'] = total
            res.append(info)
        startDate += deltaTime
    res.reverse()
    return res

def genErrorSetByTime():
    redis = getRedis()
    pipe = redis.pipeline(transaction=True)
    #ids
    aday = 3600 * 24
    current = int(time.time())
    todaystart = (current / aday) * aday  # Get today start
    enday = int(datetime.date(2012, 1, 1).strftime("%s"))
    start = current
    end = current
    errorSet = "ids:e"
    redis.sdiffstore(errorSet, ['ids:b', 'ids:b:CALL_DROP'])
    i = 0
    while (start > enday):
        start = todaystart - i * aday
        end = start + aday
        timestr = datetime.date.fromtimestamp(start).strftime("%Y%m%d")
        ret = redis.exists("ids:date:%s" % timestr)
        if ret:
            pipe.sinterstore("ids:error:%s" % timestr, [errorSet, "ids:date:%s" % timestr])
            pipe.execute()
        i += 1

def submitCode(request):
    if request.method == "POST":
        code = request.POST.get("code")
        email = request.session["email"] if "email" in request.session else None
        if redis.exists(code):
            if redis.get(code).decode("utf-8") == email:
                customer = DBModel.WebsiteUsers.objects(Email=email).get()
                customer.isActive = True
                customer.save()
                helper = DBModel.Helpers()
                helper.Name = customer.Fname + " " + customer.Lname
                helper.Email = customer.Email
                helper.Password = customer.Password
                helper.isActive = True
                helper.save()
                request.session["islogin"] = True
                redis.delete(code)
                return HttpResponse(json.dumps({"status": "ok"}))
            else:
                return HttpResponse(json.dumps({"status": "code"}))
        else:
            return HttpResponse(json.dumps({"status": "code"}))

def handle_privmsg(client: IrcClient, ident, channel_name, msg):
    channel_name = normalize_channel_name(channel_name)
    if channel_name in channels and msg.lower().startswith('!news '):
        channel = channels[channel_name]
        while len(channel.newses) > MAX_NEWS_ENTRY_COUNT:
            channel.newses.pop()
        username = extract_username(ident)
        if redis.exists(str(username)):
            user = redis.hgetall(str(username))
            display_name = user['display_name']
        else:
            display_name = username
        channel.newses.appendleft(f'!news {display_name}: {msg[6:]}')
        try:
            if channel_language[channel] == "kr":
                client.privmsg(channel_name, f'@{username} 잠시후 메세지가 게임내 표시됩니다.')
            else:
                client.privmsg(
                    channel_name,
                    f'@{username} Your message will soon be displayed in few minutes.'
                )
        except Exception:
            client.privmsg(channel_name, f'@{username} 잠시후 메세지가 게임내 표시됩니다.')
            channel_language[channel] = "kr"

def job():
    global fetch_limit

    # Stats
    stat_inserted = 0
    stat_existing = 0

    results = soc.get(config.DATASET_IDENTIFIER, limit=fetch_limit)
    for result in results:
        # Assure that we have latitude and longitude
        if 'latitude' in result and 'longitude' in result:
            # Convert data into json format, so that it's a string
            serialized = json.dumps(result)
            # Check our cache
            if not redis.exists(serialized):
                database.insert(result)
                redis.set(serialized, 'true')
                stat_inserted += 1
            else:
                stat_existing += 1
    db.commit()
    print('[RETRIEVER] inserted: {}; pre-existing: {}'.format(
        stat_inserted, stat_existing))
    fetch_limit = 2000
    threading.Timer(config.PERIOD, job).start()

def _user_add(email, authenticator_secret, first_name, last_name):
    if redis.exists('user_id_by_email_' + email):
        _flash_log('User already exists: ' + email)
        return None
    redis.incr('user_counter')
    user_id = redis.get('user_counter')
    app.logger.info('create_user: %s with user id: %s', email, user_id)
    user_data = {
        'authenticator_secret': authenticator_secret,
        'email': email,
        'first_name': first_name,
        'last_name': last_name,
        'user_id': user_id
    }
    if authenticator_secret:
        redis.set('user_id_by_authenticator_secret_' + authenticator_secret, user_id)
        redis.set(
            'authenticator_secret_by_hash_' + GrantedByMe.hash_authenticator_secret(authenticator_secret),
            authenticator_secret)
    if email:
        redis.set('user_id_by_email_' + email, user_id)
    redis.set('user_by_id_' + user_id, json.dumps(user_data))
    _flash_log('User created: ' + str(user_id) + ' (' + email + ')')
    return user_id

def verfiyRcvDatas(redis, params):
    """ Validate the received payment data """
    curTime = datetime.now()
    orderTable = ORDER_TABLE % (params['out_trade_no'])
    if not redis.exists(orderTable):
        log_util.debug('[%s][wechatPay][error] orderNo[%s] is not exists.' % (curTime, params['out_trade_no']))
        return False
    updateInfo = {
        'money': params['total_fee'],
        'endTime': params['time_end'],
        'currency': params['fee_type'],
        'orderNum': params['transaction_id'],
        'type': 'successful',
    }
    pipe = redis.pipeline()
    try:
        log_util.debug('[%s][wechatPay][info] update orderInfo[%s] success.' % (curTime, updateInfo))
        pipe.hmset(orderTable, updateInfo)
        pipe.srem(PENDING_ORDER, orderTable)
        pipe.sadd(SUCCEED_ORDER, orderTable)
        pipe.persist(orderTable)
        pipe.execute()
    except:
        log_util.debug('[%s][wechatPay][error] update orderInfo[%s] error.' % (curTime, updateInfo))
        return False
    return True

def auto_login(user_id):
    """TBD"""
    if redis.exists('user_by_id_' + user_id):
        _user_login(int(user_id))
    else:
        _flash_log('User not exists: ' + str(user_id))
    return redirect(url_for('index'))

def setup_redis():
    if not redis.exists(switch_key):
        ret = redis.hmset(switch_key, {oj: 1 for oj in SUPPORT_OJ})
        if ret:
            logger.info('setup switch key success')
    else:
        log_spider_status()

def mag_listredis():
    if request.method == 'POST':
        input1 = int(request.form['fromdepth'])
        input2 = int(request.form['todepth'])
        input3 = int(request.form['iternum'])
        res = []
        cache = "sanjeev"
        start_t = time.time()
        for i in range(0, int(input3)):
            ran_num1 = "{:.3f}".format(random.uniform(input1, input2))
            ran_num2 = "{:.3f}".format(random.uniform(input1, input2))
            if r.exists(cache + str(ran_num1) + str(ran_num2)):
                rows = pickle.loads(r.get(cache + str(ran_num1) + str(ran_num2)))
            else:
                query = 'select count(*) from Earthquake where "depthError" BETWEEN ' + str(ran_num1) + ' AND ' + str(ran_num2) + ' '
                con = sql.connect("database.db")
                cur = con.cursor()
                cur.execute(query)
                rows = cur.fetchall()
                r.set(cache + str(ran_num1) + str(ran_num2), pickle.dumps(rows))
                con.close()
        end_t = time.time() - start_t
        return render_template("index.html", e=end_t)

def _user_get(authenticator_secret):
    if redis.exists('user_id_by_authenticator_secret_' + authenticator_secret):
        user_id = redis.get('user_id_by_authenticator_secret_' + authenticator_secret)
        user_data = redis.get('user_by_id_' + user_id)
        return json.loads(user_data)
    return None

def generate_session_id():
    for i in range(100):
        session_id = str(uuid4())
        # Ensure the session_id hasn't been used previously
        if not redis.exists(session_key(session_id)):
            return session_id
    raise ValueError('Unable to generate a new unique session UUID.')

def getNiuniuOperateList(redis, start_date, end_date):
    """ Get the Niuniu (bull-bull) operations report for a date range """
    try:
        startDate = datetime.strptime(start_date, '%Y-%m-%d')
        endDate = datetime.strptime(end_date, '%Y-%m-%d')
    except:
        weekDelTime = timedelta(7)
        weekBefore = datetime.now() - weekDelTime
        startDate = weekBefore
        endDate = datetime.now()
    deltaTime = timedelta(1)
    res = []
    while startDate <= endDate:
        dateStr = startDate.strftime('%Y-%m-%d')
        if redis.exists(NIUNIU_GAME_OPERATE_BY_DAY_TABLE % dateStr):
            info = redis.hgetall(NIUNIU_GAME_OPERATE_BY_DAY_TABLE % dateStr)
            info['date'] = dateStr
            info['op'] = []
            info['op'].append({
                'url': '/admin/niuniu/tile_type_op1?list=1',
                'method': 'GET',
                'txt': '查看牌型统计表'
            })
            res.append(info)
        startDate += deltaTime
    res.reverse()
    return res

def search_request(message):
    # Create request
    request = {
        "id": message.id,
        "type": "comment" if message.was_comment else "message",
        "author": str(message.author),
        "link": f"https://www.reddit.com{message.context}" if message.was_comment
                else f"https://www.reddit.com/message/messages/{message.id}",
        "retries": 0
    }

    # Check for duplicates
    util.already_processed_check(redis, request)

    lock = util.get_lock(request['id'])
    if redis.exists(lock):
        raise CurrentlyProcessing(request['link'])

    # Lock request to avoid duplicates
    redis.set(lock, "")

    request_json = json.dumps(request)

    # Enqueue for filtering
    redis.sadd(config['REDIS_REQUESTS_FILTER'], request_json)

    message.mark_read()
    logging.info(f"Found new request {request['id']} : {request['link']}.")

def init(queue):
    count = 0
    while count < 5000:
        count += 1
        try:
            header = {'User-Agent': 'Mozilla/5.0'}
            url = queue.pop()
            # print count
            if not redis.exists("%s:%s" % (key, url)):
                # print "crawling %s ... " % (url)
                req = urllib2.Request(url, headers=header)
                page = urllib2.urlopen(req)
                soup = BeautifulSoup(page)
                links = soup.find_all("a")
                ret_links = get_in_links(url, links)
                queue = ret_links + queue
                redis.set("%s:%s" % (key, url), 1)
                redis.sadd(key, url)
                dict = parse(url, soup)
                if dict:
                    print dict
            else:
                # print "%s already crawled ....." % (url)
                pass
        except:
            print "error ........."
            pass

def add_acount():
    while True:
        name = input("Your name? ")
        if not redis.exists(name):
            redis.sadd(name, name)
            redis.hset(MemMessages, name, "")
            break
        print("That name is already taken.", end=" ")

def geolatlng(addr):
    if redis.exists(f'a:{addr}'):
        location = json.loads(redis.get(f'a:{addr}'))
    else:
        #location = geolocator.geocode(addr)
        location = geocoder.arcgis(addr).json
        redis.set(f'a:{addr}', json.dumps(location))
    return "N{};E{}".format(location['lat'], location['lng'])

def reset_matches():
    players = redis.lrange('players', 0, -1)
    for player in players:
        pname = str(player).replace("b'", "").replace("'", "")
        if (redis.exists(pname + '-level')):
            redis.delete(pname + '-level')
            redis.delete(pname + '-skill')
    return f"{head}{css} Matches Reseted!{foot}"

def start_session():
    players = redis.lrange('players', 0, -1)
    for player in players:
        pname = str(player).replace("b'", "").replace("'", "")
        if (redis.exists(pname + '-level')):
            redis.delete(pname + '-level')
            redis.delete(pname + '-skill')
    redis.delete("players")
    return f"{head}{css}New Session Started!{foot}"

def storage_get(value):
    try:
        if redis.exists(value):
            return True
        else:
            return False
    except:
        print('storage error')
        return False

def route_post_ad_count(slot, id):
    key = ad_key(slot, id)
    redis = get_redis()
    if not redis.exists(key):
        return jsonify({'error': 'not_found'}), 404
    redis.hincrby(key, 'impressions', 1)
    return '', 204

def getProtocolResultFromGold(redis, _uuid, timeout=5):
    """ Poll for the result message returned by the gold server """
    while timeout > 0:
        key = RESULT_GOLD_SERVICE_PROTOCOL % _uuid
        if redis.exists(key):
            return json.loads(redis.get(key))
        time.sleep(0.1)
        timeout = timeout - 0.1

def fr_redis(self):
    self.logger.info('Reading data from redis key.')
    import redis
    redis = redis.StrictRedis(self.config['sys_settings']['redis_ip'])
    key = self.config['out']['prefix'] + ':' + self.config['in']['master']
    if redis.exists(key):
        self.data = pickle.loads(redis.get(key), encoding='bytes')
    else:
        self.logger.warning(
            f'Redis key {key} does not exist; might have expired.')
        self.fr_file()

def _f(*args, **kwargs):
    # TODO: this could be used as a DoS attack by filling up
    # redis. Maybe add global rate limiting?
    k = "rl:%s_%s" % (f.__name__, request.remote_addr)
    if not redis.exists(k) or not redis.ttl(k):
        redis.delete(k)
        redis.setex(k, 1, 60)
        return f(*args, **kwargs)
    if int(redis.get(k)) > per_minute:
        return "Too many requests per minute!", 429
    redis.incr(k)
    return f(*args, **kwargs)

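# Note on the snippet above: redis.setex(k, 1, 60) matches the pre-3.0 redis-py
# signature setex(name, value, time); in redis-py >= 3.0 the argument order is
# setex(name, time, value), so the same call would create a 1-second key holding
# the value 60. A minimal sketch of the same fixed-window counter written against
# the redis-py >= 3.0 API; `client`, `WINDOW_SECONDS`, and `PER_MINUTE` are
# illustrative names, not part of the original code.
import redis

client = redis.Redis()
WINDOW_SECONDS = 60
PER_MINUTE = 30

def allow_request(key):
    """Return True if the caller is still within the per-minute budget."""
    if not client.exists(key) or client.ttl(key) < 0:
        # Start a fresh window: counter = 1, expiring after WINDOW_SECONDS.
        client.set(key, 1, ex=WINDOW_SECONDS)
        return True
    if int(client.get(key)) > PER_MINUTE:
        return False
    client.incr(key)
    return True
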
def calc_per_recharge():
    if not redis.exists(FISH_SYSTEM_RECHARGE_TOTAL):
        do_create_recharge_total(redis)
    else:
        already_create_day = len(redis.keys(FISH_SYSTEM_DATE_RECHARGE_TOTAL % ('*')))
        already_recharge_total = convert_util.to_int(redis.get(FISH_SYSTEM_RECHARGE_TOTAL))
        result = already_recharge_total / already_create_day
        print 'already_create_day[%s] already_recharge[%s] result[%s]' % (already_create_day, already_recharge_total, result)
        redis.set("fish:per:recharge:rate", result)
    # Clear today's recharge-user statistics
    redis.delete(FISH_RECHARGE_USER_DAY_IDS)

def get_location(query):
    redis = get_redis()
    key = f"location:{query}"
    if redis.exists(key):
        raw_location = json.loads(redis.get(key).decode("utf-8"))
    else:
        geocoder = Nominatim(user_agent="https://github.com/pbhuss/meowbot")
        location = geocoder.geocode(query)
        if location is None:
            return None
        raw_location = location.raw
        redis.set(key, json.dumps(raw_location), ex=30 * 24 * 60 * 60)
    return raw_location

def update():
    genomes = json.loads(redis.get('genomes') if redis.exists('genomes') else "[]")
    data = {
        "genomes": genomes,
        "generation": redis.get('generation'),
        "best_genome": redis.get('best_genome'),
        "best_fitness": redis.get('best_fitness'),
        "average_fitness": redis.get('average_fitness'),
        "std_fitness": redis.get('std_fitness'),
        "duration_run": redis.get('duration_run'),
        "duration_generation": redis.get('duration_generation'),
    }
    return jsonify(**data)

def _get_session_state():
    """TBD"""
    response_data = gbm.get_challenge_state(request.form['challenge'])
    if response_data['success'] and response_data['status'] == 3:
        if redis.exists('user_id_by_authenticator_secret_' + response_data['authenticator_secret']):
            user_id = redis.get('user_id_by_authenticator_secret_' + response_data['authenticator_secret'])
            _user_login(user_id)
        else:
            _flash_log('Authentication error')
        del response_data['authenticator_secret']
    return response_data

def getOnlineAIInfos(redis):
    online_ai_sum = 0
    cur_ai_gold_sum = 0
    online_ai_room_num_set = set()
    for key in redis.smembers('users:robot:accounts:set'):
        online, account, gold = redis.hmget(key, 'isOnline', 'account', 'gold')
        gold = int(gold) if gold else 0
        cur_ai_gold_sum += gold
        if online == '1':
            online_ai_sum += 1
            if redis.exists(GOLD_ROOM_ACCOUNT_KEY % account):
                online_ai_room_num_set.add(redis.get(GOLD_ROOM_ACCOUNT_KEY % account))
    return online_ai_sum, len(online_ai_room_num_set), cur_ai_gold_sum

def do_PlayerWelfareSign(redis, account):
    """ Daily check-in interface """
    today = datetime.now().strftime("%Y-%m-%d")
    key = WELFARE_USER_SIGN % (account, today)
    if redis.exists(key):
        return
    gold = 2000
    if not player_add_gold(redis, account, gold):
        return
    redis.set(key, 1)
    return gold

def calc_per_login():
    if not redis.exists("fish:login:per:day:total"):
        do_create_total(redis)
    else:
        today = date.today()
        do_add_yesterday_data(redis, today - timeDelt)
        already_create_day = len(redis.keys(FORMAT_LOGIN_DATE_TABLE4FISH % ('*')))
        already_login_total = convert_util.to_int(redis.get("fish:login:per:day:total"))
        result = already_login_total / already_create_day
        print 'already_create_day[%s] already_login_total[%s] result[%s]' % (already_create_day, already_login_total, result)
        redis.set("fish:per:login:rate", result)

def get_in_links(page_url, links):
    in_links = []
    for link in links:
        href = link.get("href")
        if href and http not in href:
            href = "%s%s" % (base_url, href) if href.startswith("/") else "%s/%s" % (page_url, href)
            if "https://www.mustakbil.com/events/" not in href and not redis.exists("%s:%s" % ("queue", href)):
                # print "url %s has been queued" % (href)
                redis.set("%s:%s" % ("queue", href), 1)
                in_links.append(href)
            # else:
            #     print "%s has already been queued" % (href)
    in_links.reverse()
    return in_links

def turn_off_spider(oj_name):
    if redis.exists(switch_key):
        redis.hset(switch_key, oj_name, 0)
        log_spider_status()

def __get_key_or_null(k):
    if not redis.exists(k):
        fetch_data()
    return json.loads(redis.get(k) or 'null')

def exists(name):
    return redis.exists(rayter_name(name))

def errors(conditions, paging):
    print 'brquery.errors()'
    print "conditions:"
    for k in conditions:
        print "%s=%s" % (k, conditions[k])
    print "paging:"
    for k in paging:
        print "%s=%s" % (k, paging[k])
    #Check token
    if 'token' in conditions:
        token = conditions.pop('token')
    else:
        return {"error": {"code": 12, "msg": "Invalid token"}}
    redis = brconfig.getRedis()
    pipe = redis.pipeline(transaction=True)
    pagingCached = False
    pagingSet = None
    page = int(paging['page'])
    records = int(paging['records'])
    totalRecords = None
    paging_token = None
    if 'paging_token' in paging:
        paging_token = paging['paging_token']
        pagingSet = "tmp_paging_%s" % paging_token
        if redis.exists(pagingSet):
            pagingCached = True
    ret = []
    if not pagingCached:
        print "not paging!"
        sets = []
        #temporary set names
        accessibleSet = 'tmp_%s' % str(uuid.uuid4())
        timeFilteredSet = 'tmp_%s' % str(uuid.uuid4())
        appSet = 'tmp_%s' % str(uuid.uuid4())
        resultIdsSet = 'tmp_%s' % str(uuid.uuid4())
        paging_token = str(uuid.uuid4())
        pagingSet = "tmp_paging_%s" % paging_token
        redis.hset("paging_token", paging_token, int(time.time()) + 30 * 60)
        #Get accessible set
        accessibleSet = getAccessibleSet(token, accessibleSet)
        if accessibleSet == None:
            print "Accessible Set is None!"
            return ret
        else:
            sets.append(accessibleSet)
        print "accessible sets[]:%s" % sets
        #Get time filtered set
        starttime = 0
        endtime = 0
        ntf = False
        if 'starttime' in conditions:
            starttime = conditions.pop('starttime')
            ntf = True
        if 'endtime' in conditions:
            endtime = conditions.pop('endtime')
            ntf = True
        if ntf:
            timeFilteredSet = getTimeFilteredSet(starttime, endtime, timeFilteredSet)
            if timeFilteredSet == None:
                print "Time Filtered Set is None!"
                return ret
            else:
                sets.append(timeFilteredSet)
        print "filtered sets[]:%s" % sets
        #For error type
        if 'e_type' in conditions:
            sets.append('ids:b:%s' % conditions.pop('e_type'))
        else:
            sets.append('ids:e')  #All errors without call drop.
        #Other query conditions
        for k, v in conditions.items():
            if k == 'name':
                sets_app = []
                sets_app.append('ids:b:FORCE_CLOSE:%s' % v)
                sets_app.append('ids:b:ANR:%s' % v)
                sets_app.append('ids:b:CORE_DUMP:%s' % v)
                sets_app.append('ids:b:MANUALLY_REPORT:%s' % v)
                pipe.sunionstore(appSet, sets_app)
                result = pipe.execute()
                if result[0] == 0:
                    return []
                else:
                    sets.append(appSet)
            else:
                sets.append('ids:i:%s:%s' % (k, v))
        print "last sets[]:%s" % sets
        #Intersect all the sets
        #TODO: Is here the best place to retrieve all the set members?
        pipe.sinterstore(pagingSet, sets)
        pipe.sort(pagingSet, desc=True, start=(page - 1) * records, num=records)
        result = pipe.execute()
        totalRecords = result[0]
        ret = list(result[1])
        print "totalRecords:%s" % totalRecords
        print "pagingSet:%s" % pagingSet
        #Delete all temporary set
        pipe.delete(accessibleSet)
        pipe.delete(timeFilteredSet)
        pipe.delete(appSet)
        pipe.delete(resultIdsSet)
        pipe.execute()
    else:
        print "has paging!"
        pipe.hset("paging_token", paging_token, int(time.time()) + 30 * 60)
        pipe.card(pagingSet)
        pipe.sort(pagingSet, desc=True, start=(page - 1) * records, num=records)
        result = pipe.execute()
        totalRecords = result[1]
        ret = list(result[2])
    if len(ret) == 0:
        print "Return set is empty!"
        return {}
    recordList = proxy.records(ret, token)
    print "Records:%s" % recordList
    paging['totalrecords'] = totalRecords
    remainder = totalRecords % records
    if remainder > 0:
        paging['totalpages'] = totalRecords / records + 1
    else:
        paging['totalpages'] = totalRecords / records
    paging['paging_token'] = paging_token
    results = {'paging': paging, 'data': recordList}
    return results

def prevent_collision(loc):
    if redis.exists(loc):
        return False
    else:
        redis.set(loc, '1')
        return True

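# The exists-then-set pair above is a check-then-act sequence, so two callers can
# both observe the key as missing and both claim the same location. A minimal
# sketch of an atomic variant using SET with nx=True (one round trip); the
# `client` connection name is an assumption for illustration, not part of the
# original code.
import redis

client = redis.Redis()

def prevent_collision_atomic(loc):
    # SET ... NX succeeds only for the first caller; later callers get None.
    return client.set(loc, '1', nx=True) is True
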
def act(self, game):
    restricted_zones = [
        (0, 0), (1, 0), (2, 0), (3, 0), (4, 0), (5, 0), (6, 0), (7, 0), (8, 0), (9, 0),
        (10, 0), (11, 0), (12, 0), (13, 0), (14, 0), (15, 0), (16, 0), (17, 0), (18, 0),
        (0, 1), (1, 1), (2, 1), (3, 1), (4, 1), (5, 1), (6, 1), (12, 1), (13, 1), (14, 1),
        (15, 1), (16, 1), (17, 1), (18, 1),
        (0, 2), (1, 2), (2, 2), (3, 2), (4, 2), (14, 2), (15, 2), (16, 2), (17, 2), (18, 2),
        (0, 3), (1, 3), (2, 3), (16, 3), (17, 3), (18, 3),
        (0, 4), (1, 4), (2, 4), (16, 4), (17, 4), (18, 4),
        (0, 5), (1, 5), (17, 5), (18, 5),
        (0, 6), (1, 6), (17, 6), (18, 6),
        (0, 7), (18, 7), (0, 8), (18, 8), (0, 9), (18, 9), (0, 10), (18, 10), (0, 11), (18, 11),
        (0, 12), (1, 12), (17, 12), (18, 12),
        (0, 13), (1, 13), (17, 13), (18, 13),
        (0, 14), (1, 14), (2, 14), (16, 14), (17, 14), (18, 14),
        (0, 15), (1, 15), (2, 15), (16, 15), (17, 15), (18, 15),
        (0, 16), (1, 16), (2, 16), (3, 16), (4, 16), (14, 16), (15, 16), (16, 16), (17, 16), (18, 16),
        (0, 17), (1, 17), (2, 17), (3, 17), (4, 17), (5, 17), (6, 17), (12, 17), (13, 17), (14, 17),
        (15, 17), (16, 17), (17, 17), (18, 17),
        (0, 18), (1, 18), (2, 18), (3, 18), (4, 18), (5, 18), (6, 18), (7, 18), (8, 18), (9, 18),
        (10, 18), (11, 18), (12, 18), (13, 18), (14, 18), (15, 18), (16, 18), (17, 18), (18, 18),
        (7, 1), (8, 1), (9, 1), (10, 1), (11, 1),
        (5, 2), (6, 2), (12, 2), (13, 2),
        (3, 3), (4, 3), (14, 3), (15, 3),
        (3, 4), (15, 4),
        (2, 5), (16, 5),
        (2, 6), (16, 6),
        (1, 7), (17, 7), (1, 8), (17, 8), (1, 9), (17, 9), (1, 10), (17, 10), (1, 11), (17, 11),
        (2, 12), (16, 12), (2, 13), (16, 13),
        (3, 14), (15, 14),
        (3, 15), (4, 15), (14, 15), (15, 15),
        (5, 16), (6, 16), (12, 16), (13, 16),
        (7, 17), (8, 17), (9, 17), (10, 17), (11, 17)]
    locs = self.close_locs()
    robots = game['robots']

    # Calculate distance:
    def distance(loc1, loc2):
        return abs(loc1[0] - loc2[0]) + abs(loc1[1] - loc2[1])

    # Check current turn and remove previous move log:
    if redis.exists('turn'):
        if int(redis.get('turn')) != game['turn']:
            redis.flushdb()
            redis.set('turn', game['turn'])
    else:
        redis.flushdb()
        redis.set('turn', 1)

    # Look up friendly robots moves and store mine:
    def prevent_collision(loc):
        if redis.exists(loc):
            return False
        else:
            redis.set(loc, '1')
            return True

    # Find the closest enemy for me:
    def find_closest_enemy(loc):
        distance_list = {}
        # Loop over enemies:
        for enemy in robots:
            if len(distance_list) > 0:
                if distance(loc, enemy) < distance_list[distance_list.keys()[0]]:
                    distance_list[enemy] = distance(loc, enemy)
            else:
                distance_list[enemy] = distance(loc, enemy)
        # Return closest enemy
        if len(distance_list) > 0:
            return distance_list.keys()[0]

    # Look up the best available path to get to this enemy:
    def find_best_path(target):
        path_list = {}
        # Loop over paths:
        for loc in locs:
            if len(path_list) > 0:
                if path_list[path_list.keys()[0]] > distance(loc, target):
                    if prevent_collision(loc):
                        if loc not in restricted_zones:
                            path_list.clear()
                            path_list[loc] = distance(loc, target)
            else:
                path_list[loc] = distance(loc, target)
        # Return best path
        # Should be sorted....
        if len(path_list) > 0:
            return path_list.keys()[0]

    for loc in locs:
        # Should I suicide?
        if self.hp < 11:
            print 'Got orders: suicide'
            return ['suicide']
        elif robots.get(loc):
            # Can I attack?
            print 'Got orders: attack'
            return ['attack', loc]
        elif find_closest_enemy(loc):
            # Are we past turn 10?
            # elif game['turn'] > 10:
            #     print "Got orders: move"
            #     best path here
            # Can I move?
            if find_best_path:
                print 'Got orders: march'
                return ['move', find_best_path(find_closest_enemy(loc))]
            else:
                # I will guard
                print 'Got orders: guard'
                return ['guard']

def process_message(self, peer, mailfrom, rcpttos, data):
    # first, figure out if this is a sample we should use to populate the
    # known senders list
    print "rcpttos[0].split('@',1)[0]", rcpttos[0].split("@", 1)[0]
    if rcpttos[0].split("@", 1)[0] == "addthis":
        self.addSenderBasedOnMessage(peer, mailfrom, rcpttos, data)
        return
    lines = data.split("\n")
    # Look for the last header
    i = 0
    for line in lines:
        if not line:
            break
        i += 1
    # XXX do we need to add a Received: line?
    lines.insert(
        i,
        "Received: from " + peer[0] + "\n" +
        " by " + host + "\n" +
        " " + email.Utils.formatdate(None, True),
    )  # specify tz
    lines.insert(i, "X-Peer: %s" % peer[0])
    data = NEWLINE.join(lines)
    # look up rcpttos in our redis db, and swap them if we have them in
    # our db.
    acceptable = False
    new_rcpttos = []
    for rcptto in rcpttos:
        print "RCPTTO", rcptto
        username, domain = rcptto.split("@", 1)
        new_rcptto = rcptto
        if domain == host:
            print >> DEBUGSTREAM, "message for us"
            target = redis.get("alias:" + username)
            if target:
                print >> DEBUGSTREAM, "FOUND a mapping from " + username + " to " + target
                acceptable = True
                new_rcptto = target
        new_rcpttos.append(new_rcptto)
    if not acceptable:
        # XXX we should really bounce the mail
        return
    # is this an email we can deal with here, or do we forward it?
    action = "store"  # for now, we always store, for testing.
    # "forward"
    domain = domain_from_address(mailfrom)
    print "domain", domain, "target", target
    if redis.exists("knownsender:" + domain + ":" + target):
        action = "store"
    if action == "forward":
        refused = self._deliver(mailfrom, new_rcpttos, data)
        # TBD: what to do with refused addresses?
        if refused:
            print >> DEBUGSTREAM, "we got some refusals:", refused
    elif action == "store":
        refused = self._deliver(mailfrom, new_rcpttos, data)
        self.store_message(mailfrom, target, new_rcpttos, data)
        print >> DEBUGSTREAM, "we really should store this message!!!!!!!!!!!!!"

def autoinc(redis, key):
    key = "_incs:%s" % key
    if not redis.exists(key):
        redis.set(key, 0)
    return redis.incr(key)

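# Note: Redis INCR treats a missing key as 0 before incrementing, so the
# exists/set guard above is redundant (and the check-then-set pair is not
# atomic). A minimal single-call sketch with the same "_incs:" prefix; `client`
# is an assumed redis-py connection used only for illustration.
import redis

client = redis.Redis()

def autoinc_simple(key):
    # Atomic: returns 1 the first time a key is seen, then 2, 3, ...
    return client.incr("_incs:%s" % key)
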