def _all_(m):
    if m.chat.type == "group" or m.chat.type == "supergroup":
        uid = m.from_user.id
        cid = m.chat.id
        uname = m.from_user.first_name or "Admin"
        status = redis.get(key + str(cid) + ":status") or "off"
        if bot.get_chat_member(cid, uid).status == "member":
            if status == "on":
                if not redis.sismember(key + str(cid) + ":allow_list", uid):
                    bot.delete_message(cid, m.message_id)
                    max_add = redis.get(key + str(cid) + ":max_add") or 3
                    if not redis.sismember(key + str(cid) + ":warn_list", uid):
                        text = ("User [{}](tg://user?id={}): you must invite {} "
                                "members to this group before you can chat!").format(uname, uid, max_add)
                        bot.send_message(cid, text, parse_mode="Markdown")
                        redis.sadd(key + str(cid) + ":warn_list", uid)
        else:
            if m.text == "/panel":
                bot.delete_message(cid, m.message_id)
                text = ("[{}](tg://user?id={}):\nUse the buttons below to set the "
                        "maximum number of member invites 🎈😁").format(uname, uid)
                bot.send_message(cid, text, parse_mode="Markdown", reply_markup=panel(cid))

def main():
    redis.set("hits:homepage", 2000)
    redis.set("hits:loginpage", 75)
    homepage = redis.get("hits:homepage")
    loginpage = redis.get("hits:loginpage")

    # Register our script with the Redis Python client and
    # return a callable object for invoking our script.
    stats = redis.register_script(stats_script)

    # Invoke our "sum" script.
    # This calls SCRIPT LOAD and then stores
    # the SHA1 digest of the script for future use.
    total = stats(["hits:homepage", "hits:loginpage"], ["sum"])
    assert total == 2075

    # One more test: "max" of the two counters.
    # (Renamed from `max` to avoid shadowing the builtin.)
    max_hits = stats(["hits:homepage", "hits:loginpage"], ["max"])
    assert max_hits == 2000

    print('calling script to sum {} + {} = {}'.format(homepage, loginpage, total))
    print('calling script get max {} or {} ? {}'.format(homepage, loginpage, max_hits))

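# The snippet above references a `stats_script` Lua source it never defines.
# A minimal sketch consistent with the calls it makes (counter keys in KEYS,
# the operation name in ARGV[1]) could look like this; it is an assumption,
# not the original script:
stats_script = """
local sum = 0
local max = 0
for _, key in ipairs(KEYS) do
    local val = tonumber(redis.call('GET', key) or '0')
    sum = sum + val
    if val > max then max = val end
end
if ARGV[1] == 'sum' then return sum end
if ARGV[1] == 'max' then return max end
return redis.error_reply('unknown operation: ' .. tostring(ARGV[1]))
"""
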
def resubmit_jobs():
    '''
    Examines the fetch and gather queues for items that are suspiciously
    old. These are removed from the queues and placed back on them afresh,
    to ensure the fetch & gather consumers are triggered to process them.
    '''
    if config.get('ckan.harvest.mq.type') != 'redis':
        return
    redis = get_connection()

    # fetch queue
    harvest_object_pending = redis.keys(get_fetch_routing_key() + ':*')
    for key in harvest_object_pending:
        date_of_key = datetime.datetime.strptime(redis.get(key),
                                                 "%Y-%m-%d %H:%M:%S.%f")
        # 3 minutes for fetch and import max
        if (datetime.datetime.now() - date_of_key).seconds > 180:
            redis.rpush(get_fetch_routing_key(),
                        json.dumps({'harvest_object_id': key.split(':')[-1]}))
            redis.delete(key)

    # gather queue
    harvest_jobs_pending = redis.keys(get_gather_routing_key() + ':*')
    for key in harvest_jobs_pending:
        date_of_key = datetime.datetime.strptime(redis.get(key),
                                                 "%Y-%m-%d %H:%M:%S.%f")
        # 2 hours for a gather
        if (datetime.datetime.now() - date_of_key).seconds > 7200:
            redis.rpush(get_gather_routing_key(),
                        json.dumps({'harvest_job_id': key.split(':')[-1]}))
            redis.delete(key)

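# resubmit_jobs() above assumes that whoever enqueues work also writes a
# companion key "<routing_key>:<id>" holding the enqueue time in
# "%Y-%m-%d %H:%M:%S.%f" format. A hypothetical sketch of that producer side
# (enqueue_fetch and its key layout are assumptions, not CKAN's API):
def enqueue_fetch(redis, harvest_object_id):
    routing_key = get_fetch_routing_key()
    # str(datetime.datetime.now()) yields exactly the "%Y-%m-%d %H:%M:%S.%f"
    # format that resubmit_jobs() parses back with strptime().
    redis.set(routing_key + ':' + harvest_object_id, str(datetime.datetime.now()))
    redis.rpush(routing_key, json.dumps({'harvest_object_id': harvest_object_id}))
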
def storage():
    redis.expire(session['current_user'], time=300)
    user = redis.get(session['current_user']).decode('utf-8')
    user_path = app.upload_path.joinpath(user).resolve()
    cont = requests.get(app.file_server + "list/" + user, verify=False).content.decode()
    miniature = requests.get(app.file_server + "miniatures/" + user, verify=False).content.decode()
    miniatures = json.loads(miniature)['list']
    files = json.loads(cont)['list']
    # The server-provided file list is discarded and rebuilt from the local
    # directory below.
    files = []
    for filename in os.listdir(str(user_path)):
        data = []
        data.append(filename)
        data.append(str(os.stat(str(user_path) + "/" + filename).st_size) + "B")
        data.append("/slyko/dl/download/" + filename)
        data.append("/slyko/dl/delete/" + filename)
        data.append("/slyko/static/miniatures/" + filename)
        miniatures.append(filename)
        files.append(data)
    tokens = {}
    # for f in files:
    #     tokens[f] = creating_token(f, 240).decode('utf-8')
    return render_template('storage.html',
                           user=user,
                           files_len=len(files),
                           files=files,
                           tokens=tokens,
                           miniatures=miniatures)

def conductor(redis):
    groups = list(StaticGroups.objects.all()) + [
        x.group_info for x in Groups.objects.filter(behavior=1)]
    for group in groups:
        if redis.get(group.group_id):
            day = datetime.utcnow()
            data_delta = timedelta(hours=24)
            if (not group.impression
                    and int(redis.get(group.group_id)) >= group.limit_impressions
                    and day - data_delta >= group.end_limit.replace(tzinfo=None)):
                redis.set(group.group_id, '0')
                group.limit_impressions = random_generate()
                group.save()
                continue
            if (not group.impression
                    and int(redis.get(group.group_id)) >= group.limit_impressions):
                group.impression = True
                group.save()
                redis.set(group.group_id, '0')
                continue
            if group.impression:
                delta = timedelta(hours=WAITING)
                if day - delta >= group.end_limit.replace(tzinfo=None):
                    group.impression = False
                    group.limit_impressions = random_generate()
                    group.save()
                else:
                    continue

def get_new_group(message: telebot.types.Message):
    group = message.text
    user_id = message.from_user.id
    if group:
        group = group.upper()
    else:
        return
    kb = create_main_keyboard(user_id)
    emoji_list = list('😀😃😄😊🙃👽🤖🤪😝')
    emoji = random.choice(emoji_list)
    current_week = redis.get('current_week').decode('utf8')
    try:
        bot.delete_message(message.chat.id, message.message_id)
    except ApiException:
        pass
    try:
        groupoid = parsing.get_groupoid_or_raise_exception(group, redis)
    except exceptions.MpeiBotException as e:
        user_group = redis.get(f'user_group:{message.from_user.id}')
        if user_group:
            user_group = user_group.decode('utf8')
            continue_text = f'{user_group} student {emoji}. It is week {current_week} today'
        else:
            continue_text = f'MPEI student {emoji}. It is week {current_week} today'
        bot.send_message(message.chat.id, text=e.message)
        bot.send_message(message.chat.id, text=f'Hi, {continue_text}', reply_markup=kb)
        return
    redis.set(f'user_groupoid:{message.from_user.id}', value=groupoid)
    redis.set(f'user_group:{message.from_user.id}', value=group)
    continue_text = f'{group} student {emoji}. It is week {current_week} today'
    bot.send_message(message.chat.id, f'Hi, {continue_text}', reply_markup=kb)

def getDefaultRoomCard(redis, groupId, userId, lastGroup=None):
    """
    Get the user's room-card count.
    Arguments: redis, groupId (guild ID), userId (player ID),
    lastGroup (whether this is the first time joining).
    Returns: the default room-card count.
    """
    provinceAgId = getTopAgentId(redis, groupId)
    defaultCard = redis.get(USER4AGENT_CARD % (provinceAgId, userId))
    log_util.debug('[getDefaultRoomCard] groupId[%s] userId[%s] defaultCards[%s]'
                   % (provinceAgId, userId, defaultCard))
    if not defaultCard:
        if lastGroup:
            # Not the first time joining a guild: return the current card
            # count, or 0 if there is none.
            defaultCard = redis.get(USER4AGENT_CARD % (groupId, userId))
            if not defaultCard:
                return 0
            return defaultCard
        # First time joining: grant the default room cards.
        defaultCard = redis.hget(AGENT_TABLE % (provinceAgId), 'defaultRoomCard')
        if not defaultCard:
            defaultCard = 0
    log_util.debug('[getDefaultRoomCard] return defaultCard[%s] groupId[%s]'
                   % (defaultCard, groupId))
    return defaultCard

def create_url():
    data = request.get_json()
    full_url = data['full_url']
    try:
        url_life = data['url_life']  # lifetime in days
    except KeyError:
        url_life = 90  # default lifetime in days; converted to seconds below
    if not full_url.startswith('http://') and not full_url.startswith('https://'):
        return make_response('<h2>Invalid URL format</h2>', 400)
    result = redis.get(full_url)
    if result is not None:
        return f"Shortened URL for {full_url} already exists: /{result}\n\n"
    short_url = ''.join(random.choice(alphabet) for i in range(url_len))
    while redis.get(short_url):
        short_url = ''.join(random.choice(alphabet) for i in range(url_len))
    if url_life <= 0:
        url_life = 90 * 24 * 60 * 60  # 90 days
    else:
        url_life = url_life * 24 * 60 * 60  # days -> seconds
    redis.set(full_url, short_url, url_life)
    redis.set(short_url, full_url, url_life)
    return f"Shortened URL for {full_url} is: /{short_url}\n\n"

def random(self):
    youtube_ids = redis.srandmember("musicacommonset", 30)
    if not youtube_ids:
        return {"success": False}
    nonrecent = []
    for youtube_id in youtube_ids:
        youtube_id = youtube_id.decode()
        ltime = redis.get("musicatime.%s" % youtube_id)
        if ltime is None or time.time() - (float(ltime.decode()) or 0) >= 3600:
            # Weight each candidate by its commonness count.
            count = int(redis.get("musicacommon.%s" % youtube_id).decode()) or 1
            for i in range(count):
                nonrecent.append(youtube_id)
    if not nonrecent:
        return {"success": False}
    youtube_id = query_search(random.choice(nonrecent), search=False)
    if not youtube_id:
        return {"success": False}
    youtube_id = youtube_id[0]
    redis.rpush("musicaqueue",
                json.dumps({"ytid": youtube_id, "uuid": str(uuid.uuid4())}))
    redis.rpush("musicaload", youtube_id)
    redis.set("musicatime.%s" % youtube_id, time.time())
    return {"success": True, "ytid": youtube_id}

def health_check():
    latest_block_num = None
    latest_block_hash = None

    stored_latest_block_num = redis.get(latest_block_redis_key)
    if stored_latest_block_num is not None:
        latest_block_num = int(stored_latest_block_num)

    stored_latest_blockhash = redis.get(latest_block_hash_redis_key)
    if stored_latest_blockhash is not None:
        latest_block_hash = stored_latest_blockhash.decode("utf-8")

    if latest_block_num is None or latest_block_hash is None:
        latest_block = web3.eth.getBlock("latest", True)
        latest_block_num = latest_block.number
        latest_block_hash = latest_block.hash

    health_results = _get_db_block_state(latest_block_num, latest_block_hash)

    verbose = request.args.get("verbose", type=str) == 'true'
    if verbose:
        # DB connections check
        health_results["db_connections"] = _get_db_conn_state()

    # Return error on unhealthy block diff if requested.
    enforce_block_diff = request.args.get("enforce_block_diff", type=str) == 'true'
    if enforce_block_diff and health_results["block_difference"] > HEALTHY_BLOCK_DIFF:
        return jsonify(health_results), 500

    return jsonify(health_results), 200

def resubmit_jobs():
    if config.get('ckan.harvest.mq.type') != 'redis':
        return
    redis = get_connection()
    log.debug('_create_or_update_package')

    harvest_object_pending = redis.keys('harvest_object_id:*')
    for key in harvest_object_pending:
        date_of_key = datetime.datetime.strptime(redis.get(key),
                                                 "%Y-%m-%d %H:%M:%S.%f")
        # 3 minutes for fetch and import max
        if (datetime.datetime.now() - date_of_key).seconds > 180:
            redis.rpush('harvest_object_id',
                        json.dumps({'harvest_object_id': key.split(':')[-1]}))
            redis.delete(key)

    harvest_jobs_pending = redis.keys('harvest_job_id:*')
    for key in harvest_jobs_pending:
        date_of_key = datetime.datetime.strptime(redis.get(key),
                                                 "%Y-%m-%d %H:%M:%S.%f")
        # 2 hours for a gather
        if (datetime.datetime.now() - date_of_key).seconds > 7200:
            redis.rpush('harvest_job_id',
                        json.dumps({'harvest_job_id': key.split(':')[-1]}))
            redis.delete(key)

def filters(body):  # noqa: E501
    """Filter based on the URL.  # noqa: E501

    :param body:
    :type body: dict | bytes

    :rtype: InlineResponse200
    """
    if connexion.request.is_json:
        body = Body.from_dict(connexion.request.get_json())  # noqa: E501
    texts: List[str] = []
    cached_rslt: List[Tuple[int, float]] = []
    calc_urls: List[str] = []
    for i, url in enumerate(body.urls):
        if rslt := redis.get(url):  # type: ignore
            cached_rslt.append((i, float(rslt.decode())))
            continue
        calc_urls.append(url)
        try:
            text = oneURL2text.oneURL2text(url)
            texts.append(text)
        except Exception as e:
            with open(f"{__main__.DATA_PATH}/error.csv", mode="a") as f:
                f.writelines([str(e), ", ", url, "\n"])
            raise e

def Info(name: str = None, info=None):
    x = connect(name)
    response = requests.get(x).content
    country = response
    # Cache under the original comma-separated string; the original code
    # re-derived the key after splitting `info`, so lookups never hit.
    cache_key = str(info)
    result = redis.get(cache_key)
    if not result:
        info = info.split(",")
        out = "{"
        websiteinfo = json.loads(country)[0]
        if websiteinfo is None:
            return "error in name"
        for s in info:
            try:
                out += s + ":" + str(websiteinfo[s]) + ","
            except KeyError:
                return "wrong info"
        out += "}"
        redis.setex(cache_key, 60, str(out))
        result = redis.get(cache_key)
        return str(result)
    else:
        return result

def lambda_handler(event, context):
    try:
        last_fed = redis.get('last_fed_timestamps')
        converted_last_fed = datetime.strptime(last_fed, '%Y-%m-%d %H:%M:%S.%f')
        diff = (datetime.now() - converted_last_fed).total_seconds()
        if diff > 900.00:
            print('cat is hungry')
            if get_key('MESSAGE_NOT_FED_SENT') == 'False':
                email(NOT_FED_MESSAGE)
                print('Message sent')
                set_key('cat_status', 'hungry')
                set_key('MESSAGE_NOT_FED_SENT', 'True')
        elif get_key('cat_status') == 'ok':
            print('cat has been fed')
            if get_key('MESSAGE_FED_SENT') == 'False':
                email(FED_MESSAGE)
                print('Message sent')
                set_key('MESSAGE_FED_SENT', 'True')
        print('Seconds diff time is {}'.format(diff))
        return 'Job Finished'
    except Exception as e:
        print(e)
        raise e

def _user_get(authenticator_secret):
    if redis.exists('user_id_by_authenticator_secret_' + authenticator_secret):
        user_id = redis.get('user_id_by_authenticator_secret_' + authenticator_secret)
        user_data = redis.get('user_by_id_' + user_id)
        return json.loads(user_data)
    return None

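# A hypothetical writer matching _user_get() above (the function name and the
# record shape are assumptions): index the user JSON by id, then map the
# authenticator secret to that id so lookups can chase both keys.
def _user_put(user, authenticator_secret):
    redis.set('user_by_id_' + user['id'], json.dumps(user))
    redis.set('user_id_by_authenticator_secret_' + authenticator_secret, user['id'])
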
def index(request):
    if request.method == 'POST':
        keyword = request.POST['search']
        if not redis.get(keyword):
            # Here, we are only getting first 50 results
            search_params = {
                'key': YOUTUBE_API_KEY,
                'part': 'snippet',
                'q': keyword,
                'maxResults': '50',
                'order': 'date',
                'type': 'video'
            }
            search_response = requests.get(YOUTUBE_SEARCH_URL, params=search_params)
            json_data = search_response.json()
            next_token = json_data.get("nextPageToken")
            if next_token:
                # This task runs in the background to fetch the remaining 150 results
                tasks.fetch_all(keyword, next_token)
            videos_list = []
            video_data = OrderedDict()
            for r in search_response.json()['items']:
                # gather all video ids to use for next query
                videos_list.append(r['id']['videoId'])
                # save video data from search endpoint
                video_data[r['id']['videoId']] = r['snippet']
            videos_params = {
                'key': YOUTUBE_API_KEY,
                'part': 'statistics',
                'id': ','.join(videos_list),
            }
            # second query to get video views
            videos_response = requests.get(YOUTUBE_VIDEO_URL, params=videos_params)
            results = videos_response.json()['items']
            videos = []
            for r in results:
                data = video_data.get(r['id'])
                data['views'] = r['statistics']['viewCount']  # append views to video data
                data['url'] = f"https://www.youtube.com/watch?v={r['id']}"  # generate url
                videos.append(data)
            new_cache = json.dumps(videos)
            redis.setex(keyword, 600, new_cache)  # cache the result
            context = {'videos': videos, 'keyword': keyword, 'show_all': False}
            return render(request, "makr/result.html", context)
        cached = redis.get(keyword)
        cached_videos = json.loads(cached)
        context = {'videos': cached_videos, 'keyword': keyword, 'show_all': True}
        return render(request, "makr/result.html", context)
    return render(request, "makr/index.html")

def health_check():
    # can extend this in future to include ganache connectivity, how recently a block
    # has been added (ex. if it's been more than 30 minutes since last block), etc.
    latest_block_num = None
    latest_block_hash = None

    stored_latest_block_num = redis.get(latest_block_redis_key)
    if stored_latest_block_num is not None:
        latest_block_num = int(stored_latest_block_num)

    stored_latest_blockhash = redis.get(latest_block_hash_redis_key)
    if stored_latest_blockhash is not None:
        latest_block_hash = stored_latest_blockhash.decode("utf-8")

    if latest_block_num is None or latest_block_hash is None:
        latest_block = web3.eth.getBlock("latest", True)
        latest_block_num = latest_block.number
        latest_block_hash = latest_block.hash

    health_results = _get_db_block_state(latest_block_num, latest_block_hash)

    verbose = request.args.get("verbose", type=str) == 'true'
    if verbose:
        # DB connections check
        health_results["db_connections"] = _get_db_conn_state()

    if health_results["block_difference"] > HEALTHY_BLOCK_DIFF:
        return jsonify(health_results), 500

    return jsonify(health_results), 200

def hongbao():
    """
    Periodically tally which users sent a passphrase and received a red
    packet.

    Rule: a user sends a passphrase to Pai Pai and gets a red packet.
    :return:
    """
    from datetime import datetime
    from bson import ObjectId
    import re

    redis = _redis_client()

    # Users who have already received a red packet
    processed_users = set(json.loads(redis.get('viae/viae.provisional.hongbao/processed_users') or '[]'))

    # Timestamp of how far processing has gotten
    utc_tz = timezone('UTC')
    processed_since = redis.get('viae/viae.provisional.hongbao/processed_ts')
    logger.info('Processing from %s' % processed_since)
    processed_since = datetime.strptime(processed_since, '%Y-%m-%d %H:%M:%S').replace(tzinfo=utc_tz)
    dummy_id = ObjectId.from_datetime(processed_since)

    # Find the users who sent a red-packet passphrase
    pattern = re.compile(u'(体验旅行派APP领现金红包|新用户口令|领新用户红包|从微信过来领红包|下单送北京大房免费住)', re.IGNORECASE)
    sender_list = mongo_hedy.Message.distinct('senderId',
                                              {'_id': {'$gt': dummy_id}, 'receiverId': 10000,
                                               'contents': pattern})

    # These users must not already be in the processed list, and must have
    # registered recently
    final_senders = {}
    user_dummy_id = ObjectId.from_datetime(processed_since - timedelta(days=7))
    for s in filter(lambda v: v not in processed_users, sender_list):
        u = mongo_yunkai.UserInfo.find_one({'userId': s, '_id': {'$gt': user_dummy_id}},
                                           {'userId': 1, 'nickName': 1})
        if not u:
            continue
        final_senders[u['userId']] = u

    if final_senders:
        # Build the report
        sections = []
        for uid, user in sorted(final_senders.items(), key=lambda v: v[0]):
            messages = mongo_hedy.Message.find({'senderId': uid, 'receiverId': 10000},
                                               {'contents': 1})
            c = '\n'.join([tmp['contents'] for tmp in messages])
            sections.append(u'%d: %s\n%s\n\n' % (uid, user['nickName'], c))
            processed_users.add(uid)

        email_contents = ''.join(sections).strip()

        from viae.job import send_email_to_group, send_email
        logger.info('Sending hongbao stats')
        send_email_to_group(groups='MARKETPLACE', subject=u'红包申请统计', body=email_contents)

    # Expire after 7 days by default
    expire = 7 * 24 * 3600
    redis.set('viae/viae.provisional.hongbao/processed_users',
              json.dumps(list(processed_users)), expire)
    redis.set('viae/viae.provisional.hongbao/processed_ts',
              (datetime.utcnow() - timedelta(minutes=20)).replace(tzinfo=utc_tz).strftime('%Y-%m-%d %H:%M:%S'),
              expire)

def index():
    handle = unicode(redis.get("handle"), "utf-8")
    roomnum = unicode(redis.get("roomnum"), "utf-8")
    print("index:", handle, roomnum)
    return render_template("index.html", handle=handle, roomnum=roomnum)

def fetch_cache_name(name: str = None):
    result = redis.get(name)
    if not result:
        country_info(name)
        result = redis.get(name)
        return str(result) + "new"
    else:
        return result

def finish():
    customer_queue = pickle.loads(redis.get('c_q'))
    currently_serving = pickle.loads(redis.get('c_s'))
    customer_queue.delete_by_uid(uuid.UUID(request.args.get('finish')))
    del currently_serving[uuid.UUID(request.args.get('finish'))]
    redis.set('c_s', pickle.dumps(currently_serving))
    redis.set('c_q', pickle.dumps(customer_queue))
    return redirect('/admin')

def _handle_message(self, msg):
    """
    Forwards messages from Redis to the players directly
    connected to this instance.
    """
    self._debug('RECEIVED - {}', extra=[str(msg)])
    if msg['type'] != 'message' or msg['channel'] != self.topic:
        return  # Ignore messages we don't need
    msg = json_loads(msg['data'])

    # If we have the artist, we're responsible for adjusting game state
    must_pass = False
    artist = self._get_artist()
    score = None
    guesser = None
    if self._has_artist(artist):
        if msg.verb == 'GUESSED':
            if msg.player_name == artist:
                self._error('artist ({}) submitted a guess', extra=[artist])
            else:
                word = redis.get(self.word_key)
                # TODO: Correct is only set for clients connected to this instance.
                msg.correct = word.lower() == msg.word.lower()
                if msg.correct:
                    guesser = msg.player_name
                    score = redis.zincrby(self.players_key, msg.player_name, 1)
                    word_won(word)
                    must_pass = True
        elif msg.verb == 'SKIPPED':
            if msg.player_name == artist:
                self._error('artist ({}) voted to skip', extra=[artist])
            else:
                voted = redis.scard(self.skip_key)
                total = redis.zcard(self.players_key) - 1
                if voted * 2 > total:
                    must_pass = True
        elif msg.verb == 'ENDED':
            if msg.player_name == artist:
                must_pass = True
            # TODO: Player name will be sent on other instances
            del msg.player_name

    # Repeat the message to all players
    for p in self.players:
        if msg.verb == 'PASSED' and msg.player_name == p.name:
            # Add the word to the passed message for the correct player
            special = Message(msg)
            special.word = redis.get(self.word_key)
            gevent.spawn(p.send, special)
        else:
            gevent.spawn(p.send, msg)

    if must_pass:
        self._pass_turn(artist, guesser=guesser, score=score)

def read_redis(data):
    import redis
    # redis = redis.Redis(host='redis', port=6379, decode_responses=True)
    redis = redis.Redis()
    # The scan below is redundant: the final return fetches the key either way.
    for d in redis.scan_iter():
        if data == d:
            return redis.get(data)
    return redis.get(data)

def shorten_details(short_id):
    link_target = redis.get('url-target:' + short_id)
    if link_target is None:
        raise NotFound()
    click_count = int(redis.get('click-count:' + short_id) or 0)
    return render_template('details.html',
                           short_id=short_id,
                           click_count=click_count,
                           link_target=link_target)

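# shorten_details() reads keys that some redirect view must maintain. A
# minimal sketch of that counterpart, under the same key layout (the function
# name and route are assumptions):
def follow_short_link(short_id):
    link_target = redis.get('url-target:' + short_id)
    if link_target is None:
        raise NotFound()
    redis.incr('click-count:' + short_id)  # counted on every follow
    return redirect(link_target)
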
def kill():
    redis.incr('kills')
    if request.forms.get("name"):
        user = request.forms.get("name")
        # print user
        redis.incr(user)
        return template('<b>Flies killed: {{num_killed}}</b>!',
                        num_killed=redis.get(user))
    else:
        return template('<b>Flies killed: {{num_killed}}</b>!',
                        num_killed=redis.get('kills'))

def add_total_score_to_one(location, max_price=10000000.0, min_price=0,
                           min_lot_size=0, price_weight=1, transit_weight=1,
                           size_weight=1):
    if transit_weight < 0:
        transit_weight = 1
        log.info("Reset negative transit weight to 1")
    if price_weight < 0:
        price_weight = 1
        log.info("Reset negative price weight to 1")
    if size_weight < 0:
        size_weight = 1
        log.info("Reset negative size weight to 1")
    weighted_sum = price_weight + transit_weight + size_weight
    if weighted_sum <= 0:  # Someone entered weird values
        log.info("Resetting weights due to weighted_sum being <=0")
        weighted_sum = 3
        price_weight = transit_weight = size_weight = 1
    try:
        transit_score = redis.get(location.listing_key + "/downtown_commute_score")
        if transit_score is None:
            transit_score = 0
        listing = json.loads(redis.get(location.listing_key))
        price = listing['price']
        if price > max_price or price < min_price:
            price_score = 0
        else:
            price_score = 10
        if 'lot_size' in listing:
            size = listing['lot_size']
            try:
                size = [int(s) for s in size.split() if s.isdigit()][-1]
                size_score = 10 if size > min_lot_size else 0
            except Exception:
                log.warning('lot size not parsed for ' + location.listing_key)
                size_score = 0
        else:
            size_score = 0
        score = (price_score * price_weight + size_score * size_weight +
                 float(transit_score) * transit_weight) / weighted_sum
        print(score)
        redis.set(location.listing_key + "/total_score", score)
    except Exception as e:
        log.exception(traceback.format_exc())

def index():
    visitors = redis.get('visitors')
    os = redis.get('osdata')
    num = 0 if visitors is None else int(visitors)
    num += 1
    redis.set('visitors', num)
    user_agent = request.user_agent
    return render_template('index.html', number=num, user_agent=user_agent)

def get_bytecoin_price():
    # redis.get returns a string, so coerce before comparing arithmetically.
    last_price = int(redis.get("bytecoin_price"))
    if last_price >= 350:
        diff = -50
    elif last_price <= 122:
        diff = 32
    else:
        diff = random.randint(-5, 5)
    redis.set("bytecoin_price", last_price + diff)
    return redis.get("bytecoin_price")

def background_thread():
    """Send server generated events to clients."""
    while True:
        socketio.sleep(1)
        data = {
            "miner_count": redis.get("num_miners"),
            "bytecoin_price": get_bytecoin_price(),
            "total_work": redis.get("combined_mining_speed")
        }
        socketio.emit("stats", data, namespace="/bytecoin")

def run_time(self, function, interval, *args, **kwargs):
    while True:
        duration = time.time() - float(redis.get('before'))
        if duration > interval:
            print(duration)
            function(*args, **kwargs)
            redis.set('before', time.time())
            print('updated')
            redis.publish(REDIS_CHAN, redis.get('activity_data'))
        gevent.sleep(interval / 10)

def _get_like_list(ad_id):
    likes = redis.keys('user:like:*:%s' % ad_id)
    like_list = []
    for like in likes:
        l_user = redis.get(like)
        l_date = redis.get('date:%s' % ":".join(like.split(':')[1:]))
        like_list.append({'user': l_user, 'date': l_date})
    return like_list

def join(self, player):
    if player.table == self:
        return                      # Already part of this table.
    if player.table is not None:
        player.table.leave(player)  # Player has to leave old table

    msg = Message('JOINED')         # Tell all the other players
    msg.player_name = player.name   # that a new player has joined.
    self.send(msg)

    player.table = self             # Register the new player with
    self.players.append(player)     # this table.

    # Get a list of all the other players on this table
    others = redis.zrange(self.players_key, 0, -1)

    # Check if the player exists (race condition I guess)
    score = redis.zscore(self.players_key, player.name)
    if score is None:
        # Add new player to the player list
        redis.zadd(self.players_key, player.name, 0)
        # Add player to the turn list if he/she wasn't already there
        redis.zadd(self.turns_key, player.name, time.time())

    # Prepare joined messages for all existing players
    msgs = []
    for other in others:
        if other == player.name:
            continue
        msg = Message('JOINED')
        msg.player_name = other
        msgs.append(msg)

    # Prepare passed message to set correct turn
    current = self._get_artist()
    msg = Message('PASSED', player_name=current)
    if player.name == current:
        msg.word = redis.get(self.word_key)
        end_time = time.time() + 120
        if not redis.setnx(self.end_key, end_time):
            # Clock's already started!
            end_time = redis.get(self.end_key)
    else:
        end_time = redis.get(self.end_key)
        assert end_time is not None
    msg.end_time = end_time
    msgs.append(msg)

    # Send all the prepared messages
    gevent.joinall([gevent.spawn(player.send, x) for x in msgs])

def process_next():
    customer_queue = pickle.loads(redis.get('c_q'))
    processing, count = customer_queue.find_next_eligible()
    if not processing:
        return redirect('/admin')
    print(processing.name)
    processing.exp = time() + 300
    currently_serving = pickle.loads(redis.get('c_s'))
    currently_serving[processing.uid] = processing
    print('expiring at ', currently_serving[processing.uid].exp)
    redis.set('c_s', pickle.dumps(currently_serving))
    return redirect('/admin')

def getlatest(request):
    global redis
    int_fields = {
        'ExcitementShortTerm', 'ExcitementLongTerm', 'FrustrationScore',
        'LowerfaceValue', 'UpperfaceValue'}
    js_dict = {f: int(round(100 * float(redis.get(f) or 0))) for f in int_fields}
    str_fields = {'Lowerface', 'Upperface'}
    js_dict.update({f: redis.get(f) for f in str_fields})
    # create data
    print js_dict
    return js_dict

def hello():
    # to_store = read_tags()
    # to_store.append('test tag not on MER')
    # redis.delete('allTags')
    # for tag in to_store:
    #     redis.rpush('allTags', tag)
    hint = redis.get('hint').decode('utf-8')
    sol = redis.get('sol').decode('utf-8')
    stored_tags = redis.lrange('allTags', 0, -1)
    MER_tags = read_tags()
    not_on_MER = [item for item in stored_tags if item not in MER_tags]
    return render_template("communicate.html", myhint=hint, mysol=sol,
                           tags=MER_tags, tags_not_on_MER=not_on_MER)

def test():
    r = requests.get('http://localhost:8000/products/ProductItems')
    print r.text
    data = json.loads(r.text)
    lost = []
    for rfid in data:
        cur = rfid['RFID']
        if redis.get(cur) is None:
            print 'lost', cur
            lost.append(cur)
    redis.set('lost', lost)
    print redis.get('lost')

def _listen_worker(self, channel_id, factory, processor):
    # The original mixes the `redis` name with MongoDB-style collection
    # access (update with "$set", envelope documents keyed by "_id"); the
    # cursor construction below is a best-effort reconstruction of that
    # pymongo-like pattern, since the original cursor lines did not parse.
    connection = redis.Redis(*self.address)
    self._create_channel(connection, channel_id)
    collection = connection[self._db][channel_id]
    cursor = collection.find({TO_FIELD: self.get_id()}).sort("_id", ASC)
    while True:
        for envelope in cursor:
            msg = take_from_envelope(envelope, factory)
            processor.process(envelope[FROM_FIELD], envelope[TO_FIELD],
                              channel_id, msg)
            collection.update({"_id": envelope["_id"]},
                              {"$set": {READ_FIELD: True}})
        self._sleep(0.05)
        cursor = collection.find({TO_FIELD: self.get_id()}).sort("_id", ASC)

def create_report(redis, id, name, email):
    print "%s <%s>/%d" % (name, email, id)

    # Gather data
    hits = int(redis.get('hit:%s' % id) or '0')
    hps = int(redis.get('honeypot:%s' % id) or '0')
    print "  %d of %d blocked" % (hits - hps, hits)

    if hits > 0:
        send_report(name, email, hits, hps)

    # Clear processed data
    redis.decr('hit:%s' % id, hits)
    redis.decr('honeypot:%s' % id, hps)

def hello():
    selbots = []
    for key in sorted(["host:DevSystem0.local", "host:DevSystem1.local",
                       "host:DevSystem2.local", "host:DevSystem3.local"]):
        if redis.get(key):
            s = json.loads(redis.get(key))
            # time in seconds since epoch as int
            now = int(time.time())
            mins = (now - s['time']) / 60
            rel = natural.date.delta(now, s['time'], words=False)
            s['server'] = key
            s['minutes_ago'] = mins
            s['time_ago_relative'] = rel[0]
            selbots.append(s)
    return render_template('house.html', servers=selbots)

def runScript():
    conn = getMySQLConnection()
    cursor = conn.cursor()
    tags = getAllTags(conn)
    redis = getRedisConnection()
    print "Redis Fetch (With caching)\n\n\n"
    query_durations = [0.0] * QUERIES_AMOUNT
    for i in range(QUERIES_AMOUNT):
        current_tag = random.choice(tags)
        start_time = datetime.now()
        redis_query_key = current_tag + ":" + str(QUERY_OFFSET) + ":" + str(QUERY_LIMIT)
        posts_json = redis.get(redis_query_key)
        posts = json.loads(posts_json)
        end_time = datetime.now()
        delta = end_time - start_time
        sec = delta.total_seconds()
        query_durations[i] = sec
        for post in posts:
            printPost(post)
            print '\n-----------------\n'
        print ("Query #" + str(i) + " with tag \'" + current_tag + "\' finished."
               " Duration: " + str(delta.total_seconds()) + " sec\n\n\n\n\n")
    print "\n\nAverage query time: " + str(sum(query_durations) / float(QUERIES_AMOUNT))

def dev_from_null_hyp(grid, use_relative_deviation=False):
    """ Calc deviation from null hypothesis """
    hashkey = "hypothesis grid6" + str(xBins)
    # null_hyp_grid = cache.get(hashkey)
    serialized = redis.get(hashkey)
    null_hyp_grid = pickle.loads(serialized) if serialized else None
    if isinstance(null_hyp_grid, np.ndarray):
        # Found in cache
        # print "null hypothesis grid loaded from cache"
        pass
    else:
        # Not found in cache
        print "null hypothesis not found in cache"
        lons, lats = get_enough_data()
        null_hyp_grid = gen_grid(lats, lons)
        # cache.set(hashkey, null_hyp_grid, timeout=60*60*24*31*99999)
        redis.set(hashkey, pickle.dumps(null_hyp_grid))

    if use_relative_deviation:
        quotient = np.divide(grid - null_hyp_grid, null_hyp_grid)
        NaNs = np.isnan(quotient)
        quotient[NaNs] = 0
        Infs = np.isinf(quotient)
        quotient[Infs] = 0
        maxerr = quotient.max()
        quotient = quotient + maxerr
    else:
        # Use absolute deviation plus max element
        # to remove entries < 0 (best try so far)
        quotient = grid - null_hyp_grid + null_hyp_grid.max()
    return quotient, null_hyp_grid

def store_message(self, mailfrom, target, rcpttos, data):
    msg = email.message_from_string(data)
    headers = {
        "From": msg.get("From"),
        "To": msg.get("To"),
        "Subject": msg.get("Subject"),
        "Date": time.ctime(time.time()),
    }
    text_parts = []
    html_parts = []
    for part in msg.walk():
        if part.get_content_type() == "text/plain":
            text_parts.append(part.get_payload())
        elif part.get_content_type() == "text/html":
            html_parts.append(self.clean_html(part.get_payload()))
    simple_msg = {"headers": headers,
                  "text_parts": text_parts,
                  "html_parts": html_parts}
    simple_msg_json = json.dumps(simple_msg)
    timestamp = time.time()

    msgid = redis.get("msgid_counter")
    if msgid:
        msgid = redis.incr("msgid_counter")
    else:
        redis.set("msgid_counter", 1)
        msgid = 1

    msgkey = "message:" + str(msgid)
    redis.set(msgkey, simple_msg_json)  # storing the msg once
    redis.zadd("messages:" + target, msgkey, timestamp)  # all messages to me
    redis.zadd("messages_from:" + target + ":" + mailfrom, msgkey, timestamp)  # all messages from you to me

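# A hypothetical reader for store_message() above: "messages:<target>" is a
# sorted set of message keys scored by timestamp, so ZREVRANGE returns the
# newest keys first; each key holds the JSON blob stored once above.
def load_messages(target, count=10):
    msgkeys = redis.zrevrange("messages:" + target, 0, count - 1)
    return [json.loads(redis.get(k)) for k in msgkeys]
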
def get_static(cache_ignore=False):
    if not cache_ignore:
        champions = redis.get('static_champions')
        items = redis.get('static_items')
        if champions and items:
            champions = json.loads(champions)
            items = json.loads(items)
            items = {int(x): y for x, y in items.iteritems()}
            return champions, items
    champions = riot.static_get_champion_list(champ_data='tags,info')['data']
    items = riot.static_get_item_list(item_list_data='depth,from')['data']
    items = {int(x): y for x, y in items.iteritems()}
    redis.set('static_champions', json.dumps(champions))
    redis.set('static_items', json.dumps(items))
    return champions, items

def image(filename):
    gs_file_string = redis.get(filename)
    buffer_image = BytesIO()
    gs_image = Image.open(BytesIO(gs_file_string))
    gs_image.save(buffer_image, 'JPEG', quality=90)
    buffer_image.seek(0)
    return Response(buffer_image.getvalue(), mimetype='image/jpeg')

def done():
    id = request.form.get('id', None)
    if id:
        return redis.get(id.split('.gif')[0])
    else:
        return 'error'

def rate(user_id, value, film_id=None, actor_id=None, director_id=None,
         type=1, overwrite=True, check_if_exists=False):
    user_key = "user:%s:ratings" % user_id
    key = _rating_key(film_id, actor_id, director_id, type)
    assert user_id
    if not overwrite or check_if_exists:
        exists = redis.get("user:%s:rating:%s" % (user_id, key)) is not None
        if not overwrite and exists:
            return exists
    else:
        exists = None
    with redis.pipeline() as pipe:
        if value:
            if type == 1 and film_id:
                pipe.hset(user_key, film_id, value)
            pipe.sadd("users", user_id)
            pipe.hset("ratings:%s" % key, user_id, value)
            pipe.set("user:%s:rating:%s" % (user_id, key), value)
        else:
            if type == 1:
                pipe.hdel(user_key, film_id)
            pipe.hdel("ratings:%s" % key, user_id)
            pipe.delete("user:%s:rating:%s" % (user_id, key))
        pipe.execute()
    return exists

def get_or_incr():
    name = 'counter'
    post_key = 'incrBy'
    if request.method == 'POST' and post_key in request.form:
        redis.incr(name, request.form[post_key])
    return redis.get(name) or '0'

def index():
    value = redis.get('RUNNING')
    running = value == 'true'
    return render_template('index.html', running=running)

def get_queue(session, redis=redis.Redis()):
    proposal_id = Utils._proposal_id(session)
    if proposal_id is not None:
        serialized_queue = redis.get("mxcube:queue:%d" % proposal_id)
    else:
        serialized_queue = None
    return new_queue(serialized_queue)

def get_features(key_prefix, id, type=None):
    # TODO - type support
    # assert key_prefix.startswith('cache_user')
    key = "%s:%s" % (key_prefix, id)
    features = redis.get(key)
    if not features:
        return dict()
    return pickle.loads(features)

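# A hypothetical writer matching get_features() above: pickle the feature
# dict under the same "<key_prefix>:<id>" key that get_features() reads.
def set_features(key_prefix, id, features):
    redis.set("%s:%s" % (key_prefix, id), pickle.dumps(features))
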
def check_price_and_notify(delay):
    while 1:
        print "Notification is triggered!"
        # loading latest products in memory
        # build list of products with decreasing prices
        products_prices_before = build_hash_from_json(json.loads(redis.get('optimusprice.prices')))
        status_code, prices = get_all_prices()
        if status_code == 200:
            products_prices_now = build_hash_from_json(prices)
        else:
            products_prices_now = products_prices_before

        users = redis.smembers("optimusprice.subscriptions.users")
        print "Subscribed users: ", users
        for chat_id in users:
            user_id = redis.get("optimusprice.mapping." + str(chat_id))
            # updating user subscriptions
            status_code, products = get_likes_for_user(user_id)
            if status_code == 200:
                for p in products:
                    redis.sadd("optimusprice.subscriptions." + str(chat_id), p)

        products_ids_to_notify = []
        print products_prices_before.keys()
        print products_prices_now.keys()
        for p in products_prices_before.keys():
            if products_prices_now.get(p) is not None:
                if float(products_prices_now.get(p)['price']) < float(products_prices_before.get(p)['price']):
                    products_ids_to_notify.append(p)
        print "products_ids_to_notify ", products_ids_to_notify

        users = redis.smembers("optimusprice.subscriptions.users")
        print "subscribed users ", users
        for u in users:
            products_likes = map(int, redis.smembers("optimusprice.subscriptions." + str(u)))
            print "likes for user %s : %s" % (str(u), str(products_likes))
            products_intersection = set(products_ids_to_notify).intersection(products_likes)
            print "products_intersection for user %s : %s" % (str(u), str(products_intersection))
            if len(products_intersection) > 0:
                print "Notifying %s about %s" % (u, str(products_intersection))
                for pp in products_intersection:
                    bot.sendMessage(u, "Price has changed for: " + products_prices_before.get(pp)['name'])
                    f = open('images/%s.jpg' % pp, 'rb')  # some file on local disk
                    bot.sendPhoto(u, f)
        time.sleep(delay)

def status():
    hits = redis.get("hits")
    if hits is None:
        hits = '0'
    hits = int(hits)
    redis.set('hits', hits + 1)
    return "OK"

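# The GET-then-SET above can lose updates under concurrent requests. A sketch
# of the same counter using Redis's atomic INCR instead (assumes the same
# module-level `redis` client; the function name is hypothetical):
def status_atomic():
    redis.incr('hits')  # creates the key at 0 and increments atomically
    return "OK"
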
def _thaw(prefix):
    import os.path
    thawed = None
    try:
        thawed = jsonpickle.decode(redis.get(prefix + PluginDirectory.FREEZER_KEY))
    except:
        pass
    return thawed

def get_data(artist_name):
    '''Get the logged data for a given artist name'''
    res_string = redis.get(artist_name)
    res_dict = json.loads(res_string)
    res = jsonify(res_dict)
    res.headers['Access-Control-Allow-Origin'] = '*'
    return res

def isOnline(self):
    """
    Returns true if the user is considered online by the system.
    """
    value = redis.get("user:%s:ping" % self.id) or 0
    if (time.time() - float(value)) < 30:
        return True
    return False

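# A hypothetical heartbeat matching isOnline() above: store the last-seen
# time under "user:<id>:ping", after which the user counts as online for the
# next 30 seconds. (The method name is an assumption.)
def ping(self):
    redis.set("user:%s:ping" % self.id, time.time())
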