def scheduler_redis_job():
    """Refresh the cached instance info/state in redis for every known LXD server.

    For each 'servers:*' record, queries the LXD API for the instance list,
    then stores each instance's metadata and runtime state back into redis
    under 'server:<name>:instance:<instance>:info' / ':state'.
    """
    logger.info('Redis_job: updating data in redis DB')
    database_lxdservers_list = redis_store.keys('servers:*')
    for serverkey in database_lxdservers_list:
        lxdserver = json.loads(redis_store.get(serverkey))
        # renamed from `all`, which shadowed the builtin
        instance_names = []
        try:
            res = lxd_api_get_scheduler(lxdserver, 'instances')
            for c in res.json()['metadata']:
                # API returns URLs like '/1.0/instances/<name>'; strip the prefix
                instance_names.append(c[15:])
        except Exception as e:
            # log instead of print so failures land in the application log
            logger.error('Redis_job: failed to list instances: %s', e)
        for c in instance_names:
            res = lxd_api_get_scheduler(lxdserver, 'instances/' + c)
            redis_store.set(
                'server:' + lxdserver['name'] + ':instance:' + c + ':info',
                json.dumps(res.json()['metadata']))
            res_state = lxd_api_get_scheduler(lxdserver, 'instances/' + c + '/state')
            redis_store.set(
                'server:' + lxdserver['name'] + ':instance:' + c + ':state',
                json.dumps(res_state.json()['metadata']))
def add_following(current_userid, to_follow_userid):
    """Add *to_follow_userid* to the current user's followings and persist."""
    info = UserInfo.get_user_info(current_userid)
    new_entry = {
        "userid": to_follow_userid,
        "email": UserInfo.get_user_email(to_follow_userid),
    }
    info["followings"].append(new_entry)
    redis_store.set(current_userid, json.dumps(info))
def get_image_code(): """提供图片验证码 1.接受请求,获取uuid 2.生成图片验证码 3.使用UUID存储图片验证码内容到redis 4.返回图片验证码 """ # 1.接收请求,获取前端的uuid uuid = request.args.get('uuid') last_uuid = request.args.get('last_uuid') if not uuid: abort(403) # 2.生成验证码 name, text, image = captcha.generate_captcha() # 3. 使用UUID存储图片验证码内容到redis try: if last_uuid: # 上次的uuid若还存在,删除上次的uuid对应的记录 redis_store.delete('ImageCode:' + last_uuid) # 保存本次需要记录的验证码数据 redis_store.set('ImageCode:' + uuid, text, constants.IMAGE_CODE_REDIS_EXPIRES) except Exception as e: logging.error(e) return jsonify(errno=RET.DBERR, errmsg=u'保存验证码失败') # 4.返回图片验证码 response = make_response(image) response.headers['Content-Type'] = 'image/jpg' return response
def set():
    """Store the submitted name in the session and record a timestamp in redis."""
    name = request.form["name"]
    session["logged_in"] = name
    redis_store.set(name, time.time())
    # fixed typo in the response text ("I'am" -> "I'm")
    return "I'm %s" % (name)
def wrapped(*args, **kw):
    """Rate-limiting wrapper: counts requests per key in redis.

    Does not return 429 directly — it records the outcome in flask.g
    (g.view_limits, g.status_code) so the dispatcher can act on it.
    """
    # redis key for this rate-limit bucket
    key = ":".join(["ratelimit", by()])
    # requests still allowed in the current window
    try:
        remaining = requests - int(redis_store.get(key))
    except (ValueError, TypeError):
        # key missing or not yet an integer: start a fresh counter
        remaining = requests
        redis_store.set(key, 0)
    # seconds left in the current window
    ttl = redis_store.ttl(key)
    if ttl < 0:
        # expired or no expiry set (ttl == -2 or ttl == -1): start a new window
        redis_store.expire(key, window)
        ttl = window
    # expose the rate-limit status via g
    g.view_limits = (requests, remaining - 1, time.time() + ttl)
    if remaining > 0:
        # still under the limit: count this request and proceed
        redis_store.incr(key, 1)
        # limit not reached — record success in g for dispatch to handle
        g.status_code = 200
        return func(*args, **kw)
    else:
        # return make_response('Too Many Requests', 429)
        # cannot return 429 directly here; record it in g.status_code
        # so dispatch can produce the response
        g.status_code = 429
        return func(*args, **kw)
def record_name(name):
    """Return True if *name* has been seen before; otherwise mark it for 24h."""
    if redis_store.get(name) is not None:
        return True
    redis_store.set(name, 1, 86400)
    return False
def sigin():
    """Authenticate a user and issue a redis-backed access token (1h TTL)."""
    username = request.form['username']
    password = request.form['password']
    user = db.get_by_id(table="tb_user", field="username", value=username)
    if not user or not pbkdf2_sha256.verify(password, user[0]['password']):
        return response(status_code=401, data="You Not Authorized")
    # token is the sha256 of a random uuid plus the username
    token_seed = '{}{}'.format(uuid.uuid4(), username)
    access_token = hashlib.sha256(token_seed.encode('utf-8')).hexdigest()
    userdata = db.get_by_id(table="tb_userdata", field="id_userdata",
                            value=user[0]['id_userdata'])
    # serialize the session payload and store it under the token for one hour
    session_payload = dill.dumps({
        'id_userdata': user[0]['id_userdata'],
        'email': userdata[0]['email'],
        'username': username,
    })
    redis_store.set(access_token, session_payload)
    redis_store.expire(access_token, 3600)
    return response(200, data={
        'email': userdata[0]['email'],
        'Access-Token': access_token,
        'expires': 3600,
    })
def get_areas_info(): """ 查询城区信息 :return: """ # 0.先从缓存中去取,如果缓存中没有,再去数据库中取 try: areas = redis_store.get('area_info') except Exception as e: logging.error(e) areas = None # 0.1 如果不为空,做查询操作 if areas and len(re.findall(r'aid', areas)) > 0: return jsonify(errno=RET.OK, errmsg='获取成功', data=eval(areas)) # 1.查询数据库 try: areas = Area.query.all() except Exception as e: logging.error(e) return jsonify(errno=RET.DBERR, errmsg='获取城区信息失败') # 2.组成字典,以便json areas_list = [] for area in areas: areas_list.append(area.to_dict()) # 0.2 存储json_areas数据到redis缓存中 try: redis_store.set('area_info', areas_list, constants.AREA_INFO_REDIS_EXPIRES) except Exception as e: logging.error(e) # 3. 返回数据 return jsonify(errno=RET.OK, errmsg='获取成功', data=areas_list)
def getIncEntityID(eventName):
    """Return the next entity ID for *eventName*, atomically.

    The previous get/set/incr sequence raced: two concurrent callers that
    both saw a missing key would both return 1. A single INCR is atomic,
    creates the key at 0 when missing, and yields the same sequence of
    return values (1, 2, 3, ...).
    """
    return int(redis_store.incr(eventName + '.entityID'))
def io_map_send():
    """Emit the current map state to the connected client.

    Reads the player's position, visibility mask and coin count from redis,
    overlays other users' ghost positions, opens solved doors, removes
    already-picked coins, then emits 'map.recv' (and 'map.visible' when the
    visibility mask changed).
    """
    login = session['user']
    try:
        x = int(redis_store.get(login + ':player.x'))
        y = int(redis_store.get(login + ':player.y'))
        visible = msgpack.loads(redis_store.get(login + ':visible'))
        coins = int(redis_store.get(login + ':coins'))
    except:
        # player state not initialised yet — nothing to send
        return
    # start from the static level data and overlay per-player state
    data = copy.deepcopy(level)
    data['player']['x'] = x
    data['player']['y'] = y
    data['coins'] = coins
    data['ghosts'] = {}
    # ghost positions of every other visible logged-in user
    for k in user_logins:
        if k in hidden_users:
            continue
        try:
            px = int(redis_store.get(k + ':player.x'))
            py = int(redis_store.get(k + ':player.y'))
        except:
            # that user has no stored position — skip them
            continue
        data['ghosts'][k] = {'x': px, 'y': py}
    # doors whose puzzle has been solved are shown as open (-1)
    for door in level['doors']:
        if redis_store.get(login + ':solved:' + str(level['doors'][door])):
            data['doors'][door] = -1
    # blank out coins this player has already picked up
    for i in range(level['width']):
        for j in range(level['height']):
            if level['level'][j][i] == '$':
                if is_coin_picked(i, j):
                    data['level'][j][i] = ' '
    emit('map.recv', msgpack.dumps(data))
    # update_visible mutates `visible`; persist and broadcast when it changed
    if update_visible(visible, x, y, True):
        redis_store.set(login + ':visible', msgpack.dumps(visible))
        emit('map.visible', msgpack.dumps(visible))
def captcha_image():
    """Generate an image captcha, cache its text in redis, return the image."""
    cur_id = request.args.get("cur_id")
    pre_id = request.args.get("pre_id")
    # generate_captcha yields (code id, captcha text, binary image data)
    name, text, image_data = captcha.generate_captcha()
    try:
        # args: key, value, expiry
        redis_store.set("image_code:%s" % cur_id, text.lower(),
                        constants.IMAGE_CODE_REDIS_EXPIRES)
        # discard the previous captcha for this client, if any
        if pre_id:
            redis_store.delete("image_code:%s" % pre_id)
    except Exception as e:
        current_app.logger.error(e)
        return "图片验证码操作失败"
    # return the raw image with an image content type
    response = make_response(image_data)
    response.headers["Content-Type"] = "image/png"
    return response
def post(self):
    """
    Add new lxd server to lxdmanager

    Admin-only: registers our certificate with the LXD server, persists a
    Server row, then mirrors the record into redis under 'servers:<name>'.
    """
    data = request.get_json()['data']
    current_identity = import_user()
    if current_identity.admin:
        app.logger.info('User: %s adding new server to lxdmanager',
                        import_user().username)
        # exchange certificates with the new LXD server first
        res = lgw.send_cert_to_server(data['name'], data['address'], data['password'])
        server = Server()
        server.name = data['name']
        server.address = data['address']
        server.exec_address = data['exec_address']
        server.verify = data['verify']
        # per-server key files are named after the server
        server.key_private = data['name'] + '_key.key'
        server.key_public = data['name'] + '_key.crt'
        db.session.add(server)
        db.session.commit()
        # update redis DB with actual server
        redis_store.set('servers:' + server.name,
                        json.dumps(server.__jsonapi__('redis')))
        return res.json()
def update_cache():
    """Fetch WeChat article lists and cache them in redis for 10 hours.

    Returns the list of article dicts, or {} when no feed could be fetched.
    Previously a failure on the first URL returned {} immediately and the
    remaining feeds were never tried; now a failing feed is skipped.
    """
    urls = ['http://mp.weixin.qq.com/mp/homepage?__biz=MzI1MzA1MzQ0MA==&' \
            'hid=3&sn=2058fc67f54c5b913396dab4db8149ff&begin=0&count=29&action=appmsg_list&f=json',
            'https://mp.weixin.qq.com/mp/homepage?__biz=MzU1NDkwNDY5MQ==&'\
            'hid=1&sn=d19338069924da83163881e59cc771ca&begin=0&count=29&action=appmsg_list&f=json'
            ]
    headers = {
        'User-Agent': 'Mozilla/5.0 (compatible; MSIE 10.0; ' +
                      'Windows NT 6.2; Trident/6.0)'
    }
    news_list = []
    for url in urls:
        try:
            res = requests.get(url, timeout=6, headers=headers)
            appmsg_list = res.json()['data']['homepage_render']['appmsg_list']
        except Exception as e:
            # skip just this feed instead of abandoning the whole refresh
            logging.warning(u'连接超时出错:%s' % e)
            continue
        for appmsg in appmsg_list:
            news_list.append({
                "title": appmsg['title'],
                "url": quote(appmsg['link']),
                "author": appmsg['author'],
                "type": "xm",
            })
    if not news_list:
        # every feed failed: keep the old cache and signal failure as before
        return {}
    redis_store.set(redis_plugin_prefix, pickle.dumps(news_list), 3600 * 10)
    return news_list
def test():
    """Smoke-test the redis connection: set a key, read it back, return it."""
    a = 'abc'
    redis_store.set('aa', a)
    b = redis_store.get('aa')
    # Bug fix: the original returned str(c) with `c` undefined (its assignment
    # was commented out), raising NameError; return the read-back value instead.
    return str(b)
def get_session(force=False):
    """Return an ejudge master session id, cached in redis for TTL seconds.

    With *force* true, the cache is skipped and a fresh login is spawned.
    Returns None when the login command fails or produces no session id.
    """
    if not force:
        session = redis_store.get(REDIS_KEY)
        if session:
            return session.decode()
    try:
        session = subprocess.check_output([
            EJUDGE_CONTESTS_CMD_PATH,
            str(EJUDGE_CONTEST_ID),
            "master-login", "STDOUT",
            EJUDGE_USER_LOGIN, EJUDGE_USER_PASSWORD
        ]).strip()
    except (subprocess.CalledProcessError, OSError):
        # narrowed from a bare `except:` that swallowed everything,
        # including KeyboardInterrupt / SystemExit
        return None
    if not session:
        return None
    redis_store.set(REDIS_KEY, session)
    redis_store.expire(REDIS_KEY, TTL)
    return session.decode()
def confirm_role_details():
    """Render the role-confirmation page from data accumulated in redis."""
    redis_store.set('roles_seen', 0)
    if request.method == 'POST':
        redis_store.hmset('contact', request.form.to_dict())
    # fixed sections stored as redis hashes
    sections = (
        ('role', 'Role details'),
        ('logistics', 'Logistical details'),
        ('security', 'Security details'),
    )
    data = {
        key: {'caption': caption, 'row_data': redis_store.hgetall(key)}
        for key, caption in sections
    }
    # each recorded skill is a JSON blob keyed by the skill name
    for skill in redis_store.lrange('skills', 0, -1):
        data[skill] = {
            'row_data': json.loads(redis_store.get(skill)),
            'caption': skill,
        }
    return render_template('submit/confirm-role-details.html', data=data)
def in_chat(chat):
    """
    Called when a user enters a chat page: binds the user's id to their
    socket sid in redis as chat_chatid_sid_userid = sid (12h TTL).
    :return: None
    """
    print('---------------用户进入聊天页面---------------')
    print(chat, request.sid)
    print(session.get('nickname'))
    print('---------------end---------------')
    if not chat:
        return
    sid = request.sid
    user_id = session.get('id')
    # chat type 1 = one-to-one: combine both user ids (sorted) into the key
    # chat_sid_usertouserid_userid
    chat_obj_id = chat['chat_obj_id']
    chat_type = chat.get('type')
    if chat_type not in [1, 2]:
        return
    if chat_type == 1:
        user_to_user = '******'.join(sorted([str(user_id), str(chat_obj_id)]))
        chat_key = 'chat_sid_%s_%s' % (user_to_user, user_id)
    # otherwise it's a group chat: key is group id plus user id,
    # chat_sid_groupid_userid
    else:
        chat_key = 'chat_sid_%s_%s' % (chat_obj_id, user_id)
    redis_store.set(chat_key, sid, 3600 * 12)
def check_service_over_daily_message_limit(key_type, service):
    """Raise TooManyRequestsError when *service* has hit its daily limit.

    Test keys are exempt. With redis enabled, today's count is cached for an
    hour; otherwise it is read from the database on every call.
    """
    if key_type == KEY_TYPE_TEST:
        return
    if current_app.config['REDIS_ENABLED']:
        cache_key = daily_limit_cache_key(service.id)
        service_stats = redis_store.get(cache_key)
        if not service_stats:
            service_stats = services_dao.fetch_todays_total_message_count(
                service.id)
            redis_store.set(cache_key, service_stats, ex=3600)
        _raise_if_over_daily_limit(service, int(service_stats))
        return
    # TODO: remove this block when redis is re-enabled in live
    service_stats = services_dao.fetch_todays_total_message_count(service.id)
    _raise_if_over_daily_limit(service, int(service_stats))


def _raise_if_over_daily_limit(service, sent):
    """Log and raise when *sent* meets or exceeds the service's daily limit."""
    if sent >= service.message_limit:
        current_app.logger.error(
            "service {} has been rate limited for daily use sent {} limit {}"
            .format(service.id, sent, service.message_limit))
        raise TooManyRequestsError(service.message_limit)
def warn_about_daily_message_limit(service, messages_sent):
    """Warn service users as they approach or hit the daily message limit.

    Sends a "nearing limit" notification at NEAR_DAILY_LIMIT_PERCENTAGE of
    the limit and a "reached limit" notification at the limit, each at most
    once per day (deduplicated via redis keys holding the send timestamp).
    When over the limit, raises TrialServiceTooManyRequestsError for
    restricted services, LiveServiceTooManyRequestsError otherwise.
    """
    nearing_daily_message_limit = messages_sent >= NEAR_DAILY_LIMIT_PERCENTAGE * service.message_limit
    over_daily_message_limit = messages_sent >= service.message_limit
    current_time = datetime.utcnow().isoformat()
    # dedup window: one warning of each kind per day
    cache_expiration = int(timedelta(days=1).total_seconds())
    # Send a warning when reaching 80% of the daily limit
    if nearing_daily_message_limit:
        cache_key = near_daily_limit_cache_key(service.id)
        if not redis_store.get(cache_key):
            # the cache entry doubles as the once-per-day guard
            redis_store.set(cache_key, current_time, ex=cache_expiration)
            send_notification_to_service_users(
                service_id=service.id,
                template_id=current_app.config["NEAR_DAILY_LIMIT_TEMPLATE_ID"],
                personalisation={
                    "service_name": service.name,
                    "contact_url": f"{current_app.config['ADMIN_BASE_URL']}/contact",
                    "message_limit_en": "{:,}".format(service.message_limit),
                    "message_limit_fr": "{:,}".format(service.message_limit).replace(",", " "),
                },
                include_user_fields=["name"],
            )
    # Send a warning when reaching the daily message limit
    if over_daily_message_limit:
        cache_key = over_daily_limit_cache_key(service.id)
        if not redis_store.get(cache_key):
            redis_store.set(cache_key, current_time, ex=cache_expiration)
            send_notification_to_service_users(
                service_id=service.id,
                template_id=current_app.config["REACHED_DAILY_LIMIT_TEMPLATE_ID"],
                personalisation={
                    "service_name": service.name,
                    "contact_url": f"{current_app.config['ADMIN_BASE_URL']}/contact",
                    "message_limit_en": "{:,}".format(service.message_limit),
                    "message_limit_fr": "{:,}".format(service.message_limit).replace(",", " "),
                },
                include_user_fields=["name"],
            )
        # always log and raise when over the limit, even if the warning
        # notification was already sent earlier today
        current_app.logger.info(
            "service {} has been rate limited for daily use sent {} limit {}".
            format(service.id, int(messages_sent), service.message_limit))
        if service.restricted:
            raise TrialServiceTooManyRequestsError(service.message_limit)
        else:
            raise LiveServiceTooManyRequestsError(service.message_limit)
def check_service_message_limit(key_type, service):
    """Raise TooManyRequestsError if *service* exceeded its daily message limit."""
    if key_type == KEY_TYPE_TEST:
        return
    cache_key = redis.daily_limit_cache_key(service.id)
    stats = redis_store.get(cache_key)
    if not stats:
        # cache miss: seed the counter from the database for the next hour
        stats = services_dao.fetch_todays_total_message_count(service.id)
        redis_store.set(cache_key, stats, ex=3600)
    if int(stats) >= service.message_limit:
        raise TooManyRequestsError(service.message_limit)
def post(self):
    """
    Logout Request: blocklist the current token's jti for its remaining TTL.
    :return: status message
    """
    jti = get_raw_jwt()['jti']
    ttl = redis_store.ttl(jti)
    # redis TTL returns -1 (no expiry) or -2 (missing key) when the jti isn't
    # tracked, and SET rejects non-positive expiry values — guard against that.
    if ttl > 0:
        redis_store.set(jti, 'true', ttl)
    else:
        # unknown remaining lifetime: block the jti without an expiry
        redis_store.set(jti, 'true')
    return {'status': "Logged out successfully"}
def get_cookie():
    """Log a user in via a one-time code previously stored in redis."""
    one_time_code = request.form.get("one_time_code")
    account = redis_store.get(one_time_code)
    # the code must resolve to an account that has a stored uuid
    if not (account and redis_store.hget(account, "uuid")):
        return "failed"
    session["logged_in"] = True
    session["username"] = account
    redis_store.set(account + ":logged_in", True)
    return "successful login"
def autocomplete_cities():
    """Autocomplete for cities.

    Lookup order: redis cache, then Elasticsearch, then PostgreSQL.
    The result (up to 10 cities, most populous first) is cached in redis
    for a day when redis is reachable.
    """
    query = request.args.get("query")
    redis_key = f"autocomplete_cities|{query}"
    # Try to find with Redis.
    try:
        result = redis_store.get(redis_key)
        redis_is_connected = True
        if result:
            # cache hit: payload is a pickled list of suggestion dicts
            return jsonify(suggestions=pickle.loads(result))
    except RedisConnectionError:
        # remember the outage so we skip the cache write below
        redis_is_connected = False
    # Try to find with Elasticsearch.
    try:
        cities = es.search(
            index="airtickets-city-index",
            from_=0,
            size=10,
            doc_type="CityName",
            body={
                "query": {
                    "bool": {
                        "must": {
                            "match_phrase_prefix": {
                                "value": {
                                    "query": query
                                }
                            }
                        }
                    }
                },
                "sort": {
                    "population": {
                        "order": "desc"
                    }
                },
            },
        )
        result = [city["_source"] for city in cities["hits"]["hits"]]
    except (ElasticConnectionError, NotFoundError, AttributeError):
        # Try to find with PostgreSQL.
        cities = (CityName.query.join(
            City.city).filter(CityName.name.like(query + "%")).distinct(
                City.population, CityName.city_id).order_by(
                    City.population.desc(), CityName.city_id).limit(10).all())
        result = [city.autocomplete_serialize() for city in cities]
    # cache for a day, but only when redis answered earlier
    if redis_is_connected:
        redis_store.set(redis_key, pickle.dumps(result), 86400)
    return jsonify(suggestions=result)
def delete(self):
    """
    Revoke token
    """
    jti = get_raw_jwt()['jti']
    if not jti:
        api.abort(code=404, message='Token not found')
    # mark the jti revoked for as long as an access token can live
    revocation_key = 'access_jti:' + jti
    redis_store.set(revocation_key, 'true', app.config['ACCESS_TOKEN_EXPIRES'])
    return {"msg": "Access token revoked"}, 200
def logistical_details():
    """Render the logistical-details form; on POST, bank the generalist skills."""
    if request.method == 'POST':
        prior_role = 'Generalist skills'
        redis_store.set(prior_role, skill_dump(request.form))
        # Bug fix: the skills list lives under the string key 'skills'
        # (read elsewhere via redis_store.lrange('skills', 0, -1)); the
        # original passed an undefined bare name `skills`.
        redis_store.rpush('skills', prior_role)
    question = {
        'department': {
            'for': 'Department',
            'label': 'What department or agency is this role in?',
            'hint': "What's your organisation generally known as?"
        },
        'directorate': {
            'for': 'Directorate',
            'label': 'Which business area or directorate is this role in?',
            'hint': 'This should describe the team or business area in which the Fast Streamer will be working.'
        },
        'location': {
            'for': 'Location',
            'label': 'Please give an address for this role',
            'hint': 'Please include a postcode. This might not be where the Fast Streamer will spend'
                    'all their time, but it will help us decide whether they\'ll need to relocate'
        },
        'experience': {
            'heading': 'How much experience do you expect the Fast Streamer to already have to be effective in '
                       'this role?',
            'name': 'post-length',
            'values': {
                '0 - 6 months': 1,
                '12 - 18 months': 2,
                '2 years': 3,
                '3 years': 4
            },
            'for': 'Experience required',
            'hint': 'Remember that this is the amount of general DDaT experience, rather than experience in '
                    'this area'
        },
        'ongoing': {
            'heading': 'Is this post a one-off, or ongoing?',
            'name': 'ongoing',
            'values': {
                'One-off': 'one-off',
                'Ongoing': 'ongoing'
            },
            'for': 'Ongoing or one-off?'
        },
        'start': {
            'for': 'Start month',
            'label': 'What month would you prefer the Fast Streamer start?',
            'hint': 'The start date will generally be 1st of the month, unless the Fast Streamer has already booked '
                    'some leave.'
        }
    }
    return render_template('submit/logistical-details.html', question=question)
def check_service_over_daily_message_limit(key_type, service):
    """Check today's send count against the daily limit (test keys exempt)."""
    if key_type == KEY_TYPE_TEST or not current_app.config["REDIS_ENABLED"]:
        return
    cache_key = daily_limit_cache_key(service.id)
    messages_sent = redis_store.get(cache_key)
    if not messages_sent:
        # cache miss: seed the counter from the database for the next hour
        messages_sent = services_dao.fetch_todays_total_message_count(service.id)
        one_hour = int(timedelta(hours=1).total_seconds())
        redis_store.set(cache_key, messages_sent, ex=one_hour)
    warn_about_daily_message_limit(service, int(messages_sent))
def in_chat_list():
    """Bind the user's id to their socket sid as chat_list_sid_<userid> (12h TTL)."""
    print('---------------用户进入聊天列表-----------------')
    sid = request.sid
    user_id = session.get('id')
    print(user_id, session.get('nickname'), sid)
    print('---------------end-----------------')
    redis_store.set('chat_list_sid_%s' % user_id, sid, 3600 * 12)
def post(self):
    """
    Get new token with valid token
    :return new access_token
    """
    current_identity = import_user()
    access_token = create_access_token(identity=current_identity)
    access_jti = get_jti(encoded_token=access_token)
    # register this token's jti as not revoked
    redis_store.set('access_jti:' + access_jti, 'false',
                    app.config['ACCESS_TOKEN_EXPIRES'])
    # Bug fix: return the token whose jti was just registered. The original
    # minted a second token whose jti was never stored, so the token handed
    # to the client looked revoked/unknown to the blocklist check.
    return {'access_token': access_token}
def update_bids(bids):
    """
    Cache bids in redis
    @param bids: list of bids to store
    @return True
    """
    # Set value and 30-day expiry in one call: the previous set-then-expire
    # pair was non-atomic — a crash in between left 'bids' without a TTL.
    redis_store.set('bids', json.dumps(bids), 60 * 60 * 24 * 30)
    return True
def code():
    """Generate a captcha, cache its text in redis for 30s, return the image."""
    infor = Code().creat_code()
    code_id = str(request.args.get('code_id') or '')
    redis_store.set(code_id, infor['code'])
    redis_store.expire(code_id, 30)
    image_path = infor["image_path"]
    # Fix: removed the redundant f.close() that sat inside the `with` block —
    # the context manager already closes the file on exit.
    with open(image_path, 'rb') as f:
        image_content = f.read()
    # the image is a throwaway temp file; delete it once read
    os.remove(image_path)
    return Response(image_content, mimetype='jpeg')
def add_post(userid, text, image):
    """Append a post (text plus optional image) to the user's feed and persist."""
    image_key = ImagesStorage().put_image(image) if image is not None else None
    info = UserInfo.get_user_info(userid)
    info["posts"].append({
        "text": text,
        "timestamp": time.time(),
        "image_key": image_key,
    })
    redis_store.set(userid, json.dumps(info))
def delete_following(current_userid, to_unfollow_userid):
    """Remove *to_unfollow_userid* from the user's followings.

    Raises ValueError when the target is not currently followed.
    """
    info = UserInfo.get_user_info(current_userid)
    index = next(
        (i for i, entry in enumerate(info["followings"])
         if entry["userid"] == to_unfollow_userid),
        -1,
    )
    if index == -1:
        raise ValueError("Cannot unfollow not followed user.")
    del info["followings"][index]
    redis_store.set(current_userid, json.dumps(info))
def get_users():
    """Return all users plus a query timestamp cached in redis for an hour."""
    output = {'c': 0, 'users': []}
    query_time = redis_store.get('query_time')
    if query_time is None:
        # first call in this hour: record the current time
        query_time = time.time()
        redis_store.set('query_time', query_time, 3600)
    output['query_time'] = query_time
    output['users'] = [{'uid': u.uid, 'name': u.name} for u in User.query.all()]
    session['time'] = time.time()
    return api_response(output)
def reload_redis(single_domain=None):
    """Rebuild redis records for DNS domains.

    With no argument, flushes the db and reloads every domain; with
    *single_domain*, reloads just that domain. The per-domain logic was
    previously duplicated verbatim in both branches; it now lives in
    _load_domain_into_redis.
    """
    if single_domain is None:
        redis_store.flushdb()
        for domain in Domain.query.all():
            _load_domain_into_redis(domain)
    else:
        domain = Domain.query.filter_by(domain=single_domain).first()
        _load_domain_into_redis(domain)


def _load_domain_into_redis(domain):
    """Write one domain's ip list (with priorities, or per-province ips) to redis."""
    if domain.priority:
        domain_ip_list = []
        domain_priority_list = []
        for single_ip in domain.ips:
            domain_ip_list.append(single_ip.ip)
            domain_priority_list.append(single_ip.priority)
        redis_store.set(domain.domain, pickle.dumps(domain_ip_list))
        domain_priority_list = generate_priority_list(domain_priority_list)
        redis_store.set('%s_priority' % domain.domain,
                        pickle.dumps(domain_priority_list))
    else:
        # priority disabled: flag the domain with 0 and store per-province ips
        redis_store.set(domain.domain, 0)
        for single_ip in domain.ips:
            redis_store.set('%s_%s' % (domain.domain, single_ip.province_name),
                            single_ip.ip)
def match_incr():
    """Bump the cached match counter, seeding it from the database when missing."""
    current = redis_store.get("match_count")
    if not current:
        current = db.session.query(Match).count()
    redis_store.set("match_count", int(current) + 1)
def player_incr():
    """Bump the cached player counter, seeding it from the database when missing."""
    cached = redis_store.get("player_count")
    total = int(cached) + 1 if cached else db.session.query(Player).count() + 1
    redis_store.set("player_count", total)