def current_app(config_class=Config):
    """Application factory: build and configure the Blocksmurfer Flask app.

    NOTE(review): the name shadows the well-known ``flask.current_app``
    proxy; kept unchanged because external callers import it by this name.

    :param config_class: configuration object loaded via ``from_object``.
    :return: the fully configured :class:`flask.Flask` instance.
    """
    app = Flask(__name__)
    app.config.from_object(config_class)
    app.register_blueprint(errors.blueprint)

    @app.template_filter('ctime')
    def timectime(s):
        # Render UNIX timestamps as datetime objects inside templates.
        return datetime.datetime.fromtimestamp(s)

    @app.route('/robots.txt')
    @app.route('/sitemap.xml')
    def static_from_root():
        # Serve well-known root files straight from the static folder.
        return send_from_directory(app.static_folder, request.path[1:])

    # Limit request, set to slow now because we're still in development phase.
    # FIX: the original passed ``app`` to the constructor *and* called
    # ``limiter.init_app(app)`` later, initialising the limiter against the
    # same app twice.  Construct it unbound here; attach exactly once below.
    limiter = Limiter(
        key_func=get_remote_address,
        default_limits=["200 per day", "50 per hour"],
    )

    # Imported here rather than at module top — presumably to avoid circular
    # imports with the blueprint modules; TODO confirm.
    from blocksmurfer.main import bp as main_bp
    app.register_blueprint(main_bp)

    babel.init_app(app)
    qrcode.init_app(app)

    from blocksmurfer.api import bp as api_bp
    app.register_blueprint(api_bp, url_prefix='/api/v1')

    limiter.init_app(app)
    return app
def setup_limiter(app):
    """Create a rate limiter keyed on the client's remote address,
    attach it to *app*, and return it for further configuration."""
    rate_limiter = Limiter(key_func=get_remote_address)
    rate_limiter.init_app(app)
    return rate_limiter
# NOTE(review): truncated, whitespace-mangled fragment — the ``try:`` that
# matches the dangling ``except`` below lies before this chunk.  Review
# findings: ``Limiter(app, key_func=...)`` already binds the app, so the
# following ``limiter.init_app(app)`` initialises it a second time; the
# broad ``except Exception`` silently downgrades import failures to a print.
from flask_limiter.util import get_remote_address from flask_limiter import Limiter from flasgger import Swagger from flasgger.utils import swag_from from flask_restful_swagger import swagger except Exception as e: print("Some modules are missing {}".format(e)) app = Flask(__name__) api = Api(app) limiter = Limiter(app, key_func=get_remote_address) limiter.init_app(app) api_swagger = swagger.docs(api, apiVersion='0.1', api_spec_url='/docs') class MyVMapi(Resource): decorators = [limiter.limit("10/day")] @swagger.model @swagger.operation(notes="good !!!") def get(self, ID): return {"Resources": 200, 'Data': ID} api.add_resource(MyVMapi, '/VM/<string:ID>')
# NOTE(review): truncated fragment — ``MyApi.get`` is cut off mid-condition.
# Review findings: ``Limiter = Limiter(app, ...)`` rebinds and shadows the
# imported ``Limiter`` class with an instance, then ``Limiter.init_app(app)``
# re-initialises it; ``parser.parse_args()`` is invoked twice per
# ``__init__`` where one call could be shared.
try: from flask import Flask, request from flask_restful import Resource, Api from flask_restful import reqparse from flask_limiter.util import get_remote_address from flask_limiter import Limiter except Exception as e: print('Some Modules are Missing {}'.format(e)) app = Flask(__name__) api = Api(app) Limiter = Limiter(app, key_func=get_remote_address) Limiter.init_app(app) parser = reqparse.RequestParser() parser.add_argument('zip', type=str, required=True, help="Please enter zip code") parser.add_argument('city', type=str, required=True, help="please enter city") class MyApi(Resource): def __init__(self): self.__zip_code = parser.parse_args().get('zip', None) self.__city = parser.parse_args().get('city', None) def get(self): if len(self.__city) > 2 and len(self.__zip_code) > 2:
"jwt_service", "db", "mailer", "loop", "redis", ) loop = asyncio.get_event_loop() logger = logging.getLogger("Sayonika") sayonika_instance = cors( Sayonika(), allow_origin=["https://sayonika.moe", "*"], # Remove this one when ready for prod ) jwt_service = JWT(SETTINGS) mailer = Mailer(SETTINGS) limiter = Limiter( key_func=get_ratelimit_key, default_limits=SETTINGS.get("RATELIMITS", "5 per 2 seconds;1000 per hour").split( ";" ), ) redis = InitLaterRedis( ConnectionsPool(SETTINGS["REDIS_URL"], minsize=5, maxsize=10, loop=loop) ) # Use env vars to update config sayonika_instance.config.update(SETTINGS) limiter.init_app(sayonika_instance) logger.setLevel(logging.INFO)
# NOTE(review): whitespace-mangled Python 2 fragment (``print`` statements);
# the trailing ``post()`` handler is truncated mid-definition.
# SECURITY(review): ``send_imessage`` interpolates ``buddy`` and ``message``
# into an osascript heredoc executed via ``os.system`` — only ``"`` is
# stripped, so untrusted input can inject shell/AppleScript; confirm callers
# pass trusted data or switch to ``subprocess.run`` with argument lists.
def send_imessage(message, buddy): #TODO: Allow `"` to be sent. message = message.replace('"', "'") cmd = ('osascript<<END\n' 'tell application "Messages"\n' ' set targetService to 1st service whose service type = iMessage\n' ' set targetBuddy to buddy "{0}" of targetService\n' ' send "{1}" to targetBuddy\n' 'end tell\n' 'END') cmd = cmd.format(buddy,message) return str(os.system(cmd)) app = Flask(__name__) limiter = Limiter(key_func=get_remote_address) limiter.init_app(app) @app.route('/') def index(): return render_template('index.html') @app.route('/test') def test(): print 'Print hit.' return 'Return hit.' @app.route('/post', methods = ['POST']) @limiter.limit("1/second") def post(): if request.headers['Content-Type'] == 'text/plain': print request.data
# NOTE(review): truncated fragment — ``blogapp`` is defined outside this
# chunk and ``runscraper.post`` is cut off after a commented-out line.
# Review findings: ``Limiter=Limiter(blogapp, ...)`` rebinds and shadows the
# imported class with an instance, then ``Limiter.init_app(blogapp)``
# re-initialises the already-bound limiter.
from blogscraper.blogscraper.spiders.wiki_spider import WikiSpider from scrapy.crawler import CrawlerProcess import subprocess from scrapy.utils.project import get_project_settings import json from scrapy.settings import Settings from blogscraper.blogscraper import settings as wiki_settings Limiter=Limiter(blogapp,key_func=get_remote_address) Limiter.init_app(blogapp) parser=reqparse.RequestParser() parser.add_argument('keywords',type=str ,required=True,help="Please enter the Keyword before running the Scraper") class runscraper(Resource): def __init__(self): self.__keywords=parser.parse_args().get('keywords',None) def get(self): return {'data':self.__keywords} def post(self): # s=get_project_settings()
# Application factory: Flask + SQLAlchemy app that streams internet-radio
# stations and drives a SuperCollider host at 10.5.0.11 over OSC; station
# metadata is loaded from stations.csv into a GeoDataFrame and the nearest
# station is found with a cKDTree.
# NOTE(review): the chunk is whitespace-mangled — a string literal
# ("An error has occured. ... Please choose again") is even split across two
# physical lines below, so the original indentation/line structure is lost;
# code left byte-identical for that reason.
# NOTE(review): a limiter is created and attached but the only
# ``@limiter.limit`` usage is commented out.
# NOTE(review): ``send_random_station`` does ``if not radio_list:`` on what
# appears to be a (Geo)DataFrame slice — pandas truthiness raises
# ValueError; TODO confirm intended emptiness check (``.empty``).
# NOTE(review): the BackgroundScheduler jobs invoke the view functions
# ``populate_queue``/``add_random_station`` outside any request context —
# verify they do not touch request-bound state.
def create_app(**config_overrides): logger = logging.getLogger(__name__) logger.info('app starts...') app = Flask(__name__, template_folder="./") limiter = Limiter(key_func=get_remote_address) limiter.init_app(app) app.config.from_object(BaseConfig) if config_overrides: app.config.update(config_overrides) db.init_app(app) db.app = app migrate = Migrate(app, db) CORS(app) api = Api(app) from main import models db.create_all() app.before_request(create_before_request(app)) def ckdnearest(gdA, gdB): nA = np.array(list(gdA.geometry.apply(lambda x: (x.x, x.y)))) nB = np.array(list(gdB.geometry.apply(lambda x: (x.x, x.y)))) btree = cKDTree(nB) dist, idx = btree.query(nA, k=1) gdA['dist'] = dist return gdA[gdA.dist == gdA.dist.min()] def x(row): try: x = row[0] return x except Exception: pass def y(row): try: x = row[1] return x except Exception: pass @app.errorhandler(InvalidUsage) def handle_invalid_usage(error): """ Handles invalid api use """ response = jsonify(error.to_dict()) response.status_code = error.status_code return response df = pd.read_csv('stations.csv') gdf = gpd.GeoDataFrame(df, geometry=gpd.points_from_xy( df.place_geo_x, df.place_geo_y)) @app.route('/') def home_page(): example_embed = 'This string is from python' return render_template('index.html', embed=example_embed) @app.route('/api/audio_feed') def audio_play(): def fetch(): radio = 'http://10.5.0.11:8000/stream.mp3' r = requests.get(radio, stream=True) for chunk in r.iter_content(chunk_size=20000): yield chunk return Response(fetch(), mimetype="audio/mp3") @app.route('/api/queue', methods=["GET"]) def populate_queue(): """ Random station playback """ client = pyOSC3.OSCClient() client.connect(('10.5.0.11', 57120)) msg = pyOSC3.OSCMessage() msg.setAddress("/stop") msg.append('stopping') client.send(msg) res = StreamingAuto.get_last() res_user = StreamingUser.get_last() if not res_user: radio = res.url name = res.name country = res.country place_name = res.place_name else: radio = res_user.url 
name = res_user.name country = res_user.country place_name = res_user.place_name StreamingUser.query.filter(StreamingUser.name == name).delete() msg = pyOSC3.OSCMessage() msg.setAddress("/start") msg.append(radio) msg.append(np.random.exponential()) chunk = check_health(radio) song = AudioSegment.from_file(io.BytesIO(chunk), format="mp3") song.export("SuperCollider/out.wav", format="wav") f = open("SuperCollider/out.txt", "w") f.write(name) f.close() client.send(msg) if not res_user: jsonObj = {"msg": "{0} {1} {2}".format( res.country, res.place_name, res.name)} else: jsonObj = {"msg": "{0} {1} {2}".format( res_user.country, res_user.place_name, res_user.name)} # sleeping so backend and sc can 'sync' time.sleep(5) s = NowPlaying(country=str(country), place_name=str(place_name), name=str(name), url=str(radio), timestamp=datetime.now().strftime('%Y-%m-%d %H:%M:%S')) db.session.add(s) db.session.flush() db.session.commit() return jsonObj @app.route('/api/get_names', methods=["GET"]) def get_names(): res = NowPlaying.get_last() radio = res.url name = res.name country = res.country place_name = res.place_name jsonObj = {"msg": "{0} {1} {2}".format( res.country, res.place_name, res.name)} return jsonObj @app.route('/api/stop', methods=["GET"]) def stop_queue(): switcher = 0 client = pyOSC3.OSCClient() client.connect(('10.5.0.11', 57120)) msg = pyOSC3.OSCMessage() msg.setAddress("/stop") msg.append('stopping') client.send(msg) jsonObject = {"msg": "stop"} return jsonObject @ app.route('/api/random', methods=["GET"]) def send_random_station(): gpd1 = gdf gpd2 = gpd.GeoDataFrame([['test', Point(float(request.args['x']), float( request.args['y']))]], columns=['Place', 'geometry']) radio_list = gpd1[gpd1.country == request.args['country']] logger.info(request.args['country']) logger.info(radio_list) if not radio_list: radio_list = random.choice(ckdnearest(gpd1, gpd2)[ ['mp3', 'country', 'place_name', 'name']].values) logger.info(radio_list[0]) client = pyOSC3.OSCClient() 
client.connect(('10.5.0.11', 57120)) radio = radio_list[0] msg = pyOSC3.OSCMessage() msg.setAddress("/start") msg.append(radio) msg.append(np.random.exponential()) client.send(msg) station_string = np.array2string(radio_list[1:]).replace( '[', '').replace(']', '').replace("'", '') s = StreamingAuto(country=radio_list[1], place_name=radio_list[2], name=radio_list[3], url=radio, timestamp=datetime.now().strftime('%Y-%m-%d %H:%M:%S')) s.save() db.session.commit() jsonObject = {"msg": station_string} return jsonObject def check_health(station): """ Gets some mp3 chunks to check if they are valid """ radio = station r = requests.get(radio, stream=True) # get some chunks for chunk in r.iter_content(chunk_size=20000): return chunk # @ limiter.limit("1 per 20second") @ app.route('/api', methods=["GET"]) def send_station(): """ Handles user requests """ gpd1 = gdf gpd2 = gpd.GeoDataFrame([['test', Point(float(request.args['x']), float( request.args['y']))]], columns=['Place', 'geometry']) try: logger.info(gpd1[gpd1.country == request.args['country']]) radio_list = (gpd1[gpd1.country == request.args['country']].sample(1)[ ['mp3', 'country', 'place_name', 'name']].values)[0].tolist() logger.info(request.args['country']) logger.info(radio_list) if not radio_list: radio_list = random.choice(ckdnearest(gpd1, gpd2)[ ['mp3', 'country', 'place_name', 'name']].values) chunk = check_health(radio_list[0]) song = AudioSegment.from_file(io.BytesIO(chunk), format="mp3") song.export("SuperCollider/out.wav", format="wav") f = open("SuperCollider/out.txt", "w") f.write(radio_list[3]) f.close() res_user = StreamingUser(url=radio_list[0], country=radio_list[1], place_name=radio_list[2], name=radio_list[3], timestamp=datetime.now().strftime('%Y-%m-%d %H:%M:%S')) res_user.save() db.session.commit() jsonObj = {"msg": "{0} {1} {2}".format( res_user.country, res_user.place_name, res_user.name)} logger.info(radio_list[0]) except Exception as e: jsonObj = {"msg": "An error has occured. 
Please choose again"} logger.info(str(e)) return jsonObj def add_random_station(): # Need to try to catch corrupted mp3 before sending them to SC grrrr df = pd.read_csv('stations.csv') sample = df.sample() logger.info(sample) try: chunk = check_health(str(sample['mp3'].values[0])) song = AudioSegment.from_file(io.BytesIO(chunk), format="mp3") logger.info(str(sample['country'].values[0])) s = StreamingAuto(country=str(sample['country'].values[0]), place_name=str(sample['place_name'].values[0]), name=str(sample['name'].values[0]), url=str(sample['mp3'].values[0]), timestamp=datetime.now().strftime('%Y-%m-%d %H:%M:%S')) db.session.add(s) db.session.flush() db.session.commit() except Exception as e: logger.info(str(e)) db.session.rollback() pass scheduler = BackgroundScheduler() scheduler.add_job(func=populate_queue, trigger="interval", seconds=30) scheduler.add_job(func=add_random_station, trigger="interval", seconds=90) scheduler.start() # Shutdown your cron thread if the web process is stopped atexit.register(lambda: scheduler.shutdown()) return app
# Application factory for the "briefly" service: instance config, SQLAlchemy
# + Redis clients, a rotating-file-logged rate limiter with a localhost
# whitelist, the v1 API blueprint (20/s), static pages, registration with
# e-mailed tokens, an author-lookup endpoint, and JSON error handlers.
# NOTE(review): ``Limiter(app, ...)`` already binds the app, so the later
# ``limiter.init_app(app)`` initialises it a second time.
# NOTE(review): ``reg()`` renders with ``email=email`` but ``email`` is not
# defined in this view — NameError at request time unless a module-level
# ``email`` exists outside this chunk; TODO confirm.
# NOTE(review): ``letter()`` builds SQL with an f-string; ``letter`` is
# checked against a fixed whitelist first, which mitigates injection, but a
# bound parameter would be safer than interpolation.
def create_app(): app = Flask(__name__, instance_relative_config=True) app.config.from_pyfile('config.py') db.init_app(app) from briefly.models.users import Users from briefly.models.authors import Authors redis_client.init_app(app) ch_file = handlers.RotatingFileHandler('logs/briefly.log', maxBytes=1000000, backupCount=5) formatter = logging.Formatter( '%(asctime)s - %(name)s - %(levelname)s: %(message)s', datefmt='%Y-%m-%d %H:%M:%S') ch_file.setFormatter(formatter) limiter = Limiter( app, key_func=get_remote_address, default_limits= [], # [count] [per|/] [n (optional)] [second|minute|hour|day|month|year] headers_enabled=True, strategy='fixed-window-elastic-expiry') limiter.logger.addHandler(ch_file) limiter.init_app(app) @limiter.request_filter def ip_whitelist(): return request.remote_addr == "127.0.0.1" from briefly.api.v1 import endpoints as api_v1 limiter.limit("20 per second")(api_v1.bp) app.register_blueprint(api_v1.bp) @app.route('/') def index(): return render_template('index.html') @app.route('/about') def about(): return render_template('about.html') @app.route('/api') def api(): return render_template('api.html') @app.route('/reg', methods=['GET']) def reg(): return render_template('reg.html', email=email) @app.route('/regme', methods=['POST']) @limiter.limit('5 per day') def regme(): if request.method == 'POST' and request.form['emailaddress']: token = uuid.uuid4() email = request.form['emailaddress'] ip = request.remote_addr if not email: message = 'E-mail is required.' else: is_user_present = Users.query.filter_by( user_email=email).first() if is_user_present is None: new_user = Users(user_email=email, user_token=str(token), user_reg_ip=ip) db.session.add(new_user) db.session.commit() message = 'Token was sent to your e-mail' text_body = render_template('emails/new_user.txt', token=token) Thread(target=send_async_email, args=(current_app._get_current_object(), email, text_body)).start() else: message = 'E-mail is already registered.' 
flash(message) return redirect(url_for('reg')) @app.route('/authors/<letter>', methods=['GET']) @limiter.limit('100/second') def letter(letter): answer = {'status': False, 'number_of_records': 0, 'authors': []} letters = [ 'А', 'Б', 'В', 'Г', 'Д', 'Е', 'Ж', 'З', 'И', 'К', 'Л', 'М', 'Н', 'О', 'П', 'Р', 'С', 'Т', 'У', 'Ф', 'Х', 'Ц', 'Ч', 'Ш', 'Э', 'Ю', 'Я' ] if len(letter) == 1 and letter in letters: answer['status'] = True sql = f'SELECT id, author_fullname, author_url ' \ f'FROM authors ' \ f'WHERE author_fullname ' \ f'REGEXP "[а-яА-Я]+ [а-яА-Я]+ {letter}"' authors = Authors.query.from_statement(text(sql)).all() if authors: answer['number_of_records'] = len(authors) for author in authors: answer['authors'].append({ 'id': author.id, 'author_fullname': author.author_fullname, 'author_url': author.author_url }) return jsonify(answer), 200 @app.errorhandler(429) def ratelimit_handler(e): return make_response(jsonify(error="Too Many Requests"), 429) @app.errorhandler(400) def badrequest_handler(e): return make_response(jsonify(error="Bad Request"), 400) @app.errorhandler(405) def method_not_allowd_handler(e): return make_response(jsonify(error="Method Not Allowed"), 405) return app
# NOTE(review): truncated fragment — ``FLASK_APP``, ``config``, ``const``
# and the session helpers are defined outside this chunk, and
# ``get_comments`` is cut off inside its ``for`` loop.  The visible part
# wires an optional reverse-proxy fix, a blueprint, and a rate limiter
# (constructed unbound, then attached once via ``init_app`` — correct usage)
# and fetches viewed, non-blocked comments for a post ordered by creation
# time.
if config.SAFE_RPROXY_FIX_IS_ON: FLASK_APP.wsgi_app = SaferProxyFix( FLASK_APP.wsgi_app, num_proxy_servers=config.SAFE_RPROXY_FIX_NUM_PROXY_SERVERS, detect_misconfiguration=config.SAFE_RPROXY_FIX_DETECT_MISCONFIGURATION, ) BLUEPRINT = Blueprint('aoikseldomstaticsiteapi', __name__, template_folder='templates') RATE_LIMITER = Limiter( strategy=config.RATE_LIMITING_STRATEGY, key_func=config.RATE_LIMITING_KEY_FUNC, ) RATE_LIMITER.init_app(FLASK_APP) @BLUEPRINT.route(config.COMMENTS_OF_POST_URI + '/<int:post_id>') @RATE_LIMITER.limit(config.GET_COMMENTS_RATE_LIMIT) def get_comments(post_id): with session_context() as session: comment_objs = session.query(Comment).filter( Comment.post_id == post_id, Comment.is_viewed == const.TBL_COMMENT_COL_IS_VIEWED_V_YES, Comment.is_blocked == const.TBL_COMMENT_COL_IS_BLOCKED_V_NO, ).order_by(Comment.create_time).all() comment_infos = [] for comment_obj in comment_objs:
# make_app: builds the Flask app for a content-moderation AI service —
# per-second and per-5-second rate limits from instance settings, a health
# probe run against sample text, log-browsing and Prometheus-style /metrics
# endpoints, word-list management, and the main /ai inference endpoint.
# NOTE(review): the chunk is whitespace-mangled (statements split
# mid-expression across physical lines) and carries Chinese comments, so the
# code is left byte-identical; findings below are review notes only.
# NOTE(review): ``Limiter(app, key_func=..., default_limits=...)`` already
# binds the app, so the following ``limiter.init_app(app)`` initialises it
# twice.
# SECURITY(review): ``/logfile`` opens ``'./log/' + rf`` with the
# user-supplied ``f`` query parameter and no sanitisation visible — path
# traversal risk (e.g. ``?f=../secrets``); confirm and constrain to basename.
# NOTE(review): ``list = os.listdir('./log')`` shadows the ``list`` builtin.
# NOTE(review): the startup check at the end calls ``health()`` twice in the
# ``firstTime`` branch (duplicated ``dic = health()`` line) — likely
# unintended double work.
# NOTE(review): the /ai handler converts missing form fields to defaults via
# bare ``except:`` clauses — these also swallow unrelated errors.
def make_app(self, myaigroup): app = Flask(__name__) #self.requests_total = Counter("request_count", "Total request count of the ai") #limiter = Limiter(app, key_func=self.get_myrequest) limiter = Limiter(app, key_func=self.get_myrequest, default_limits=[ "{}/second".format(self.my_s_limiter), "{}/5seconds".format(self.my_5s_limiter) ]) limiter.init_app(app) # ========================================================= def health(): dic = {} now = time.time() try: #no image ,but text ======================================= data = '这是一段测试文本,公安部,法制,最高法院,司法独立' health_str = myaigroup.doais(data, self.ai_group_list) #print(health_str) ret = 'ok' for one_value in health_str: if one_value['code'] == '-1': ret = '-1' now2 = time.time() dic['health'] = ret dic['speed'] = str(round(now2 - now, 4)) return dic except Exception: exstr = traceback.format_exc() self.disulog.error(exstr) dic['health'] = "-1" dic['speed'] = '-1' return dic #=========================================================== # 当超过限速,返回错误信息 @app.errorhandler(429) def ratelimit_handler(e): mes = 'max_s_qps is={} max_5s_qps is={}'.format( str(self.my_s_limiter), str(self.my_5s_limiter)) ret = make_error(self.models_str, 'up_qps', mes) ret.update({"pro_ver": "{}".format(self.pro_ver)}) self.qps_counter = self.qps_counter + 1 print(e.description) return Response(json.dumps(ret), mimetype='application/json') @app.route('/logfiles') @limiter.exempt def logfiles(): strs = [] list = os.listdir('./log') for i in list: strs.append(i + '\n') return ''.join(strs) @app.route('/logfile') @limiter.exempt def logfile(): rf = request.args.get('f') strs = [] with open('./log/' + rf, 'r') as f: while 1: lines = f.readlines(100) if not lines: break for line in lines: strs.append(line) return ''.join(strs) @app.route('/about') @limiter.exempt def ais(): return 'server:{}, aimodel:{}, version:{}\n'.format( self.group_name, self.models_str, self.version) @app.route('/ais_all') @limiter.exempt def ais_all(): strs = [] for i in 
self.ai_group_list: strs.append('\n') strs.append(i) strs.append('\n--------------\n') num = 0 with codecs.open('./ai/' + i + '/labels.txt', 'r', 'utf-8') as f: while 1: lines = f.readlines(30) if not lines: break for line in lines: strs.append(line) num = num + 1 return ''.join(strs) @app.route('/areyouok') @limiter.exempt def areyouok(): dic = health() return 'health:{}, speed:{}s/per_dectection\n'.format( dic['health'], dic['speed']) """ @app.route('/gpu') @limiter.exempt def gpu(): return gputool.gpustatus_2(self.gpu_device)+"\n" # self.gpustatus_2() """ @app.route('/metrics') @limiter.exempt def metrics(): #a 是ai服务的基本信息 a='#Server information\n' \ '#server: {}\n' \ '#aimodel: {}\n' \ '#version: {}\n' \ '#pro_ver: {}\n' \ '#max_qps_s {}\n' \ '#max_qps_5s {}\n' \ .format(self.group_name, self.models_str.replace('#','_'), self.version, self.pro_ver, str(self.my_s_limiter), str(self.my_5s_limiter) ) dic = health() o_counter = self.myaigroup.overload_counter keys = o_counter.keys() if dic['health'] == 'ok': h_statu = 1 else: h_statu = 0 #b是简况检查结果 b = '#check one image on all models for testint health\nai_health {}\n' \ 'ai_one_request_duration_seconds {}\n'.format(h_statu, dic['speed']) #c 是超过qps限额的信息 c = "#up qpscounter\n" c = c + "up_qps_total {}\n".format(str(self.qps_counter)) #========= no gpu ===================================== #d 是gpu信息 [这个模型不需要GPU信息] #d = "#gpu status\n{}\n".format(gputool.gpustatus_1(self.gpu_device)) #e是核心ai接口信息 # ovsm 是总overload次数 ovsm = 0 ee = "" for key in keys: ee = ee + "{}_model_overload_total {}\n".format( key, str(o_counter[key])) ovsm = ovsm + o_counter[key] #e= prometheus_client.generate_latest(self.requests_total).decode('utf-8') #main_counter是系统技术器,记录实际调用所有模型 predict方法的次数 if self.main_counter == 0: overload_ratio = 0 else: overload_ratio = round((ovsm / self.main_counter), 4) e= "#core ai interface request times\n" \ "request_access_ai_total {}\n" \ "request_not_overload_total {}\n" \ "request_overload_ratio {}\n".\ 
format(self.main_counter,(self.main_counter-ovsm),overload_ratio) e = e + "server_overload_total {}\n".format(ovsm) e = e + ee f = "#ai server start time: {}\n".format(self.start_time) return Response("{}{}{}{}{}".format(a, b, c, e, f), mimetype="text/plain") @app.route('/manage', methods=['POST']) @limiter.exempt def mamage_word(): #这里会有中文问题,要解决掉============================== try: word = urllib.request.unquote(request.form.get('word')) todo = urllib.request.unquote(request.form.get('todo')) if todo == "change" or todo == "increase": kindnum = urllib.request.unquote( request.form.get('kindnum')) else: kindnum = 0 except (Exception): dic = {} dic['todo'] = todo dic['code'] = '-2' return Response(json.dumps(dic), mimetype='application/json') ret = myaigroup.manage_word(todo, word, kindnum) dic = {} dic['todo'] = todo dic['code'] = ret return Response(json.dumps(dic), mimetype='application/json') @app.route('/allwords') @limiter.exempt def allwords(): password = request.args.get('pass') if password == 'ilovehmaiproject': return myaigroup.allwords() else: return "你好, 欢迎关注 hmai !" @app.route('/ai', methods=['POST']) # 自定义限制器覆盖了默认限制器,参数说明如下: # 1. param limit_value: 访问限制阈值 # 2. param error_message: 错误的返回信息(注意,暂不支持中文,如果使用中文,请自定义错误页) # 3. param methods: 对哪些方法启用限制器? 
@limiter.limit("{}/second".format(self.my_s_limiter)) @limiter.limit("{}/5seconds".format(self.my_5s_limiter)) def get_tasks(): self.main_counter = self.main_counter + 1 #====== 校验 aigroup 参数传点 try: group = urllib.request.unquote(request.form.get('aigroup')) if group != self.group_name: ret = make_error(self.models_str, 'aigroup', 'aigroup error:{}'.format(group)) ret.update({"pro_ver": "{}".format(self.pro_ver)}) return Response(json.dumps(ret), mimetype='application/json') except: ret = make_error(self.models_str, 'aigroup', 'no aigroup') ret.update({"pro_ver": "{}".format(self.pro_ver)}) return Response(json.dumps(ret), mimetype='application/json') #==== 校验addition_info_in_place_ai参数,只有place模型需要使用这个参数 try: req_addition_info_in_place_ai = urllib.request.unquote( request.form.get('addition_info_in_place_ai')) if req_addition_info_in_place_ai != 'y': req_addition_info_in_place_ai = False else: req_addition_info_in_place_ai = True except: req_addition_info_in_place_ai = False #==== 校验debug参数,开debug,回回传picid try: req_debug = urllib.request.unquote(request.form.get('debug')) if req_debug != 'y': req_debug = False else: req_debug = True except: req_debug = False # === 校验 loc参数,是否显示坐标,flags, face模型使用该参数 try: req_loc = urllib.request.unquote(request.form.get('loc')) if req_loc != 'y': req_loc = False else: req_loc = True except: req_loc = False #================================================================================= #=== 校验附加参数1, 如果没有传入,则默认值 0 try: req_add_var_1 = urllib.request.unquote( request.form.get('add_var_1')) if req_add_var_1 != '0': pass else: req_add_var_1 = '0' except: req_add_var_1 = '0' #=== 校验附加参数2, 如果没有传入,则默认值 0 try: req_add_var_2 = urllib.request.unquote( request.form.get('add_var_2')) if req_add_var_2 != '0': pass else: req_add_var_2 = '0' except: req_add_var_2 = '0' #=== 校验附加参数3, 如果没有传入,则默认值 0 try: req_add_var_3 = urllib.request.unquote( request.form.get('add_var_3')) if req_add_var_3 != '0': pass else: req_add_var_3 = '0' except: 
req_add_var_3 = '0' # ================================================================================= # === 校验fr参数,默认f,代表fpr优先策略,如果是r,代表recall优先 try: req_fr = urllib.request.unquote(request.form.get('fr')) if req_fr != 'r': req_fr = 'f' else: req_fr = 'r' except: req_fr = 'f' try: image_data = None try: # 获取文字数据 image_data = urllib.request.unquote( (request.form.get('textdata'))) except Exception: # print('ba64 exp') image_data = None #if image_data == None: #req_imgurl = urllib.request.unquote(request.form.get('imgurl')) req_ais_str = urllib.request.unquote(request.form.get('ais')) if req_ais_str != 'ALL_GROUP': req_ais = req_ais_str.split('#') ais_ok = True ais_num = len(req_ais) if ais_num < 1: ais_ok = False for i in range(0, ais_num): if req_ais[i] not in self.ai_group_list: ais_ok = False break if ais_ok != True: ret = make_error( self.models_str, 'ainame', '{} bad ais parameter '.format(req_ais_str)) ret.update({"pro_ver": "{}".format(self.pro_ver)}) self.disulog.error(ret) return Response(json.dumps(ret), mimetype='application/json') else: req_ais = self.ai_group_list except (Exception): exstr = traceback.format_exc() print(exstr) # result={'error':'001 : param error'} ret = make_error(self.models_str, 'imgdata', exstr + 'fail fot geting image from data') ret.update({"pro_ver": "{}".format(self.pro_ver)}) self.disulog.error(ret) return Response(json.dumps(ret), mimetype='application/json') if image_data == None: ret = make_error(self.models_str, 'textdata', 'empty!') ret.update({"pro_ver": "{}".format(self.pro_ver)}) self.disulog.error(ret) return Response(json.dumps(ret), mimetype='application/json') try: if len(req_ais) <= 0: ret = make_error(self.ai_group_list, 'ainame', 'no req ai name') ret.update({'pro_ver': self.pro_ver}) return Response(json.dumps(ret), mimetype='application/json') add_var_1 = int(req_add_var_1) add_var_2 = int(req_add_var_2) add_var_3 = int(req_add_var_3) result_1 = myaigroup.doais( image_data, req_ais, needLocation=req_loc, 
placeai_addition=req_addition_info_in_place_ai, fr=req_fr, add_var_1=add_var_1, add_var_2=add_var_2, add_var_3=add_var_3) result = {} result['result'] = result_1 result.update({"pro_ver": "{}".format(self.pro_ver)}) if req_debug: result.update({ "picid": "{}".format( urllib.request.unquote(request.form.get('picid'))) }) return Response(json.dumps(result), mimetype='application/json') except (Exception) as e: exstr = traceback.format_exc() self.disulog.error(exstr) return '{}' try: if self.firstTime: dic = health() dic = health() self.firstTime = False else: dic = health() print(' 启动检查结果:{}, 单次处理时间:{}秒'.format(dic['health'], dic['speed'])) except Exception: exstr = traceback.format_exc() self.disulog.error(exstr) print(' 启动检查结果:失败') return app
from flask_httpauth import HTTPBasicAuth
from passlib.apps import custom_app_context as pwd_context
from itsdangerous import (TimedJSONWebSignatureSerializer as Serializer,
                          BadSignature, SignatureExpired)
from sqlalchemy.orm import relationship
from sqlalchemy import exc

# initialize our Flask application and Redis server
app = flask.Flask(__name__)

# Limit the number of requests a single client (keyed by remote address)
# may send.  FIX: the original followed this with ``limiter.init_app(app),``
# — a second initialisation against the same app, with a stray trailing
# comma that built and discarded a 1-tuple.  Passing ``app`` to the
# constructor already performs the initialisation, so that call is dropped.
limiter = Limiter(
    app,
    key_func=get_remote_address,
    default_limits=["2 per minute", "1 per second"],
)
# NOTE(review): assigned but never used — flask-limiter takes its storage
# backend from ``storage_uri``/app config, not from this name.  TODO: wire
# this into the Limiter construction if Redis-backed limits are intended.
ratelimit_storage_url = 'redis://rate-limiting.amazonaws.com:6379'

db = redis.StrictRedis(host=settings.REDIS_HOST,
                       port=settings.REDIS_PORT,
                       db=settings.REDIS_DB)

app.config['DEBUG'] = True
# SECURITY(review): hard-coded secrets and database credentials should be
# loaded from the environment or an untracked config file, not source.
app.config['SECRET_KEY'] = 'super-secret'
app.config['SECURITY_PASSWORD_SALT'] = 'super-secret'
# app.config['SQLALCHEMY_DATABASE_URI'] = 'sqlite:///cbir.sqlite'
# app.config['SQLALCHEMY_DATABASE_URI'] = 'mysql+pymysql://mudzi:z32EM2%GhK%[email protected]/cbir'
app.config[
    'SQLALCHEMY_DATABASE_URI'] = 'mysql+pymysql://tariro:yBNI#$nH%S]]@mysql8test.cd81orwyjedt.us-east-1.rds.amazonaws.com/mysql8test'
app.config['SECURITY_TOKEN_AUTHENTICATION_KEY'] = 'token'
login_serializer = URLSafeTimedSerializer(app.config['SECRET_KEY'])
def init_limiter(app):
    """Attach default rate limits to every view function on *app*.

    Limits enforced per client IP: 300/day, 20/minute and 10/second.
    (FIX: the original Chinese docstring claimed 200 requests per day,
    contradicting the configured 300/day limit.)

    :param app: the Flask application to protect.
    :return: the configured ``Limiter`` (previously nothing was returned),
        so callers can add per-view limits or exemptions — consistent with
        ``setup_limiter`` elsewhere in this codebase.
    """
    limiter = Limiter(
        key_func=get_remote_address,
        # One limit string per entry; the `limits` parser also accepts the
        # original single comma-separated string, but this is unambiguous.
        default_limits=['300/day', '20/minute', '10/second'],
    )
    # Surface limiter warnings/errors on stderr.
    limiter.logger.addHandler(logging.StreamHandler())
    limiter.init_app(app)
    return limiter
# NOTE(review): truncated fragment — ``app``, ``logger``, ``settings`` and
# ``facebookShim`` come from outside this chunk, and the
# ``oauth.remote_app(...)`` call is cut off mid-argument-list.  Review
# finding: ``Limiter(app, key_func=...)`` already binds the app, so the
# following ``rate_limiter.init_app(app)`` initialises it a second time.
# # Login init # login_manager = LoginManager() login_manager.init_app(app) login_manager.login_view = "login" logger.info("Initialized logins...") # # Rate-limiter init # rate_limiter = Limiter(app, key_func=get_remote_address) rate_limiter.init_app(app) # # Oauth Handlers and Login # fbshim = facebookShim.FacebookShim() oauth = OAuth() facebook = oauth.remote_app( 'facebook', base_url='https://graph.facebook.com/', request_token_url=None, access_token_url='/oauth/access_token', authorize_url='https://www.facebook.com/dialog/oauth', consumer_key=settings.FACEBOOK['APP_ID'], consumer_secret=settings.FACEBOOK['APP_SECRET'],