def change_user_info():
    """Update the logged-in user's profile after verifying the old password.

    Reads the new credentials from the POSTed form, checks that the request
    targets the caller's own account and that the supplied old password is
    correct, then persists the changes.

    :return: JSON with ``status`` 1 on success, 0 on failure.
    """
    # NOTE: the previous debug ``print(request.form)`` was removed — it wrote
    # the plaintext old and new passwords to the server log.
    username = request.form.get('username', default=None)
    old_password = request.form.get('old_password', default=None)
    new_password = request.form.get('new_password', default=None)
    email = request.form.get('email', default=None)
    residence = request.form.get('residence[2]', default='')
    # Default to 'false': inputs.boolean(None) raises ValueError, so an
    # omitted checkbox previously produced a 500 instead of "unchecked".
    delivery = inputs.boolean(request.form.get('delivery', default='false'))
    face2face = inputs.boolean(request.form.get('face2face', default='false'))
    phone = request.form.get('phone', default=None)

    # Guard clauses: reject foreign accounts and wrong passwords up front.
    if g.user.username != username:
        return jsonify({'status': 0, 'msg': 'It is not your account!'})
    if not g.user.verify_password(old_password):
        return jsonify({'status': 0, 'msg': 'Old password is wrong!'})

    g.user.update(new_password, email, residence, delivery, face2face, phone)
    db.session.add(g.user)
    db.session.commit()
    return jsonify({
        'status': 1,
        'msg': 'Success. You may have to re-login'
    })
def post(self) -> (str, int):
    """
    API resource method which updates a user's admin and/or activated field.

    Parameters can be passed using a POST request that contains a JSON with
    the following fields:

    :required: valid access JWT where the admin claim has to be true
    :param email: user's email address
    :param activated: the value of the user's activated field
    :param admin: the value of the user's admin field
    :type email: str
    :type activated: str
    :type admin: str
    :return: The user's credentials on success or an error message and
             corresponding status code when unsuccessful
    """
    args = self.post_reqparser.parse_args()

    # User needs admin rights to continue
    if not get_jwt_claims()['admin']:
        abort(HTTPStatus.FORBIDDEN.value, error="administration privileges required")

    # Get user instance
    user = Users.find_by_email(args["email"])
    if not user:
        return abort(HTTPStatus.BAD_REQUEST.value, error='User not found')

    # BUG FIX: reqparse stores missing arguments as None, so the key is
    # always present and ``"activated" in args`` was always true — making
    # inputs.boolean(None) raise ValueError for omitted fields.  Only apply
    # a field when the caller actually sent a value.
    if args["activated"] is not None:
        user.activated = inputs.boolean(args["activated"])
    if args["admin"] is not None:
        user.admin = inputs.boolean(args["admin"])

    # Save changes to user permissions
    user.save()
    user.commit()

    return user.json(), HTTPStatus.OK.value
def setSwitchData(jsonData):
    """Apply switch states from *jsonData* to the pump and light switches.

    Only the keys present in the payload are acted on; the returned dict
    echoes the parsed boolean value for each switch that was changed.

    :param jsonData: mapping that may contain SwitchRequest.waterPump and/or
                     SwitchRequest.lights with boolean-like string values
    :return: dict of the switch states actually applied
    """
    # Leftover debug print of the raw payload removed.
    resp = {}
    if SwitchRequest.waterPump in jsonData:
        pumpValue = inputs.boolean(jsonData[SwitchRequest.waterPump])
        pumpSwitch.setSwitch(pumpValue)
        resp[SwitchRequest.waterPump] = pumpValue
    if SwitchRequest.lights in jsonData:
        lightValue = inputs.boolean(jsonData[SwitchRequest.lights])
        lightSwitch.setSwitch(lightValue)
        resp[SwitchRequest.lights] = lightValue
    return resp
def get(self):
    """Fuzzy-search course sections by crn, name, subject, id and term.

    Query-string parameters are parsed with reqparse; ``isCurrentTerm``
    defaults to True when omitted.

    :return: dict with status, description and the matched sections
    """
    query_string_parser = reqparse.RequestParser()
    query_string_parser.add_argument("crn", type=int, location="args")
    query_string_parser.add_argument("name", type=str, location="args")
    query_string_parser.add_argument("subject", type=str, location="args")
    query_string_parser.add_argument("id", type=int, location="args")
    query_string_parser.add_argument("isCurrentTerm", type=boolean, location="args")
    query_string_parser.add_argument("limit", type=int, location="args")
    args = query_string_parser.parse_args()
    print("Fuzzy search query {}".format(args))
    try:
        # "isCurrentTerm" was already coerced by type=boolean above, so the
        # previous second boolean() pass was redundant; just default to True.
        is_current_term = args.get("isCurrentTerm")
        search_result = search_courses(
            args.get("crn"),
            args.get("name"),
            args.get("subject"),
            args.get("id"),
            True if is_current_term is None else is_current_term,
            args.get("limit")
        )
        print("Query result {}".format(search_result))
        (status, result) = search_result
        return {
            "status": "success" if status == 0 else "failed",
            "description": "Sections matched by fuzzy search",
            "response": result
        }
    except Exception as e:
        print(e)
        abort_invalid_request("Invalid request: {}".format(args))
def index():
    """NER-tag the ``text`` request parameter and return TSV plain text.

    Optional ``showVersion`` parameter prepends the FiNER version line;
    invalid values for it are logged and treated as False.

    :return: plain-text Response with one token per line, or a 500 error
             Response when no text was provided.
    """
    text = request.values.get("text")
    if text is not None:  # idiom fix: was ``!= None``
        print("request: " + text)
        print(nertagger(text))
        result = ""
        show_version_param = request.values.get("showVersion")
        if show_version_param != "" and show_version_param is not None:
            show_version = False
            try:
                show_version = inputs.boolean(show_version_param)
            except ValueError:
                print("Invalid value for parameter showVersion: " + show_version_param)
            if show_version:  # idiom fix: was ``== True``
                result += "FiNER, version " + os.environ['TAGTOOLS_VERSION'][
                    1:] + "\n\n"
        for sentence in nertagger(text):
            for word in sentence:
                result += word[0] + "\t" + word[1] + "\n"
            result += "\n"
        return Response(result, mimetype="text/plain")
    else:
        return Response(
            "Error - You should provide the input text as 'text' GET/POST parameter\n",
            status=500,
            mimetype="text/plain")
def get(self, query):
    """Return the descriptions matching *query* as a JSON-able dict.

    The ``with_pdb`` query-string flag (default ``'true'``) controls whether
    PDB data is included; it is echoed back in the response together with
    the output list and its size when anything was found.
    """
    include_pdb = boolean(request.args.get('with_pdb', 'true'))
    response = {'query': query, 'with_pdb': include_pdb}
    descriptions = self.get_descriptions(query, include_pdb)
    if descriptions:
        response['output'] = descriptions
        response['size'] = len(response['output'])
    return response
def post(self, user):
    """Create an album from the posted form and upload its photos.

    Form fields: ``name`` (required), ``albumDate`` (ISO date),
    ``receptionAppropriate``, ``photographer``, ``needsCred``,
    ``editingAllowed`` and a ``photos`` file list.

    :return: JSON ``{success, id}`` on success, or 400 when name is missing.
    """
    data = request.form
    album_name = data.get("name")
    if not album_name:
        return jsonify(success=False), 400

    album = Album()
    album.title = album_name

    albumDate = data.get("albumDate")
    if albumDate:
        albumDate = datetime.strptime(albumDate, ISO_DATE_DEF)
        album.date = albumDate

    # BUG FIX: inputs.boolean(None) raises ValueError, so any omitted flag
    # previously turned the whole upload into a 500.  Default to "false".
    receptionAppropriate = inputs.boolean(data.get("receptionAppropriate", "false"))
    if receptionAppropriate:
        album.receptionAppropriate = receptionAppropriate

    # data to add to each image
    photographer = data.get("photographer")
    needsCred = inputs.boolean(data.get("needsCred", "false"))
    editingAllowed = inputs.boolean(data.get("editingAllowed", "false"))

    photos = request.files.getlist("photos")
    if photos:
        for photo in photos:
            image = Image()
            image.url = upload_album_photo(photo, album.title)
            if photographer:
                image.photographer = photographer
            if albumDate:
                image.date = albumDate
            image.needsCred = needsCred
            image.editingAllowed = editingAllowed
            album.images.append(image)
            db.session.add(image)
    # TODO: Details for each photo

    db.session.add(album)
    db.session.commit()
    return jsonify(success=True, id=album.albumId)
def arena(): # 排序 # page_no = request.args.get('page', default=0) # item_per_page = request.args.get('items', default=10) print(request.args) order = request.args.get('order', default='asc') key = request.args.get('key', default='discount') # 筛选 category = request.args.get('category', default=None) has_pic = inputs.boolean(request.args.get('hasPic', default=False)) unsold = inputs.boolean(request.args.get('unsold', default=False)) print(has_pic, unsold) all_books = BookInfo.query if category is not None: print('filter by category') all_books = all_books.filter_by(category=category) if has_pic: print('filter by picture') all_books = all_books.filter( BookInfo.picture != app.config['NO_PIC_PATH']) if unsold: print('filter by whether bought') all_books = all_books.filter_by(bought=False) key_dict = { 'price': BookInfo.sale_price, 'book_name': BookInfo.book_name, 'discount': BookInfo.discount } if key in key_dict: if order == 'asc': print('filter by key asc') all_books = all_books.order_by(key_dict[key]) elif order == 'desc': print('filter by key desc') all_books = all_books.order_by(desc(key_dict[key])) else: raise Exception() # all_books = all_books.limit(item_per_page).offset(item_per_page * page_no) return jsonify([x.as_ret_dict(request.url_root) for x in all_books.all()])
def get(self, query):
    """Like the plain description lookup, but also reports wall-clock
    timings (``time_sql`` for the query, ``time_loop`` for post-processing)
    in the response when results were found."""
    import time
    include_pdb = boolean(request.args.get('with_pdb', 'true'))
    response = {'query': query, 'with_pdb': include_pdb}

    sql_started = time.time()
    rows = self.get_descriptions(query, include_pdb)
    sql_finished = time.time()

    if rows:
        loop_started = time.time()
        response['output'] = [row[0] for row in list(rows)]
        loop_finished = time.time()
        response['size'] = len(response['output'])
        response['time_sql'] = sql_finished - sql_started
        response['time_loop'] = loop_finished - loop_started
    return response
def to_bool(b):
    """
    encapsulate flask_restful.inputs.boolean to prevent exception
    if format isn't valid

    >>> to_bool('true')
    True
    >>> to_bool('false')
    False
    >>> to_bool('f')
    False
    >>> to_bool('t')
    False
    >>> to_bool('bob')
    False
    """
    try:
        result = boolean(b)
    except ValueError:
        # unparseable input is treated as False rather than an error
        result = False
    return result
def test_boolean_false(self):
    """The string "False" must parse to the native False."""
    parsed = inputs.boolean("False")
    assert_equal(parsed, False)
# NOTE(review): Jormungandr configuration module, read from environment
# variables with defaults.  flask_restful.inputs.boolean returns native bool
# defaults (True/False) unchanged, so passing them to os.getenv is safe.
# The final HTTP_BASIC_AUTH_REALM assignment is truncated in this view, so
# the code is kept byte-identical below.
from __future__ import absolute_import import os import json from flask_restful.inputs import boolean # path of the configuration file for each instances INSTANCES_DIR = os.getenv('JORMUNGANDR_INSTANCES_DIR', '/etc/jormungandr.d') # Patern that matches Jormungandr configuration files # ex: '*.json' will match all json files within "INSTANCES_DIR" directory INSTANCES_FILENAME_PATTERN = os.getenv( 'JORMUNGANDR_INSTANCES_FILENAME_PATTERN', '*.json') # Start the thread at startup, True in production, False for test environments START_MONITORING_THREAD = boolean( os.getenv('JORMUNGANDR_START_MONITORING_THREAD', True)) # URI for postgresql # postgresql://<user>:<password>@<host>:<port>/<dbname> # http://docs.sqlalchemy.org/en/rel_0_9/dialects/postgresql.html#psycopg2 SQLALCHEMY_DATABASE_URI = os.getenv( 'JORMUNGANDR_SQLALCHEMY_DATABASE_URI', 'postgresql://*****:*****@localhost/jormungandr') DISABLE_DATABASE = boolean(os.getenv('JORMUNGANDR_DISABLE_DATABASE', False)) # disable authentication PUBLIC = boolean(os.getenv('JORMUNGANDR_IS_PUBLIC', True)) # message returned on authentication request HTTP_BASIC_AUTH_REALM = os.getenv('JORMUNGANDR_HTTP_BASIC_AUTH_REALM',
def test_bad_boolean(self):
    """An unrecognised string must raise ValueError."""
    assert_raises(ValueError, inputs.boolean, "blah")
def test_boolean_with_python_bool(self):
    """Input that is already a native python `bool` should be passed
    through without extra processing."""
    for native in (True, False):
        assert_equal(inputs.boolean(native), native)
def test_boolean(self):
    """Parsing is case-insensitive: "FaLSE" must yield False."""
    parsed = inputs.boolean("FaLSE")
    assert_equal(parsed, False)
def test_boolean_is_true_for_1(self):
    """The string "1" must parse to True."""
    parsed = inputs.boolean("1")
    assert_equal(parsed, True)
# NOTE(review): variant of the Jormungandr configuration module that also
# declares ridesharing settings.  boolean() parses "true"/"false"-style env
# strings and passes native bool defaults through unchanged.  The trailing
# GREENLET_POOL_FOR_RIDESHARING_SERVICES assignment is truncated in this
# view, so the code is kept byte-identical below.
from __future__ import absolute_import import os import json from flask_restful.inputs import boolean # path of the configuration file for each instances INSTANCES_DIR = os.getenv('JORMUNGANDR_INSTANCES_DIR', '/etc/jormungandr.d') # Patern that matches Jormungandr configuration files # ex: '*.json' will match all json files within "INSTANCES_DIR" directory INSTANCES_FILENAME_PATTERN = os.getenv( 'JORMUNGANDR_INSTANCES_FILENAME_PATTERN', '*.json') # Start the thread at startup, True in production, False for test environments START_MONITORING_THREAD = boolean( os.getenv('JORMUNGANDR_START_MONITORING_THREAD', True)) # URI for postgresql # postgresql://<user>:<password>@<host>:<port>/<dbname> # http://docs.sqlalchemy.org/en/rel_0_9/dialects/postgresql.html#psycopg2 SQLALCHEMY_DATABASE_URI = os.getenv( 'JORMUNGANDR_SQLALCHEMY_DATABASE_URI', 'postgresql://*****:*****@localhost/jormungandr') DISABLE_DATABASE = boolean(os.getenv('JORMUNGANDR_DISABLE_DATABASE', False)) # Active the asynchronous ridesharing mode ASYNCHRONOUS_RIDESHARING = boolean( os.getenv('JORMUNGANDR_ASYNCHRONOUS_RIDESHARING', False)) # Active ridesharing service call with async greenlet GREENLET_POOL_FOR_RIDESHARING_SERVICES = boolean(
def test_boolean_is_false_for_0(self):
    """The string "0" must parse to False."""
    parsed = inputs.boolean("0")
    assert_equal(parsed, False)
# NOTE(review): Python 2 handler (print statements) that geohashes the
# requested lat/lng, pulls candidate hotspot cells from MongoDB, optionally
# filters them by the 75th-percentile count quantile (isVarFilter) and
# clusters them with DBSCAN (isClustering), then names POIs and returns
# stats.  Logic is order-dependent (pandas frames rebuilt between steps) and
# interleaved with large commented-out experiments, so the code is kept
# byte-identical; only this header comment was added.  Assumes mongo at
# localhost:30000 and helpers __getCandidatesByStep/__DBSCAN/__getPoiNameBySteps
# defined on the class — confirm against the full source.
def get(self): s = time.time() args = self.parser.parse_args() self.lat = args.get('lat') self.lng = args.get('lng') self.lv = args.get('lv') self.radius = args.get('radius') self.isVarFilter = inputs.boolean(args.get('isVarFailter')) self.isClustering = inputs.boolean(args.get('isClustering')) hash = geo.geohash_encoding(self.lat, self.lng)[0:self.lv] t = time.time() with MongoClient('mongodb://localhost:30000') as mongo: results = mongo.yongche.exp_startPos_count_bj.find( {"geohash": { "$regex": hash }}) if VERBOSE: print "[DEBUG] fetch mongo: ", time.time() - t # 半径实验code # out = [] # for data in results: # del data['_id'] # data['count'] = data['cnt'] # del data['cnt'] # # 距离判断 # data['distance'] = vincenty((self.lat, self.lng), (data['lat'], data['lng'])).meters # if data['distance'] <= self.radius: # out.append(data) # out.sort(key=lambda x: x['count'], reverse=True) # radius = self.radius radius, out = self.__getCandidatesByStep(results) # 是否有候选结果 if len(out) == 0: # 封装统计变量 result = dict() result['status'] = 1 result['info'] = "EMPTY" result['radius'] = radius return jsonify(result) # 计算统计变量 df = pd.DataFrame(out)[['count', 'distance']].describe() # 方差截断 if self.isVarFilter: preSize = len(out) # # 75% 过滤 # threshold = df['count']['75%'] # out = [x for x in out if x['count'] >= threshold] # df = pd.DataFrame(out)[['count', 'distance']].describe() # # 热点中还有热点 # if df['count']['mean'] < df['count']['std']: # threshold = df['count']['50%'] # out = [x for x in out if x['count'] >= threshold] # df = pd.DataFrame(out)[['count', 'distance']].describe() df = pd.DataFrame(out) threshold = df['count'].quantile(0.75) # 上四分位数进行第一道过滤 df['isRetain'] = np.where(df['count'] >= threshold, True, False) setOfGeohash = df[df['isRetain'] == True]['geohash'].values # 第一道过滤结束后, 热点中还有热点, 用中位数再次过滤 # std = df[df.isRetain == True]['count'].std() # mean = df[df.isRetain == True]['count'].mean() # if mean < std: # median = df[df.isRetain == True]['count'].median() # setOfGeohash = 
df[(df['isRetain'] == True) & (df['count']>=median)]['geohash'].values # else: # setOfGeohash = df[df['isRetain'] == True]['geohash'].values preSize = len(out) out = [pt for pt in out if pt['geohash'] in setOfGeohash] print "[DEBUG] statFilter from: ", preSize, "to: ", len(out) df = pd.DataFrame(out)[['count', 'distance']].describe() # 密度聚类 if self.isClustering: out = self.__DBSCAN(out, df['count']['min']) # out = self.__AffinityPropagation(out) # out = self.__MeanShift(out, df['count']['min']) # 加poi name, 简单版本 # poiType = '|'.join(["10", "99", "1507", "1505", "1508", "1506", "1504", "1502", "120302", "1903"]) # pois = Around.batch([geopy.Point(p['lat'], p['lng']) for p in out], poiType=poiType) # if len(pois) == len(out): # for i, poi in enumerate(pois): # # 未查到结果 # if poi[0] == "": # continue # elif 0 < float(poi[1]) <= 50: # out[i]['name'] = poi[0] # elif 20 < float(poi[1]) <= 50: # out[i]['name'] = poi[0] + u"附近" # else: # for i, name in enumerate(pois): # out[i]['name'] = "" t = time.time() out = self.__getPoiNameBySteps(out) print "naming: ", time.time() - t # 封装统计变量 result = dict() result['status'] = 0 result['info'] = "OK" result['radius'] = radius result['result'] = out result['count'] = df['count'].to_dict() result['distance'] = df['distance'].to_dict() print "total elapsed: ", time.time() - s return jsonify(result)
# Kirin broker / GTFS-RT configuration, read from environment variables
# with defaults.  boolean() parses "true"/"false"-style strings and passes
# native bool defaults through unchanged.
BROKER_CONSUMER_CONFIGURATION_RELOAD_INTERVAL = int(
    os.getenv("KIRIN_BROKER_CONSUMER_CONFIGURATION_RELOAD_INTERVAL",
              timedelta(minutes=1).total_seconds()))

# TODO : Remove when conf from db is ready
NAVITIA_GTFS_RT_INSTANCE = os.getenv("KIRIN_NAVITIA_GTFS_RT_INSTANCE", None)
NAVITIA_GTFS_RT_TOKEN = os.getenv("KIRIN_NAVITIA_GTFS_RT_TOKEN", None)
GTFS_RT_CONTRIBUTOR = os.getenv("KIRIN_GTFS_RT_CONTRIBUTOR", None)
GTFS_RT_FEED_URL = os.getenv("KIRIN_GTFS_RT_FEED_URL", None)

NB_DAYS_TO_KEEP_TRIP_UPDATE = int(
    os.getenv("KIRIN_NB_DAYS_TO_KEEP_TRIP_UPDATE", 2))
NB_DAYS_TO_KEEP_RT_UPDATE = int(
    os.getenv("KIRIN_NB_DAYS_TO_KEEP_RT_UPDATE", 10))
GTFS_RT_TIMEOUT = int(os.getenv("KIRIN_GTFS_RT_TIMEOUT", 1))

USE_GEVENT = boolean(os.getenv("KIRIN_USE_GEVENT", False))
DEBUG = boolean(os.getenv("KIRIN_DEBUG", False))

# rabbitmq connections string: http://kombu.readthedocs.org/en/latest/userguide/connections.html#urls
RABBITMQ_CONNECTION_STRING = os.getenv(
    "KIRIN_RABBITMQ_CONNECTION_STRING",
    "pyamqp://*****:*****@localhost:5672//?heartbeat=60")

# max nb of retries before giving up publishing
MAX_RETRIES = 10

# queue used for task of type load_realtime, all instances of kirin must use the same queue
# to be able to load balance tasks between them
LOAD_REALTIME_QUEUE = "kirin_load_realtime"
def __call__(self, value):
    """Coerce *value* to bool.

    Native booleans are returned unchanged; anything else is handed to
    flask-restful's ``boolean`` parser (which may raise ValueError).
    """
    return value if isinstance(value, bool) else boolean(value)
def test_boolean_true(self):
    """The string "true" must parse to True."""
    parsed = inputs.boolean("true")
    assert_equal(parsed, True)
# NOTE(review): Kirin COTS / GTFS-RT configuration fragment.  This view
# starts mid-statement (the opening of the first int(os.getenv(...)) call is
# outside the visible range), so the code is kept byte-identical; only this
# header comment was added.  boolean() parses "true"/"false"-style env
# strings and passes native bool defaults through unchanged.
timedelta(hours=1).total_seconds())) COTS_PAR_IV_REQUEST_TIMEOUT = int( os.getenv('KIRIN_COTS_COTS_PAR_IV_REQUEST_TIMEOUT', timedelta(seconds=2).total_seconds())) # TODO better conf for multi GTFS-RT NAVITIA_GTFS_RT_INSTANCE = os.getenv('KIRIN_NAVITIA_GTFS_RT_INSTANCE', 'sherbrooke') NAVITIA_GTFS_RT_TOKEN = os.getenv('KIRIN_NAVITIA_GTFS_RT_TOKEN', None) GTFS_RT_CONTRIBUTOR = os.getenv('KIRIN_GTFS_RT_CONTRIBUTOR', 'realtime.sherbrooke') GTFS_RT_FEED_URL = os.getenv('KIRIN_GTFS_RT_FEED_URL', None) NB_DAYS_TO_KEEP_TRIP_UPDATE = int(os.getenv('NB_DAYS_TO_KEEP_TRIP_UPDATE', 2)) NB_DAYS_TO_KEEP_RT_UPDATE = int(os.getenv('NB_DAYS_TO_KEEP_RT_UPDATE', 10)) USE_GEVENT = boolean(os.getenv('KIRIN_USE_GEVENT', False)) DEBUG = boolean(os.getenv('KIRIN_DEBUG', False)) # rabbitmq connections string: http://kombu.readthedocs.org/en/latest/userguide/connections.html#urls RABBITMQ_CONNECTION_STRING = os.getenv( 'KIRIN_RABBITMQ_CONNECTION_STRING', 'pyamqp://*****:*****@localhost:5672//?heartbeat=60') # max nb of retries before giving up publishing MAX_RETRIES = 10 # queue used for task of type load_realtime, all instances of kirin must use the same queue # to be able to load balance tasks between them LOAD_REALTIME_QUEUE = 'kirin_load_realtime'