def get(self):
    """Query administrators (MIS): optional status/keyword filters,
    ordering and pagination; returns marshalled list plus total_count."""
    args_parser = RequestParser()
    args_parser.add_argument('order_by', location='args')
    args_parser.add_argument('keyword', location='args')
    args_parser.add_argument('status', type=inputs.int_range(0, 1), location='args')
    args_parser.add_argument('page', type=inputs.positive, required=False, location='args')
    args_parser.add_argument('per_page', type=inputs.int_range(constants.PER_PAGE_MIN, constants.PER_PAGE_MAX, 'per_page'), required=False, location='args')
    args = args_parser.parse_args()
    page = constants.DEFAULT_PAGE if args.page is None else args.page
    per_page = constants.DEFAULT_PER_PAGE if args.per_page is None else args.per_page
    administrators = MisAdministrator.query
    if args.status is not None:
        administrators = administrators.filter_by(status=args.status)
    if args.keyword:
        # Fuzzy match on account OR name.
        administrators = administrators.filter(or_(MisAdministrator.account.like('%' + args.keyword + '%'), MisAdministrator.name.like('%' + args.keyword + '%')))
    if args.order_by is not None:
        if args.order_by == 'id':
            administrators = administrators.order_by(MisAdministrator.id.asc())
        else:
            # Any other value sorts by most recent update time.
            administrators = administrators.order_by(MisAdministrator.utime.desc())
    # Count before offset/limit so total_count reflects the full filtered set.
    total_count = administrators.count()
    administrators = administrators.offset(per_page * (page - 1)).limit(per_page).all()
    ret = marshal(administrators, AdministratorListResource.administrators_fields, envelope='administrators')
    ret['total_count'] = total_count
    return ret
def nautical_flags(text): TEXT_SIZE_LIMIT = 100 # just to prevent abuse parser = reqparse.RequestParser() parser.add_argument('row_size', type=inputs.positive, default=10) parser.add_argument('size', type=inputs.int_range(10, 100), default=100) parser.add_argument('padding', type=inputs.int_range(0, 50), default=10) parser.add_argument('background', type=parse_color, default=(0xC8, 0xD0, 0xD4)) args = parser.parse_args() ROW_SIZE = args['row_size'] FLAG_WIDTH = FLAG_HEIGHT = args['size'] PADDING = args['padding'] BACKGROUND = args['background'] if len(text) > TEXT_SIZE_LIMIT: raise BadRequest('Text too long') text = text.upper().strip() try: tokens = list(tokenize_text(text)) except ValueError as exc: raise BadRequest(str(exc)) def _group_tokens(t): while t: yield t[:ROW_SIZE] t = t[ROW_SIZE:] rows = list(_group_tokens(tokens)) if len(rows) == 0: raise BadRequest('Text must contain at least one flag') img_width = (PADDING + FLAG_WIDTH) * len(rows[0]) + PADDING img_height = (PADDING + FLAG_HEIGHT) * len(rows) + PADDING img = Image.new('RGBA', (img_width, img_height), color=BACKGROUND) hoff = voff = PADDING for row in rows: for letter in row: letter_img = FLAGS_IMAGES.get(letter) if letter_img: scaled = letter_img.resize((FLAG_WIDTH, FLAG_HEIGHT)) img.paste(scaled, box=(hoff, voff), mask=scaled) hoff += FLAG_WIDTH + PADDING hoff = PADDING voff += FLAG_HEIGHT + PADDING output = io.BytesIO() img.save(output, 'png') return output.getvalue(), 200, {'content-type': 'image/png'}
def get(self):
    """List app users: keyword/status/last-login-window filters, ordering
    and pagination; returns marshalled list plus total_count."""
    args_parser = RequestParser()
    args_parser.add_argument('page', type=inputs.positive, required=False, location='args')
    args_parser.add_argument('per_page', type=inputs.int_range(constants.PER_PAGE_MIN, constants.PER_PAGE_MAX, 'per_page'), required=False, location='args')
    args_parser.add_argument('keyword', location='args')
    args_parser.add_argument('begin', type=parser.date_time, location='args')
    args_parser.add_argument('end', type=parser.date_time, location='args')
    args_parser.add_argument('status', type=inputs.int_range(0, 1), location='args')
    args_parser.add_argument('order_by', location='args')
    args = args_parser.parse_args()
    page = constants.DEFAULT_PAGE if args.page is None else args.page
    per_page = constants.DEFAULT_PER_PAGE if args.per_page is None else args.per_page
    users = User.query
    if args.keyword:
        # Fuzzy match on mobile OR account.
        users = users.filter(
            or_(User.mobile.like('%' + args.keyword + '%'),
                User.account.like('%' + args.keyword + '%')))
    if args.status is not None:
        users = users.filter(User.status == args.status)
    # Time window only applies when both endpoints exist and are ordered.
    if args.begin and args.end and args.end > args.begin:
        users = users.filter(User.last_login.between(args.begin, args.end))
    if args.order_by is not None:
        if args.order_by == 'id':
            users = users.order_by(User.id.asc())
        else:
            users = users.order_by(User.last_login.desc())
    else:
        # Default ordering: oldest login first.
        users = users.order_by(User.last_login.asc())
    # Count before offset/limit so total_count covers the whole filtered set.
    total_count = users.count()
    users = users.offset(per_page * (page - 1)).limit(per_page).all()
    ret = marshal(users, UserListResource.user_fields, envelope='users')
    ret['total_count'] = total_count
    return ret
class Matches(Resource):
    """REST resource for reading and creating match records."""

    def get(self):
        # Return every match row as a dict.
        conn = db.connect()
        s = select([matches])
        result = conn.execute(s)
        return {'matches': [dict(row) for row in result]}

    # Shared validation options for the four player-name fields.
    name_options = {
        'type': inputs.regex('^.+$'),  # at least one character
        'help': "Must not be empty",
        'required': True,
        'nullable': False
    }
    # Shared validation options for the two score fields.
    score_options = {
        'type': inputs.int_range(0, 10),
        'required': True,
        'nullable': False
    }
    post_parser = reqparse.RequestParser()
    post_parser.add_argument('a_off', **name_options)
    post_parser.add_argument('a_def', **name_options)
    post_parser.add_argument('a_score', **score_options)
    post_parser.add_argument('b_off', **name_options)
    post_parser.add_argument('b_def', **name_options)
    post_parser.add_argument('b_score', **score_options)

    def post(self):
        # Insert a new match from the validated request body.
        conn = db.connect()
        args = self.post_parser.parse_args()
        ins = matches.insert().values(**args)
        result = conn.execute(ins)
        # NOTE(review): dict() over an INSERT result looks wrong — insert
        # results are not row mappings; confirm this endpoint's intended
        # response (e.g. the inserted primary key).
        return {'result': dict(result)}
def get(self):
    """Return the current user's fans (followers), paginated."""
    fans_parser = RequestParser()
    fans_parser.add_argument('page', type=inputs.positive, required=False, location='args')
    fans_parser.add_argument('per_page', required=False, location='args',
                             type=inputs.int_range(constants.DEFAULT_FOLLOWER_PER_PAGE_MIN,
                                                   constants.DEFAULT_FOLLOWER_PER_PAGE_MAX,
                                                   'per_page'))
    args = fans_parser.parse_args()
    page = args.page or 1
    per_page = args.per_page or constants.DEFAULT_FOLLOWER_PER_PAGE_MIN

    # Total number of users following the current user.
    count_row = db.session.query(func.count(Relation.id)).filter(
        Relation.target_user_id == g.user_id,
        Relation.relation == Relation.RELATION.FOLLOW).first()
    total_count = count_row[0]

    results = []
    offset = (page - 1) * per_page
    if total_count > 0 and offset < total_count:
        fans = User.query.join(User.followings) \
            .options(load_only(User.id, User.name, User.profile_photo)) \
            .filter(Relation.target_user_id == g.user_id,
                    Relation.relation == Relation.RELATION.FOLLOW) \
            .order_by(Relation.utime.desc()) \
            .offset(offset).limit(per_page).all()
        results = [dict(
            id=fan.id,
            name=fan.name,
            photo=current_app.config['QINIU_DOMAIN'] + (
                fan.profile_photo or cache_constants.DEFAULT_USER_PROFILE_PHOTO)
        ) for fan in fans]

    return {'total_count': total_count, 'page': page, 'per_page': per_page, 'results': results}
def __init__(self):
    """Build the reqparse validator for course review payloads."""
    body = reqparse.RequestParser()
    body.add_argument('course', required=True, location=['form', 'json'],
                      type=inputs.positive, help='No course provided')
    body.add_argument('rating', required=True, location=['form', 'json'],
                      type=inputs.int_range(1, 5), help='No rating provided')
    body.add_argument('comment', required=False, location=['form', 'json'],
                      default='')
    self.reqparse = body
    super().__init__()
def post_args(return_parse_args=True):
    """Build the parser for bug-report POST bodies.

    Returns the parsed args when ``return_parse_args`` is true, otherwise
    the parser itself.
    """
    bug_parser = reqparse.RequestParser()
    for field in ("bugName", "desc", "poc"):
        bug_parser.add_argument(field, type=unicode, required=True, nullable=False)
    bug_parser.add_argument("grade", type=inputs.int_range(-1, 4),
                            required=True, nullable=False)
    if return_parse_args:
        return bug_parser.parse_args()
    return bug_parser
def get(self):
    """Return the current user's article collection history, paginated."""
    page_parser = RequestParser()
    page_parser.add_argument('page', type=inputs.positive, required=False, location='args')
    page_parser.add_argument('per_page', required=False, location='args',
                             type=inputs.int_range(constants.DEFAULT_ARTICLE_PER_PAGE_MIN,
                                                   constants.DEFAULT_ARTICLE_PER_PAGE_MAX,
                                                   'per_page'))
    args = page_parser.parse_args()
    page = args.page or 1
    per_page = args.per_page or constants.DEFAULT_ARTICLE_PER_PAGE_MIN

    total_count, collections = cache_user.UserArticleCollectionsCache(
        g.user_id).get_page(page, per_page)
    results = [cache_article.ArticleInfoCache(article_id).get()
               for article_id in collections]
    return {
        'total_count': total_count,
        'page': page,
        'per_page': per_page,
        'results': results
    }
def get(self):
    """Return the paginated system announcement list."""
    page_parser = RequestParser()
    page_parser.add_argument('page', type=inputs.positive, required=False, location='args')
    page_parser.add_argument('per_page', required=False, location='args',
                             type=inputs.int_range(constants.DEFAULT_ANNOUNCEMENT_PER_PAGE_MIN,
                                                   constants.DEFAULT_ANNOUNCEMENT_PER_PAGE_MAX,
                                                   'per_page'))
    args = page_parser.parse_args()
    page = args.page or 1
    per_page = args.per_page or constants.DEFAULT_ANNOUNCEMENT_PER_PAGE_MIN

    total_count, results = cache_notice.AnnouncementsCache.get_page(page, per_page)
    return {
        'total_count': total_count,
        'page': page,
        'per_page': per_page,
        'results': results
    }
def put(self, target):
    """Update channel ``target`` (name, visibility, ordering sequence)."""
    body = RequestParser()
    body.add_argument('name', required=True, location='json')
    body.add_argument('is_visible', type=inputs.int_range(0, 1), required=True, location='json')
    body.add_argument('sequence', type=inputs.positive, location='json')
    args = body.parse_args()

    channel = Channel.query.filter_by(id=target).first()
    if channel is None:
        return {'message': 'Invalid channel id.'}, 400

    if args.name is not None and args.name != channel.name:
        # Channel names must be unique.
        if Channel.query.filter_by(name=args.name).first():
            return {'message': '{} already exists'.format(args.name)}, 403
        channel.name = args.name
    if args.is_visible is not None:
        channel.is_visible = args.is_visible
    if args.sequence is not None:
        channel.sequence = args.sequence

    db.session.add(channel)
    db.session.commit()
    return marshal(channel, ChannelListResource.channel_fields), 201
def get(self):
    """Return the users the current user follows, paginated.

    Each entry carries id/name/photo/fans_count plus ``mutual_follow``
    (whether the followed user also follows back).
    """
    qs_parser = RequestParser()
    qs_parser.add_argument('page', type=inputs.positive, required=False, location='args')
    qs_parser.add_argument('per_page', type=inputs.int_range(constants.DEFAULT_USER_FOLLOWINGS_PER_PAGE_MIN,
                                                            constants.DEFAULT_USER_FOLLOWINGS_PER_PAGE_MAX,
                                                            'per_page'),
                           required=False, location='args')
    args = qs_parser.parse_args()
    page = 1 if args.page is None else args.page
    per_page = args.per_page if args.per_page else constants.DEFAULT_USER_FOLLOWINGS_PER_PAGE_MIN

    results = []
    followings = cache_user.UserFollowingCache(g.user_id).get()
    # Build a set once for O(1) mutual-follow membership tests instead of
    # scanning the followers list for every following on the page.
    followers = set(cache_user.UserFollowersCache(g.user_id).get())
    total_count = len(followings)
    req_followings = followings[(page - 1) * per_page:page * per_page]
    for following_user_id in req_followings:
        user = cache_user.UserProfileCache(following_user_id).get()
        results.append(dict(
            id=following_user_id,
            name=user['name'],
            photo=user['photo'],
            fans_count=user['fans_count'],
            mutual_follow=following_user_id in followers
        ))

    return {'total_count': total_count, 'page': page, 'per_page': per_page, 'results': results}
def put(self):
    """Bulk approve / reject user verification (legalize) applications.

    status is constrained to 2 or 3 (LegalizeLog.STATUS approve/reject).
    Rejection also stores a reason and clears the users' media flag;
    approval sets it.
    """
    json_parser = RequestParser()
    json_parser.add_argument('legalize_ids', action='append', type=inputs.positive, required=True, location='json')
    json_parser.add_argument('status', type=inputs.int_range(2, 3), required=True, location='json')
    json_parser.add_argument('reject_reason', location='json')
    args = json_parser.parse_args()
    legalizes = LegalizeLog.query.filter(
        LegalizeLog.id.in_(args.legalize_ids))
    # Capture the affected user ids BEFORE the bulk UPDATE below.
    user_ids = [legal.user_id for legal in legalizes.all()]
    count = legalizes.update({'status': args.status}, synchronize_session=False)
    if args.status == LegalizeLog.STATUS.REJECT:
        # Default rejection reason if none supplied.
        legalizes.update(
            {'reject_reason': args.reject_reason or '资料不通过,驳回'},
            synchronize_session=False)
        User.query.filter(User.id.in_(user_ids)).update(
            {'is_media': False}, synchronize_session=False)
    elif args.status == LegalizeLog.STATUS.APPROVED:
        User.query.filter(User.id.in_(user_ids)).update(
            {'is_media': True}, synchronize_session=False)
    # Single commit covers all bulk updates above.
    db.session.commit()
    return {'count': count}, 201
def get(self, user_id):
    """Return the article list published by ``user_id``, paginated."""
    if not cache_user.UserProfileCache(user_id).exists():
        return {'message': 'Invalid request.'}, 400

    page_parser = RequestParser()
    page_parser.add_argument('page', type=inputs.positive, required=False, location='args')
    page_parser.add_argument('per_page', required=False, location='args',
                             type=inputs.int_range(constants.DEFAULT_ARTICLE_PER_PAGE_MIN,
                                                   constants.DEFAULT_ARTICLE_PER_PAGE_MAX,
                                                   'per_page'))
    args = page_parser.parse_args()
    page = args.page or 1
    per_page = args.per_page or constants.DEFAULT_ARTICLE_PER_PAGE_MIN

    total_count, page_articles = cache_user.UserArticlesCache(user_id).get_page(page, per_page)
    # Skip articles whose cache entry no longer resolves.
    results = [info for info in
               (cache_article.ArticleInfoCache(aid).get() for aid in page_articles)
               if info]
    return {'total_count': total_count, 'page': page, 'per_page': per_page, 'results': results}
def get(self):
    """Return the current user's own article list with liking flags."""
    page_parser = RequestParser()
    page_parser.add_argument('page', type=inputs.positive, required=False, location='args')
    page_parser.add_argument('per_page', required=False, location='args',
                             type=inputs.int_range(constants.DEFAULT_ARTICLE_PER_PAGE_MIN,
                                                   constants.DEFAULT_ARTICLE_PER_PAGE_MAX,
                                                   'per_page'))
    args = page_parser.parse_args()
    page = args.page or 1
    per_page = args.per_page or constants.DEFAULT_ARTICLE_PER_PAGE_MIN

    total_count, page_articles = cache_user.UserArticlesCache(g.user_id).get_page(page, per_page)
    attitude_cache = cache_user.UserArticleAttitudeCache(g.user_id)
    results = []
    for aid in page_articles:
        info = cache_article.ArticleInfoCache(aid).get()
        if not info:
            continue  # stale cache entry
        info['is_liking'] = attitude_cache.determine_liking_article(aid)
        results.append(info)
    return {'total_count': total_count, 'page': page, 'per_page': per_page, 'results': results}
def put(self, target):
    """Update administrator group ``target``.

    Partial update: name (kept unique), remark, status (0/1) and the
    permission id list. Returns the marshalled group with 201.
    """
    json_parser = RequestParser()
    json_parser.add_argument('name', location='json')
    json_parser.add_argument('remark', location='json')
    json_parser.add_argument('status', type=inputs.int_range(0, 1), location='json')
    json_parser.add_argument('permission_ids', action='append', type=inputs.positive, location='json')
    args = json_parser.parse_args()
    # Fixed: removed leftover debug print(args).
    group = MisAdministratorGroup.query.filter_by(id=target).first()
    if not group:
        return {'message': 'Invalid group id.'}, 400
    if args.name and args.name != group.name:
        # Group names must stay unique.
        if MisAdministratorGroup.query.filter_by(name=args.name).first():
            return {'message': '{} already exists'.format(args.name)}
        group.name = args.name
    if args.status is not None:
        group.status = args.status
    if args.remark:
        group.remark = args.remark
    if args.permission_ids is not None:
        # Delegate permission updates for the group.
        inc_red_group_permission(group.id, args.permission_ids)
    db.session.add(group)
    db.session.commit()
    return marshal(group, GroupListResource.group_fields), 201
def get(self):
    """Return hot search keywords ordered by total count, paginated."""
    page_parser = RequestParser()
    page_parser.add_argument('page', type=inputs.positive, required=False, location='args')
    page_parser.add_argument('per_page', required=False, location='args',
                             type=inputs.int_range(constants.PER_PAGE_MIN,
                                                   constants.PER_PAGE_MAX,
                                                   'per_page'))
    args = page_parser.parse_args()
    page = constants.DEFAULT_PAGE if args.page is None else args.page
    per_page = constants.DEFAULT_PER_PAGE if args.per_page is None else args.per_page

    keyword_query = StatisticsSearchTotal.query.order_by(
        StatisticsSearchTotal.count.desc())
    ret = {'keywords': [], 'total_count': keyword_query.count()}
    page_rows = keyword_query.offset(per_page * (page - 1)).limit(per_page).all()
    for sst in page_rows:
        # Only the weekly percentage is reported; the raw counts are unused.
        _, _, week_percent = self._get_week_percent(sst.keyword)
        ret['keywords'].append({
            'keyword': sst.keyword,
            'user_count': sst.user_count,
            'week_percent': week_percent,
        })
    return ret
def __init__(self):
    """Constructor - set up the (trimming) argument parser for 'level'."""
    level_parser = reqparse.RequestParser(trim=True)
    level_parser.add_argument(name='level',
                              required=False,
                              type=inputs.int_range(0, 255))
    self.args_parser = level_parser
def get(self):
    """Full-text article search via Elasticsearch, paginated."""
    qs_parser = RequestParser()
    qs_parser.add_argument('q', type=inputs.regex(r'^.{1,50}$'), required=True, location='args')  # search keyword, 1-50 chars
    qs_parser.add_argument('page', type=inputs.positive, required=False, location='args')  # page number, positive integer
    qs_parser.add_argument(
        'per_page',  # results per page
        required=False,
        location='args',
        type=inputs.int_range(1, 20, 'per_page'))
    args = qs_parser.parse_args()
    q = args.q
    page = 1 if args.page is None else args.page
    per_page = 10 if args.per_page is None else args.per_page
    query = {
        'from': (page - 1) * per_page,  # offset of the first hit
        'size': per_page,  # number of hits to return
        '_source': ['title', 'article_id'],  # fields to include in the response
        'query': {
            'bool': {
                'must': [{
                    'match': {
                        '_all': q
                    }
                }  # full-text match over all fields
                ],
                'filter': [{
                    'term': {
                        'status': 2
                    }
                }  # only approved articles (status == 2)
                ]
            }
        }
    }
    es_ret = current_app.es.search(index='articles', doc_type='article', body=query)  # returns a dict
    results = [i['_source'] for i in es_ret['hits']['hits']]
    return {
        'page': page,
        'per_page': per_page,
        'results': results,
        'total_count': es_ret['hits']['total']
    }
def get(self):
    """
    Get the article feed for a channel.
    /v1_0/articles?channel_id&page&per_page
    """
    qs_parser = RequestParser()
    qs_parser.add_argument('channel_id', type=parser.channel_id, required=True, location='args')
    qs_parser.add_argument('page', type=inputs.positive, required=False, location='args')
    qs_parser.add_argument('per_page', type=inputs.int_range(
        constants.DEFAULT_ARTICLE_PER_PAGE_MIN, constants.DEFAULT_ARTICLE_PER_PAGE_MAX, 'per_page'), required=False, location='args')
    args = qs_parser.parse_args()
    channel_id = args.channel_id
    page = 1 if args.page is None else args.page
    per_page = args.per_page if args.per_page else constants.DEFAULT_ARTICLE_PER_PAGE_MIN
    results = []
    if page == 1:
        # First page: prepend the channel's pinned ("top") articles.
        top_article_id_li = cache_article.ChannelTopArticlesStorage(
            channel_id).get()
        for article_id in top_article_id_li:
            article = cache_article.ArticleInfoCache(article_id).get()
            if article:
                results.append(article)
    # Recommended articles from the feed service.
    # NOTE: the feed call does not use the page parameter.
    feeds = self._feed_articles(channel_id, per_page)
    # Resolve each recommended article id through the cache.
    for feed in feeds:
        article = cache_article.ArticleInfoCache(feed.article_id).get()
        if article:
            # Attach recommendation trace parameters for client-side reporting.
            article['trace'] = {
                'click': feed.params.click,
                'collect': feed.params.collect,
                'share': feed.params.share,
                'read': feed.params.read
            }
            results.append(article)
    return {'page': page, 'per_page': per_page, 'results': results}
def __init__(self):
    """Set up the parser validating the mandatory LED brightness level."""
    brightness_parser = reqparse.RequestParser()
    brightness_parser.add_argument(
        name='level',
        required=True,
        type=inputs.int_range(0, 100),  # allowed range 0..100
        help='Set LED brightness level {error_msg}',
        default=None)
    self.args_parser = brightness_parser
def get(self):
    """Validate user_id/sex/age/phone query parameters and echo them back."""
    user_parser = RequestParser()
    user_parser.add_argument('user_id', required=True, action='append', type=int)
    user_parser.add_argument('sex', required=True, help='missing a param')
    user_parser.add_argument('age', required=True, type=inputs.int_range(1, 100))
    user_parser.add_argument('phone', required=True, type=mobile_checkout)
    args = user_parser.parse_args()
    return {'data': {'user_id': args.user_id,
                     'sex': args.sex,
                     'age': args.age,
                     'phone': args.phone}}
def post(self):
    """Validate name/age/sex form fields and return a success code.

    Parsing aborts the request with a 400 (using the per-field help text)
    when validation fails; otherwise a plain success payload is returned.
    """
    parser = reqparse.RequestParser()
    parser.add_argument('name', type=str, help='用户名出错')
    parser.add_argument('age', type=inputs.int_range(12, 60), help='用户年龄出错')
    parser.add_argument('sex', type=str, choices=['male', 'female', 'secret'],
                        help='性别输入错误')
    # Run validation; the parsed values are not used further.
    # Fixed: removed leftover debug print(args).
    parser.parse_args()
    return {'code': '200'}
def put(self, target):
    """Update administrator ``target``'s profile and credentials.

    Partial update. Changing one's own password requires the correct
    current_password. Returns the marshalled administrator with 201.
    """
    json_parser = RequestParser()
    json_parser.add_argument('account', type=parser.mis_account, location='json')
    json_parser.add_argument('password', type=parser.mis_password, location='json')
    json_parser.add_argument('name', location='json')
    json_parser.add_argument('group_id', type=parser.mis_group_id, location='json')
    json_parser.add_argument('status', type=inputs.int_range(0, 1), location='json')
    json_parser.add_argument('email', type=parser.email, location='json')
    json_parser.add_argument('mobile', type=parser.mobile, location='json')
    json_parser.add_argument('current_password', type=parser.mis_password, location='json')
    args = json_parser.parse_args()
    # Fixed: removed leftover debug print(args).
    administrator = MisAdministrator.query.filter_by(id=target).first()
    if not administrator:
        return {'message': 'Invalid administrator id.'}, 403
    if args.account and args.account != administrator.account:
        # Accounts must stay unique.
        if MisAdministrator.query.filter_by(account=args.account).first():
            return {'message': '{} already exists'.format(args.account)}
        administrator.account = args.account
    if args.password:
        # NOTE(review): if generate_password_hash is the salted werkzeug
        # helper, re-hashing current_password can never equal the stored
        # hash, so this check would always fail — it should use
        # check_password_hash. Confirm which helper is in use here.
        if target == g.administrator_id \
                and administrator.password != generate_password_hash(args.current_password):
            return {'message': 'Current password error.'}, 403
        administrator.password = generate_password_hash(args.password)
    if args.name:
        administrator.name = args.name
    if args.group_id:
        administrator.group_id = args.group_id
    if args.status is not None:
        administrator.status = args.status
    if args.email:
        administrator.email = args.email
    if args.mobile:
        administrator.mobile = args.mobile
    try:
        db.session.add(administrator)
        db.session.commit()
    except Exception:
        # Roll back the failed transaction, then surface the error.
        db.session.rollback()
        raise
    return marshal(administrator, AdministratorListResource.administrators_fields), 201
def get(self):
    """Search articles via Elasticsearch; on the first page the keyword is
    also recorded into the user's search history (best effort)."""
    qs_parser = RequestParser()
    qs_parser.add_argument('q', type=inputs.regex(r'^.{1,50}$'), required=True, location='args')
    qs_parser.add_argument('page', type=inputs.positive, required=False, location='args')
    qs_parser.add_argument('per_page', type=inputs.int_range(constants.DEFAULT_SEARCH_PER_PAGE_MIN, constants.DEFAULT_SEARCH_PER_PAGE_MAX, 'per_page'), required=False, location='args')
    args = qs_parser.parse_args()
    q = args.q
    page = 1 if args.page is None else args.page
    per_page = args.per_page if args.per_page else constants.DEFAULT_SEARCH_PER_PAGE_MIN

    # Search from Elasticsearch. '_source': False -> only document ids are
    # returned; full article data comes from the cache below.
    query = {
        'from': (page-1)*per_page,
        'size': per_page,
        '_source': False,
        'query': {
            'bool': {
                'must': [
                    {'match': {'_all': q}}
                ],
                'filter': [
                    {'term': {'status': 2}}  # approved articles only
                ]
            }
        }
    }
    ret = current_app.es.search(index='articles', doc_type='article', body=query)

    total_count = ret['hits']['total']

    results = []

    hits = ret['hits']['hits']
    for result in hits:
        article_id = int(result['_id'])
        article = cache_article.ArticleInfoCache(article_id).get()
        if article:
            results.append(article)

    # Record user search history; Redis failures are only logged.
    if g.user_id and page == 1:
        try:
            cache_user.UserSearchingHistoryStorage(g.user_id).save(q)
        except RedisError as e:
            current_app.logger.error(e)

    return {'total_count': total_count, 'page': page, 'per_page': per_page, 'results': results}
def get(self):
    """Validate request arguments and echo them back as a formatted string."""
    # NOTE(review): `parse` appears to be module-level, so these
    # add_argument calls re-register the same arguments on every request —
    # consider moving the registration to module scope.
    parse.add_argument('name', required=True, help='missing name')
    parse.add_argument('like', action='append')
    parse.add_argument('height', type=inputs.int_range(180, 230))
    parse.add_argument('mobile', type=re_mobile)
    parse.add_argument('json_str', required=True, location='json')
    # Fixed: parse once instead of re-running the full validation pipeline
    # for every individual field.
    args = parse.parse_args()
    name = args.get('name')
    like = args.get('like')
    height = args.get('height')
    mobile = args.get('mobile')
    json_str = args.get('json_str')
    return 'name:{} like:{} height:{} mobile:{} json_str:{}'.format(
        name, like, height, mobile, json_str)
def __init__(self):
    """Constructor - configure mode/speed argument validation."""
    motion_parser = reqparse.RequestParser(trim=True)
    motion_parser.add_argument(
        name='mode',
        type=str,
        required=False,
        choices=("stop", "blink", "left", "right", "rainbow"),
        case_sensitive=False,
        help='Mode')
    motion_parser.add_argument(
        name='speed',
        required=False,
        type=inputs.int_range(1, 10))
    self.args_parser = motion_parser
def get(self):
    """List MIS operation logs with keyword/time-window filters, ordering
    and pagination; also records an audit entry for the query itself."""
    args_parser = RequestParser()
    args_parser.add_argument('page', type=inputs.positive, required=False, location='args')
    args_parser.add_argument('per_page', type=inputs.int_range(constants.PER_PAGE_MIN, constants.PER_PAGE_MAX, 'per_page'), required=False, location='args')
    args_parser.add_argument('keyword', location='args')
    args_parser.add_argument('order_by', location='args')
    args_parser.add_argument('begin', type=parser.date_time, location='args')
    args_parser.add_argument('end', type=parser.date_time, location='args')
    args = args_parser.parse_args()
    page = constants.DEFAULT_PAGE if args.page is None else args.page
    per_page = constants.DEFAULT_PER_PAGE if args.per_page is None else args.per_page
    logs = MisOperationLog.query
    if args.keyword:
        # BUG FIX: the two .like() conditions were passed as separate
        # filter() arguments and therefore ANDed — a log had to contain the
        # keyword in BOTH operation and description. Keyword search should
        # match either field, consistent with the other list endpoints.
        logs = logs.filter(
            MisOperationLog.operation.like('%' + args.keyword + '%') |
            MisOperationLog.description.like('%' + args.keyword + '%'))
    # Time window only applies when both endpoints exist and are ordered.
    if args.begin and args.end and args.end > args.begin:
        logs = logs.filter(
            MisOperationLog.ctime.between(args.begin, args.end))
    if args.order_by is not None:
        if args.order_by == 'id':
            logs = logs.order_by(MisOperationLog.id.desc())
        else:
            logs = logs.order_by(MisOperationLog.ctime.desc())
    # Count before offset/limit so total_count reflects the full filter set.
    total_count = logs.count()
    logs = logs.offset(per_page * (page - 1)).limit(per_page).all()
    ret = marshal(logs, OperationLogListResource.logs_fields, envelope='operationlogs')
    ret['total_count'] = total_count
    add_log('查询', '查询: 运营日志')
    return ret
def get(self):
    """Query the current user's (non-deleted) image materials, optionally
    restricted to collected ones, paginated."""
    req_parser = RequestParser()
    req_parser.add_argument('collect', type=inputs.boolean, required=False, location='args')
    req_parser.add_argument('page', type=inputs.positive, required=False, location='args')
    req_parser.add_argument('per_page', type=inputs.int_range(1, constants.DEFAULT_IMAGE_PER_PAGE_MAX, 'per_page'), required=False, location='args')
    args = req_parser.parse_args()
    collect = args['collect']
    page = 1 if args['page'] is None else args['page']
    per_page = args.per_page if args.per_page else constants.DEFAULT_IMAGE_PER_PAGE

    resp = {'total_count': 0, 'page': page, 'per_page': per_page, 'results': []}

    # Total count first so out-of-range pages can return early.
    total_query = db.session.query(func.count(Material.id)).filter(Material.user_id == g.user_id,
                                                                   Material.status != Material.STATUS.DELETED)
    if collect:
        total_query = total_query.filter_by(is_collected=True)
    ret = total_query.first()
    total_count = ret[0]
    if total_count == 0 or page > math.ceil(total_count/per_page):
        return resp

    query = Material.query.options(load_only(Material.id, Material.url, Material.is_collected))\
        .filter(Material.user_id == g.user_id, Material.status != Material.STATUS.DELETED)
    if collect:
        query = query.filter_by(is_collected=True)
    # Collected images first, then newest first.
    materials = query.order_by(Material.is_collected.desc(), Material.ctime.desc())\
        .offset((page-1)*per_page).limit(per_page).all()

    results = []
    for material in materials:
        results.append(dict(
            id=material.id,
            url=current_app.config['QINIU_DOMAIN'] + material.url,
            is_collected=material.is_collected
        ))
    resp['total_count'] = total_count
    resp['results'] = results
    return resp
def __init__(self):
    """Configure the request parser for course review submissions."""
    review_parser = reqparse.RequestParser()
    review_parser.add_argument("course", type=inputs.positive, required=True,
                               help="No course provided",
                               location=["form", "json"])
    review_parser.add_argument("rating", type=inputs.int_range(1, 5), required=True,
                               help="No rating provided",
                               location=["form", "json"])
    review_parser.add_argument("comment", default="", required=False,
                               nullable=True, location=["form", "json"])
    self.reqparse = review_parser
    super().__init__()
def __init__(self):
    """Set up the reqparse validator for course review payloads."""
    self.reqpare = reqparse.RequestParser()
    self.reqpare.add_argument('course', type=inputs.positive, required=True,
                              help='No Course Provider',
                              location=['json', 'form'])
    self.reqpare.add_argument('rating', type=inputs.int_range(1, 5), required=True,
                              help='No rating Provided',
                              location=['json', 'form'])
    # BUG FIX: the original did
    #   self.reqpare = reqparse.add_argument('comment', ...)
    # calling add_argument on the *module* and clobbering the parser.
    # Register 'comment' on the existing parser instead.
    self.reqpare.add_argument('comment', required=False, nullable=True,
                              location=['json', 'form'], default='')
    super().__init__()
def get(self):
    """Return paginated user verification (legalize) application records."""
    page_parser = RequestParser()
    page_parser.add_argument('page', type=inputs.positive, required=False, location='args')
    page_parser.add_argument('per_page', required=False, location='args',
                             type=inputs.int_range(constants.PER_PAGE_MIN,
                                                   constants.PER_PAGE_MAX,
                                                   'per_page'))
    page_parser.add_argument('status', type=inputs.positive, location='args')
    page_parser.add_argument('order_by', location='args')
    args = page_parser.parse_args()
    page = constants.DEFAULT_PAGE if args.page is None else args.page
    per_page = constants.DEFAULT_PER_PAGE if args.per_page is None else args.per_page

    query = LegalizeLog.query
    if args.status is not None:
        query = query.filter_by(status=args.status)
    # 'id' sorts ascending by id; every other value (including no order_by)
    # falls back to most recently updated first.
    if args.order_by == 'id':
        query = query.order_by(LegalizeLog.id.asc())
    else:
        query = query.order_by(LegalizeLog.utime.desc())

    total_count = query.count()
    records = query.offset(per_page * (page - 1)).limit(per_page).all()
    ret = marshal(records, LegalizeListResource.legalize_fields, envelope='legalizes')
    ret['total_count'] = total_count
    return ret
def __init__(self):
    """Configure validation for restaurant review submissions."""
    review_parser = reqparse.RequestParser()
    review_parser.add_argument('restaurant', type=inputs.positive, required=True,
                               location=['form', 'json'],
                               help='No restaurant provided')
    review_parser.add_argument('rating', type=inputs.int_range(1,5), required=True,
                               location=['form', 'json'],
                               help='Rating should be a number between 1 and 5')
    review_parser.add_argument('comment', default='', required=False,
                               nullable=True, location=['form', 'json'])
    self.reqparse = review_parser
    super().__init__()
def test_int_range_high(self):
    """Values above the maximum must be rejected."""
    validate = inputs.int_range(0, 5)
    assert_raises(ValueError, lambda: validate(6))
def test_int_range_low(self):
    """Values below the minimum must be rejected."""
    validate = inputs.int_range(0, 5)
    assert_raises(ValueError, lambda: validate(-1))
def test_int_range_inclusive(self):
    """The upper bound itself is accepted (range is inclusive)."""
    validate = inputs.int_range(1, 5)
    assert_equal(5, validate(5))
def test_int_range_good(self):
    """A value strictly inside the range passes through unchanged."""
    validate = inputs.int_range(1, 5)
    assert_equal(3, validate(3))
def test_int_range_low(self):
    """A value below the minimum raises ValueError."""
    check_below = lambda: inputs.int_range(0, 5, -1, "my_arg")
    assert_raises(ValueError, check_below)
def test_int_range_high(self):
    """A value above the maximum raises ValueError."""
    check_above = lambda: inputs.int_range(0, 5, 6, 'my_arg')
    assert_raises(ValueError, check_above)
def test_int_range_inclusive(self):
    """The upper bound itself is accepted (range is inclusive)."""
    result = inputs.int_range(1, 5, 5, 'my_arg')
    assert_equal(5, result)
accountParser.add_argument("password", required=True) # parser for get schools request pageParser = reqparse.RequestParser(bundle_errors=True) pageParser.add_argument("offset", type=inputs.natural) pageParser.add_argument("limit", type=inputs.positive) # parser for post and put schools requests schoolParser = reqparse.RequestParser(bundle_errors=True) schoolParser.add_argument("schoolName", required=True) schoolParser.add_argument("typeOfSchool", required=True, action='append', choices=["Daycare", "Preschool", "Elementary School (K-5)", "Middle School (6-8)", "High School (9-12)"]) schoolParser.add_argument("pubOrPri", required=True, choices=["Public", "Private"]) schoolParser.add_argument("location", required=True, type=inputs.regex("^[0-9]{5}$"), help="Please enter a valid 5-digit zip code.") schoolParser.add_argument("foreignLanguagesTaught", action='append', choices=["Spanish", "French", "German", "Japanese", "Mandarin"], default=[]) schoolParser.add_argument("rating", required=True, type=inputs.int_range(1, 10)) # parser for post and put students requests studentParser = reqparse.RequestParser(bundle_errors=True) studentParser.add_argument("studentName", required=True) studentParser.add_argument("grade", required=True, choices=["Daycare", "Preschool", "K", "1", "2", "3", "4", "5", "6", "7", "8", "9", "10", "11", "12"]) studentParser.add_argument("currentSchoolST", required=True) studentParser.add_argument("pastSchoolsST", required=True, action='append') # parser for post and put parents requests parentParser = reqparse.RequestParser(bundle_errors=True) parentParser.add_argument("parentName", required=True) # parser for linking students and parents linkStudentToParentParser = reqparse.RequestParser(bundle_errors=True)
def test_int_range_good(self):
    """A value strictly inside the range is returned unchanged."""
    result = inputs.int_range(1, 5, 3, 'my_arg')
    assert_equal(3, result)