class RHTimeline(RHRoomBookingBase):
    @use_args({
        'room_ids': fields.List(fields.Int()),
        'start_dt': fields.DateTime(),
        'end_dt': fields.DateTime(),
        'repeat_frequency': EnumField(RepeatFrequency),
        'repeat_interval': fields.Int(missing=0),
        'flexibility': fields.Int(missing=0)
    })
    def _process(self, args):
        """Return serialized availability data for the requested rooms."""
        rooms = Room.query.filter(Room.is_active, Room.id.in_(args.pop('room_ids')))
        date_range, availability = get_rooms_availability(rooms, **args)
        date_range = [dt.isoformat() for dt in date_range]
        occurrence_keys = ['candidates', 'pre_bookings', 'bookings', 'conflicts',
                           'pre_conflicts']
        for room_data in availability.values():
            # Serialize the room itself, then each occurrence collection in place.
            room_data['room'] = rooms_schema.dump(room_data['room'], many=False).data
            for key in occurrence_keys:
                room_data[key] = serialize_occurrences(room_data[key])
        return jsonify_data(flash=False, availability=availability, date_range=date_range)
class UserInviteResponseSchema(Schema):
    """Serialization schema for a user-invite record."""

    id = fields.Int()
    email = fields.Email(required=True)
    token = fields.Str(required=True)
    expired_at = fields.DateTime(required=True)
    created_at = fields.DateTime(required=True)
    created_by_id = fields.Int(required=True)
class FilterDeviceHealthSchema(FilterSchema):
    """Query filters for device-health listings."""

    # Sort column; defaults to 'created_at' when the client omits it.
    sort_by = fields.Str(
        validate=validate.OneOf(['device_id', 'software_version', 'created_at']),
        missing='created_at')
    device_id = fields.Integer(missing=None)
    start_date_time = fields.DateTime(missing=None)
    end_date_time = fields.DateTime(missing=None)
class RHRoomEvents(RHRoomBase):
    @use_kwargs({
        'start_dt': fields.DateTime(),
        'end_dt': fields.DateTime(),
        'repeat_frequency': EnumField(RepeatFrequency, missing='NEVER'),
        'repeat_interval': fields.Int(missing=1),
    })
    def _process(self, start_dt, end_dt, repeat_frequency, repeat_interval):
        """Return the room's events within the given window as JSON."""
        room_events = get_room_events(self.room, start_dt, end_dt,
                                      repeat_frequency, repeat_interval)
        return jsonify(reservation_user_event_schema.dump(room_events).data)
class RHCreateBooking(RHRoomBookingBase):
    """Create a booking (or pre-booking) for a room."""

    def _validate_room_booking_limit(self, start_dt, end_dt, booking_limit_days):
        # Expand the requested interval to whole days (00:00 .. 23:59) and
        # compare its span against the room's per-booking day limit.
        day_start_dt = datetime.combine(start_dt.date(), time())
        day_end_dt = datetime.combine(end_dt.date(), time(23, 59))
        selected_period_days = (day_end_dt - day_start_dt).days
        return selected_period_days <= booking_limit_days

    @use_args({
        'start_dt': fields.DateTime(required=True),
        'end_dt': fields.DateTime(required=True),
        'repeat_frequency': EnumField(RepeatFrequency, required=True),
        'repeat_interval': fields.Int(missing=0),
        'room_id': fields.Int(required=True),
        'user_id': fields.Int(),
        'booking_reason': fields.String(load_from='reason', validate=validate.Length(min=3)),
        'is_prebooking': fields.Bool(missing=False)
    })
    def _process(self, args):
        room = Room.get_one(args.pop('room_id'))
        user_id = args.pop('user_id', None)
        # Book on behalf of another user only when user_id was supplied.
        booked_for = User.get_one(user_id) if user_id else session.user
        is_prebooking = args.pop('is_prebooking')
        # Check that the booking is not longer than allowed
        booking_limit_days = room.booking_limit_days or rb_settings.get(
            'booking_limit')
        if not self._validate_room_booking_limit(
                args['start_dt'], args['end_dt'], booking_limit_days):
            msg = (
                _('Bookings for the room "{}" may not be longer than {} days'
                  ).format(room.name, booking_limit_days))
            return jsonify(success=False, msg=msg)
        try:
            # Remaining args (dates, frequency, reason) are passed through to
            # the reservation factory; flush so DB-level errors surface here.
            Reservation.create_from_data(room,
                                         dict(args, booked_for_user=booked_for),
                                         session.user,
                                         prebook=is_prebooking)
            db.session.flush()
        except NoReportError as e:
            # Roll back the failed flush and report the error to the client.
            db.session.rollback()
            return jsonify(success=False, msg=unicode(e))
        return jsonify(success=True, is_prebooking=is_prebooking)
class ArticleSchema(Schema):
    """(De)serialization schema for articles."""

    id = fields.Int(dump_only=True)
    title = fields.String(required=True, validate=validate.Length(3))
    content = fields.String(required=False)
    user_id = fields.Int(required=True)
    category_id = fields.Int(required=True)
    timestamp = fields.DateTime(dump_only=True)
    update_date = fields.DateTime(dump_only=True)

    class Meta:
        strict = True
def search_args():
    """Return the webargs field map used to validate index query params."""
    return {
        "search": fields.String(missing=None),
        "team_id": fields.UUID(missing=None),
        "types": fields.String(load_from="type", missing="image"),
        "pipeline": fields.Integer(),
        "start_date": fields.DateTime(),
        "end_date": fields.DateTime(),
        "offset": fields.Integer(missing=0),
        "limit": fields.Integer(missing=12),
        "notify_clients": fields.Boolean(missing=False),
    }
class EpisodeDetailsSchema(Schema):
    """Detailed serialization of a single episode."""

    id = fields.Int(required=True)
    title = fields.Str(required=True)
    author = fields.Str()
    status = fields.Str()
    status_display = fields.Str()
    length = fields.Int(required=True)
    watch_url = fields.URL()
    remote_url = fields.URL()
    image_url = fields.URL()
    file_size = fields.Int()
    description = fields.Str()
    created_at = fields.DateTime(required=True)
    # Nullable: episodes may not be published yet.
    published_at = fields.DateTime(required=True, allow_none=True)
def filter_value_type_to_request_arg_type(name, value_type, allow_multiple,
                                          load_from=None):
    """Map a filter value type to a webargs query-argument field.

    :param name: argument name, used as the ``load_from`` fallback
    :param value_type: one of ``str``, ``float``, ``int`` or ``datetime``
    :param allow_multiple: if true, wrap the field in a ``DelimitedList``
    :param load_from: optional source key overriding *name*
    :raises ValueError: if *value_type* is not supported
    """
    field_classes = {
        str: fields.Str,
        float: fields.Float,
        int: fields.Int,
        datetime: fields.DateTime,
    }
    try:
        field_cls = field_classes[value_type]
    except KeyError:
        # ValueError is a subclass of Exception, so callers catching the old
        # generic Exception keep working.
        raise ValueError(
            "Unsupported value type '{}' for a request argument".format(
                value_type))
    arg_type = field_cls(load_from=load_from or name, location='query')
    if allow_multiple:
        arg_type = fields.DelimitedList(arg_type,
                                        load_from=load_from or name,
                                        location='query')
    return arg_type
class CategoryPostListResource(TokenRequiredResource):
    """Paginated listing of the posts under a given category."""

    get_args = {
        "title": fields.String(allow_none=True, validate=lambda x: 0 <= len(x) <= 255),
        "slug": fields.String(allow_none=True, validate=lambda x: 0 <= len(x) <= 255),
        "author_id": fields.Integer(allow_none=True, validate=lambda x: x > 0),
        "created_at": fields.DateTime(allow_none=True, format="iso8601"),
    }

    @use_args(get_args)
    def get(self, query_args, id):
        """Return the category's posts, filtered by the parsed query args."""
        filters = []
        # NOTE(review): "main" is not declared in get_args above — confirm the
        # parser can ever pass it through before relying on this branch.
        if "main" in query_args:
            filters.append(PostCategory.primary == query_args["main"])
        # Substring filters for the free-text columns.
        for key, column in (("title", Post.title), ("slug", Post.slug)):
            if key in query_args:
                filters.append(column.like("%{filter}%".format(filter=query_args[key])))
        if "author_id" in query_args:
            filters.append(Post.author_id == query_args["author_id"])
        if "created_at" in query_args:
            filters.append(Post.created_at == query_args["created_at"])
        pagination_helper = PaginationHelper(
            request,
            query=Category.query.get(id).posts.filter(*filters),
            resource_for_url="api.category_posts",
            key_name="results",
            schema=post_schema,
            url_parameters={"id": id},
            query_args=query_args,
        )
        return pagination_helper.paginate_query()
class EventSchema(Schema):
    """(De)serialization schema for events."""

    id = fields.Int(dump_only=True)
    owner = fields.Int(dump_only=True)
    category_id = fields.Int(required=True)
    name = fields.Str(required=True, validate=validate.Length(3))
    description = fields.Str(missing='')
    price = fields.Int(missing=0)
    location = fields.Str(required=True, validate=validate.Length(3))
    type = fields.Str(required=True, validate=validate.Length(3))
    maxNumOfAttendees = fields.Int(required=True)
    dueDate = fields.DateTime(dump_only=True)
    dateCreated = fields.DateTime(dump_only=True)
    dateModified = fields.DateTime(dump_only=True)
    isPublic = fields.Bool(dump_only=True)

    class Meta:
        strict = True
class PodcastDetailsSchema(Schema):
    """Detailed serialization of a single podcast."""

    id = fields.Int(required=True)
    name = fields.Str(required=True)
    description = fields.Str(required=True)
    created_at = fields.DateTime(required=True)
    image_url = fields.URL()
    rss_link = fields.URL()
    download_automatically = fields.Boolean(default=True)
    episodes_count = fields.Integer(default=0)
class Task2ListSchema(ma.Schema):
    """List-view serialization of tasks, including hypermedia links."""

    id = fields.UUID()
    # Flatten nested relations to their names.
    status = fields.Str(attribute='status.name')
    ctime = fields.DateTime()
    mtime = fields.DateTime()
    machine = fields.Str(attribute='machine.name')
    priority = fields.Int()
    _links = ma.Hyperlinks({
        'self': ma.AbsoluteURLFor('task2resource', tid='<id>'),
        'collection': ma.AbsoluteURLFor('task2listresource'),
        'uploads': ma.AbsoluteURLFor('task2uploadresource', tid='<id>'),
        'calculation': ma.AbsoluteURLFor('calculationresource', cid='<calculation_id>'),
    })
class AdminGrouponPeriodVerificationView(AdminBaseView):
    """Admin - promotions - group-buy - validate the activity period."""

    @AdminBaseView.permission_required(
        [AdminBaseView.staff_permissions.ADMIN_PROMOTION])
    @use_args(
        {
            "product_id": fields.Integer(required=True, comment="商品id"),
            "from_datetime": fields.DateTime(required=True, comment="拼团活动开始时间"),
            "to_datetime": fields.DateTime(required=True, comment="拼团活动结束时间"),
        },
        location="json")
    def post(self, request, args):
        """Validate the group-buy period for a product and report the result."""
        ok, error_msg = validate_groupon_period(
            args["product_id"], args["from_datetime"], args["to_datetime"])
        if not ok:
            return self.send_fail(error_text=error_msg)
        return self.send_success()
class RHBurotelStats(RHProtected):
    @use_kwargs({
        'start_month': fields.DateTime("%Y-%m"),
        'end_month': fields.DateTime("%Y-%m")
    }, location='query')
    def process(self, start_month, end_month):
        """Return monthly desk-booking statistics as JSON."""
        start_dt, end_dt = get_month_dates(start_month, end_month)
        result, months = calculate_monthly_stats(start_dt, end_dt)
        # Inclusive day count between the boundary dates.
        num_days = (end_dt - start_dt).days + 1
        month_entries = []
        for month in months:
            last_day = month + relativedelta(months=1, days=-1)
            month_entries.append({
                'name': format_datetime(month, "MMMM YYYY", locale=session.lang),
                'id': format_datetime(month, "YYYY-M"),
                'num_days': (last_day - month).days + 1,
            })
        return jsonify(data=result, num_days=num_days, months=month_entries)
class RHBurotelStatsCSV(RHProtected):
    """Export monthly desk-booking statistics as a CSV file."""

    @use_kwargs({
        'start_month': fields.DateTime('%Y-%m'),
        'end_month': fields.DateTime('%Y-%m')
    })
    def process(self, start_month, end_month):
        start_dt, end_dt = get_month_dates(start_month, end_month)
        result, months = calculate_monthly_stats(start_dt, end_dt)
        # number of days within the boundary dates (inclusive)
        num_days = ((end_dt - start_dt).days + 1)
        # One absolute and one percentage column per month, plus totals.
        headers = ['Building', 'Experiment', 'Number of desks']
        for m in months:
            headers += [m.strftime('%b %Y'), m.strftime('%b %Y (%%)')]
        headers.append('Total')
        headers.append('Total (%)')
        rows = []
        for building, experiments in result:
            for experiment, row_data in experiments:
                row = {
                    'Building': building,
                    'Experiment': experiment,
                    'Number of desks': row_data['desk_count']
                }
                # Per-month booking counts and utilization percentages.
                for i, m in enumerate(row_data['months']):
                    month_dt = months[i]
                    # Number of days in this calendar month (inclusive).
                    month_duration = (
                        (months[i] + relativedelta(months=1, days=-1)) -
                        months[i]).days + 1
                    # Bookings as a percentage of total desk-days available.
                    percent = float(m) / (row_data['desk_count'] *
                                          month_duration) * 100
                    row[month_dt.strftime('%b %Y')] = m
                    row[month_dt.strftime('%b %Y (%%)')] = '{:.2f}%'.format(
                        percent)
                row['Total'] = row_data['bookings']
                percent = float(row_data['bookings']) / (
                    row_data['desk_count'] * num_days) * 100
                row['Total (%)'] = '{:.2f}%'.format(percent)
                rows.append(row)
        return send_csv('burotel_stats.csv', headers, rows)
def __init__(self, *, plan_id):
    """Parse optional plan-update arguments from the current request."""
    self.plan_id = plan_id
    spec = {
        'duration': fields.Int(allow_none=True),
        'location_id': fields.Int(allow_none=True),
        'repeat_interval': fields.TimeDelta(allow_none=True),
        'repeat_type': fields.Str(allow_none=True),
        'trigger_time': fields.DateTime('%Y-%m-%d %H:%M:%S'),
        'visible_hours': fields.List(fields.Int, allow_none=True),
        'visible_wdays': fields.List(fields.Int, allow_none=True),
    }
    self.parsed_args = parser.parse(spec, request)
class MallBrowseRecord(MallBaseView):
    """Mall - create a page-browse record."""

    @register_browse_record("product")
    def gen_product_browse_record(self, args: dict):
        # Resolve the product and make sure it belongs to the current shop
        # (both ON and OFF products are accepted).
        product_id = int(args["spa_params"]["product_id"])
        product_ids = list_product_ids_by_shop_id_interface(
            self.current_shop.id, [ProductStatus.ON, ProductStatus.OFF]
        )
        if product_id not in product_ids:
            return False, "货品不存在"
        info = {
            "shop_id": self.current_shop.id,
            "user_id": self.current_user.id,
            "product_id": product_id,
            "start_time": args["start_time"],
            "duration": args["duration"],
            "pre_page_name": args["pre_page"].get("name"),
            "next_page_name": args["next_page"].get("name"),
        }
        create_product_browse_record(info)
        return True, None

    @use_args(
        {
            "fullpath": fields.String(
                required=True, validate=[validate.Length(1, 256)], comment="url全路径"
            ),
            "query": StrToDict(required=True, comment="路由里面的query参数"),
            "cur_page": StrToDict(required=True, comment="当前页面, 包含type, name2个值, str"),
            "pre_page": StrToDict(required=True, comment="上一个页面, 包含type, name2个值, str"),
            "next_page": StrToDict(required=True, comment="下一个页面, 包含type, name2个值, str"),
            "spa_query": StrToDict(required=True, comment="当前页面的一些参数"),
            "spa_params": StrToDict(required=True, comment="当前页面的一些参数"),
            "start_time": fields.DateTime(required=True, comment="进入当前页面的时间"),
            "duration": fields.Integer(
                required=True, validate=[validate.Range(0)], comment="在页面停留的时间"
            ),
        },
        location="json"
    )
    def post(self, request, args, shop_code):
        self._set_current_shop(request, shop_code)
        # For now only product visits are recorded; extend later.
        cur_page_type = args["cur_page"]["type"]
        # Dispatch on the current page type; raises KeyError for unknown types.
        gen_browse_record_func = _MAP_BROWSE_RECORD[cur_page_type]
        success, info = gen_browse_record_func(self, args)
        if not success:
            return self.send_fail(error_text=info)
        return self.send_success()
class RHTimeline(RHRoomBookingBase):
    """Timeline availability for one room (via URL) or a list of rooms."""

    def _process_args(self):
        self.room = None
        # A room_id in the URL restricts the timeline to that single room.
        if 'room_id' in request.view_args:
            self.room = Room.get_one(request.view_args['room_id'])
            if not self.room.is_active:
                raise NotFound

    @use_kwargs({
        'start_dt': fields.DateTime(required=True),
        'end_dt': fields.DateTime(required=True),
        'repeat_frequency': EnumField(RepeatFrequency, missing='NEVER'),
        'repeat_interval': fields.Int(missing=1),
        'room_ids': fields.List(fields.Int(), missing=[]),
    })
    def _process(self, room_ids, **kwargs):
        rooms = [self.room] if self.room else Room.query.filter(
            Room.id.in_(room_ids), Room.is_active).all()
        date_range, availability = get_rooms_availability(rooms, **kwargs)
        date_range = [dt.isoformat() for dt in date_range]
        # Python 2 dict view iteration (legacy codebase).
        for data in availability.viewvalues():
            # add additional helpful attributes
            data.update({
                'num_days_available': len(date_range) - len(data['conflicts']),
                'all_days_available': not data['conflicts']
            })
        serialized = _serialize_availability(availability)
        if self.room:
            availability = serialized[self.room.id]
        else:
            # keep order of original room id list
            availability = sorted(serialized.items(),
                                  key=lambda x: room_ids.index(x[0]))
        return jsonify(availability=availability, date_range=date_range)
def __init__(self):
    """Read list-filter and pagination parameters from the query string."""
    spec = {
        'keyword': fields.Str(),
        'page': fields.Int(missing=1, validate=validate.Range(min=1)),
        'per_page': fields.Int(missing=10, validate=validate.Range(min=1)),
        'plan_trigger_time': fields.DelimitedList(fields.DateTime()),
        'status': fields.Int(),
        'task_ids': fields.DelimitedList(fields.Int()),
    }
    parsed = parser.parse(spec, request, location='querystring')
    page = parsed['page']
    per_page = parsed['per_page']
    self.count = per_page
    self.keyword = parsed.get('keyword')
    self.plan_trigger_time = parsed.get('plan_trigger_time')
    # Zero-based offset of the first item on the requested page.
    self.start = (page - 1) * per_page
    self.status = parsed.get('status')
    self.task_ids = parsed.get('task_ids')
def test_delimited_list_as_string_v2(web_request, parser):
    """DelimitedList(as_string=True) round-trips datetimes through a string."""
    web_request.json = {"dates": "2018-11-01,2018-11-02"}
    schema_cls = dict2schema({
        "dates": fields.DelimitedList(fields.DateTime(format="%Y-%m-%d"),
                                      as_string=True)
    })
    schema = schema_cls()
    parsed = parser.parse(schema, web_request)
    expected = [datetime.datetime(2018, 11, 1), datetime.datetime(2018, 11, 2)]
    assert parsed["dates"] == expected
    dumped = schema.dump(parsed)
    # marshmallow 2 wraps dump() results in a MarshalResult namedtuple.
    data = dumped if MARSHMALLOW_VERSION_INFO[0] >= 3 else dumped.data
    assert data["dates"] == "2018-11-01,2018-11-02"
class RoleUserListResource(TokenRequiredResource):
    """Paginated listing of the users holding a given role."""

    get_args = {
        "name": fields.String(allow_none=True, validate=lambda x: 0 <= len(x) <= 255),
        "email": fields.Email(allow_none=True, validate=validate.Email()),
        "location": fields.String(allow_none=True, validate=lambda x: 0 <= len(x) <= 255),
        "confirmed": fields.Boolean(),
        "created_at": fields.DateTime(allow_none=True, format="iso8601"),
    }

    @admin_required
    @use_args(get_args)
    def get(self, query_args, id):
        """Return users with role *id*, filtered by the parsed query args."""
        filters = [User.role_id == id]
        # Substring filters for the free-text columns.
        for key, column in (("name", User.name), ("email", User.email),
                            ("location", User.location)):
            if key in query_args:
                filters.append(column.like("%{filter}%".format(filter=query_args[key])))
        if "confirmed" in query_args:
            filters.append(User.confirmed == query_args["confirmed"])
        if "created_at" in query_args:
            filters.append(User.created_at == query_args["created_at"])
        pagination_helper = PaginationHelper(
            request,
            query=User.query.filter(*filters),
            resource_for_url="api.role_users",
            key_name="results",
            schema=user_schema,
            url_parameters={"id": id},
        )
        return pagination_helper.paginate_query()
def test_delimited_tuple_default_delimiter(web_request, parser):
    """Round-trip a heterogeneous DelimitedTuple (int, int, datetime).

    Confirms values are properly (de)serialized by the contained fields
    rather than stringified via __str__.
    """
    web_request.json = {"ids": "1,2,2020-05-04"}
    schema_cls = dict2schema({
        "ids": fields.DelimitedTuple(
            (fields.Int, fields.Int, fields.DateTime(format="%Y-%m-%d")))
    })
    schema = schema_cls()
    parsed = parser.parse(schema, web_request)
    assert parsed["ids"] == (1, 2, datetime.datetime(2020, 5, 4))
    assert schema.dump(parsed)["ids"] == "1,2,2020-05-04"
def test_delimited_list_with_datetime(web_request, parser):
    """DelimitedList(DateTime(format=...)) parses and dumps date strings.

    Shows dump() serializes the contained values properly instead of
    relying on __str__.
    """
    web_request.json = {"dates": "2018-11-01,2018-11-02"}
    schema_cls = Schema.from_dict(
        {"dates": fields.DelimitedList(fields.DateTime(format="%Y-%m-%d"))})
    schema = schema_cls()
    parsed = parser.parse(schema, web_request)
    expected = [datetime.datetime(2018, 11, 1), datetime.datetime(2018, 11, 2)]
    assert parsed["dates"] == expected
    assert schema.dump(parsed)["dates"] == "2018-11-01,2018-11-02"
def __init__(self):
    """Parse and store the task-plan arguments from the current request."""
    spec = {
        'duration': fields.Int(allow_none=True),
        'location_id': fields.Int(allow_none=True),
        'repeat_interval': fields.TimeDelta(allow_none=True),
        'repeat_type': fields.Str(allow_none=True),
        'task_id': fields.Int(required=True),
        'trigger_time': fields.DateTime('%Y-%m-%d %H:%M:%S', required=True),
        'visible_hours': fields.List(fields.Int, allow_none=True),
        'visible_wdays': fields.List(fields.Int, allow_none=True),
    }
    parsed = parser.parse(spec, request)
    self.duration = parsed.get('duration')
    self.location_id = parsed.get('location_id')
    self.repeat_interval = parsed.get('repeat_interval')
    self.repeat_type = parsed.get('repeat_type')
    self.task_id = parsed['task_id']
    self.trigger_time = parsed['trigger_time']
    # Normalize the optional lists into sets (empty when absent or None).
    self.visible_hours = set(parsed.get('visible_hours') or [])
    self.visible_wdays = set(parsed.get('visible_wdays') or [])
class DateAddResource(Resource):
    """A datetime adder endpoint."""

    dateadd_args = {
        'value': fields.DateTime(required=False),
        'addend': fields.Int(required=True, validate=validate.Range(min=1)),
        'unit': fields.Str(missing='days', validate=validate.OneOf(['minutes', 'days']))
    }

    @use_kwargs(dateadd_args)
    def post(self, value, addend, unit):
        """Add *addend* minutes or days to *value* (defaults to now, UTC)."""
        base = value or dt.datetime.utcnow()
        if unit == 'minutes':
            delta = dt.timedelta(minutes=addend)
        else:
            delta = dt.timedelta(days=addend)
        return {'result': (base + delta).isoformat()}
class TagImageListResource(TokenRequiredResource):
    """Paginated listing of the images under a given tag."""

    # NOTE(review): get_args declares "slug"/"author_id" which the body never
    # checks, while the body checks "original_filename"/"size" which are not
    # declared here — confirm the intended filter set.
    get_args = {
        "title": fields.String(allow_none=True, validate=lambda x: 0 <= len(x) <= 255),
        "slug": fields.String(allow_none=True, validate=lambda x: 0 <= len(x) <= 255),
        "author_id": fields.Integer(allow_none=True, validate=lambda x: x > 0),
        "created_at": fields.DateTime(allow_none=True, format="iso8601"),
    }

    @use_args(get_args)
    def get(self, query_args, id):
        filters = []
        if "title" in query_args:
            filters.append(
                Image.title.like(
                    "%{filter}%".format(filter=query_args["title"])))
        if "original_filename" in query_args:
            filters.append(
                Image.original_filename.like("%{filter}%".format(
                    filter=query_args["original_filename"])))
        if "size" in query_args:
            filters.append(Image.sizes.contains(query_args["size"]))
        if "created_at" in query_args:
            filters.append(Image.created_at == query_args["created_at"])
        pagination_helper = PaginationHelper(
            request,
            query=Tag.query.get(id).images.filter(*filters),
            resource_for_url="api.tag_images",
            key_name="results",
            # NOTE(review): serializes images with post_schema — presumably
            # intended to be an image schema; verify.
            schema=post_schema,
            url_parameters={"id": id},
            query_args=query_args,
        )
        result = pagination_helper.paginate_query()
        return result
class DateAddResource(object):
    """A datetime adder endpoint."""

    dateadd_args = {
        'value': fields.DateTime(required=False),
        'addend': fields.Int(required=True, validate=validate.Range(min=1)),
        'unit': fields.Str(missing='days', validate=validate.OneOf(['minutes', 'days']))
    }

    @falcon.before(add_args(dateadd_args))
    def on_post(self, req, resp):
        """Add *addend* minutes or days to *value* (defaults to now, UTC)."""
        args = req.context['args']
        base = args['value'] or dt.datetime.utcnow()
        if args['unit'] == 'minutes':
            delta = dt.timedelta(minutes=args['addend'])
        else:
            delta = dt.timedelta(days=args['addend'])
        req.context['result'] = {'result': (base + delta).isoformat()}
@verify_request_json @use_args( { 'UserId': fields.Int(), 'Phone': fields.Str(), 'Token': fields.Str(required=True), 'UserList': fields.Nested( { 'Mac': fields.Str(required=True), 'UserId': fields.Int(), 'Phone': fields.Str(), 'StartTime': fields.DateTime(format='%Y-%m-%d %H:%M'), 'EndTime': fields.DateTime(format='%Y-%m-%d %H:%M'), 'Money': fields.Float(), 'Permission': fields.Int(required=True) }, required=True) }, locations=('json', )) @verify_request_token def addUserPermission(args): userList = request.get_json().get("UserList") userId = userList.get('UserId', None) phone = userList.get('Phone', None) user = User.getUserByIdOrPhoneOrMail(userId, phone) if not user: if phone:
"""Returns fare """ from flask import ( Blueprint, current_app, jsonify ) from webargs import fields from webargs.flaskparser import use_args fare = Blueprint('fare', __name__) # pylint: disable=invalid-name @fare.route("/average_fare_heatmap") @use_args({ "date": fields.DateTime(format='%Y-%m-%d', missing='2016-01-01'), # pylint: disable=E1101 }) # pylint: disable=E1101 def get_average_fare(args): """fare heatmap """ date = args["date"] # noqa: F841 records = (current_app.fare["heatmap"] .query('date == @date') .loc[:, ["s2id", "total_amount"]]) return jsonify(records.to_dict(orient='records'))