class UserSchema(Schema):
    """Marshmallow schema exercising a wide range of field types for a user.

    Several dump_only fields re-serialize the same underlying attribute in
    different formats (``created`` vs ``created_formatted``/``created_iso``,
    ``updated`` vs ``updated_naive``).
    """

    name = fields.String(required=True, validate=validate.Length(min=1, max=255))
    age = fields.Float()
    created = fields.DateTime()
    # Same attribute as `created`, rendered as YYYY-MM-DD.
    created_formatted = fields.DateTime(
        format="%Y-%m-%d", attribute="created", dump_only=True)
    # Same attribute again, rendered in ISO-8601 form.
    created_iso = fields.DateTime(format="iso", attribute="created", dump_only=True)
    # Naive (tz-stripped) view of the same attribute as `updated`.
    updated_naive = fields.NaiveDateTime(attribute="updated", dump_only=True)
    updated = fields.DateTime()
    # Note the upper-case source attribute name.
    species = fields.String(attribute="SPECIES")
    id = fields.String(default="no-id")
    homepage = fields.Url()
    email = fields.Email()
    balance = fields.Decimal()
    registered = fields.Boolean()
    hair_colors = fields.List(fields.Raw)
    sex_choices = fields.List(fields.Raw)
    finger_count = fields.Integer()
    uid = fields.UUID()
    time_registered = fields.Time()
    birthdate = fields.Date()
    since_created = fields.TimeDelta()
    # Restricted to the listed choices; labels are the human-readable names.
    sex = fields.Str(validate=validate.OneOf(
        choices=["male", "female", "non_binary", "other"],
        labels=["Male", "Female", "Non-binary/fluid", "Other"],
    ))
    various_data = fields.Dict()
    # Between 1 and 3 nested addresses are accepted.
    addresses = fields.Nested(Address, many=True,
                              validate=validate.Length(min=1, max=3))
    github = fields.Nested(GithubProfile)
    # Value must be exactly 50 characters long.
    const = fields.String(validate=validate.Length(equal=50))
class ChangeSpareDataValidation(Schema):
    """Validation schema for spare-data change requests.

    Every field is optional (``allow_none=True``); the field-level validators
    below enforce format constraints only when a value is actually supplied.
    """

    data_id = fields.Integer(allow_none=True)
    sn = fields.String(allow_none=True)
    id_number = fields.String(allow_none=True)
    patient_name = fields.String(allow_none=True)
    sex = fields.String(allow_none=True)
    age = fields.Integer(allow_none=True)
    tel = fields.String(allow_none=True)
    doctor = fields.String(allow_none=True)
    time = fields.Time(allow_none=True)
    date = fields.Date(allow_none=True)
    glucose = fields.Float(allow_none=True)
    hidden = fields.Boolean(allow_none=True)

    @validates('data_id')
    def validate_data_id(self, value):
        """Reject any client-supplied data_id (it must not be posted)."""
        if value is not None:
            raise ValidationError('data_id should not be posted')

    @validates('sn')
    def validate_sn(self, value):
        """Serial number must be exactly 8 characters."""
        if value is not None and len(value) != 8:
            raise ValidationError('the length of sn is wrong')

    @validates('tel')
    def validate_tel(self, value):
        """Telephone number must be digits only and 7-11 characters long."""
        if value is None:
            return
        if not value.isdigit():
            raise ValidationError('tel must be made by number')
        # Replaces the original convoluted `A and B or A and C` chain with an
        # equivalent chained comparison: raise unless 7 <= len <= 11.
        if not 7 <= len(value) <= 11:
            raise ValidationError('the length of tel is wrong')

    @validates('sex')
    def validate_sex(self, value):
        """Sex must be one of the two accepted values ('男' / '女')."""
        if value is not None and value not in ('男', '女'):
            raise ValidationError('sex is wrong')
class TimeCourseSchema(Schema):
    """Schema holding a start and end time of day."""
    start_time = fields.Time()
    end_time = fields.Time()
class CartSchema(Schema):
    """Shopping-cart document: id, owning user, update time and products."""
    _id = fields.String()
    user = fields.String()
    # NOTE(review): fields.Time serializes a time-of-day only; a full
    # DateTime seems more natural for "last_updated" — confirm intent.
    last_updated = fields.Time()
    products = fields.List(fields.Nested(CartProductSchema))
class AnalysisSchema(Schema):
    """ Primary analysis schema """
    hash_id = fields.Str(dump_only=True, description='Hashed analysis id.')
    parent_id = fields.Str(dump_only=True,
                           description="Parent analysis, if cloned.")
    name = fields.Str(required=True, description='Analysis name.')
    description = fields.Str()
    predictions = fields.Str(description='User apriori predictions.')
    dataset_id = fields.Int(required=True)
    task_name = fields.Str(description='Task name', dump_only=True)
    TR = fields.Float(description='Time repetition (s)', dump_only=True)
    model = fields.Dict(description='BIDS model.')
    # NOTE(review): fields.Time keeps only the time of day; DateTime may be
    # intended for these timestamps — confirm.
    created_at = fields.Time(dump_only=True)
    modified_at = fields.Time(dump_only=True)
    submitted_at = fields.Time(
        description='Timestamp of when analysis was submitted for compilation',
        dump_only=True)
    locked = fields.Bool(
        description='Is analysis locked by admins?', dump_only=True)
    compile_traceback = fields.Str(
        description='Traceback of compilation error.', dump_only=True)
    status = fields.Str(
        description='PASSED, FAILED, PENDING, or DRAFT.', dump_only=True)
    upload_status = fields.Str(
        description='PASSED, FAILED, PENDING, or DRAFT.', dump_only=True)
    private = fields.Bool(description='Analysis private or discoverable?')
    predictors = fields.Nested(
        PredictorSchema, many=True, only='id',
        description='Predictor id(s) associated with analysis')
    runs = fields.Nested(
        RunSchema, many=True, only='id',
        description='Runs associated with analysis')

    @validates('dataset_id')
    def validate_dsid(self, value):
        # The referenced dataset id must exist in the database.
        if Dataset.query.filter_by(id=value).count() == 0:
            raise ValidationError('Invalid dataset id.')

    @validates('runs')
    def validate_runs(self, value):
        # Each {"id": ...} dict must match exactly one Run row.
        try:
            [Run.query.filter_by(**r).one() for r in value]
        except Exception:
            raise ValidationError('Invalid run id!')

    @validates('predictors')
    def validate_preds(self, value):
        # Each {"id": ...} dict must match exactly one Predictor row.
        try:
            [Predictor.query.filter_by(**r).one() for r in value]
        except Exception:
            raise ValidationError('Invalid predictor id.')

    @pre_load
    def unflatten(self, in_data):
        # Accept flat id lists and wrap each id as {"id": ...} so the
        # Nested(many=True) fields can consume them.
        if 'runs' in in_data:
            in_data['runs'] = [{"id": r} for r in in_data['runs']]
        if 'predictors' in in_data:
            in_data['predictors'] = [{"id": r} for r in in_data['predictors']]
        return in_data

    @post_load
    def nested_object(self, args):
        # Replace the validated id dicts with the actual ORM objects.
        if 'runs' in args:
            args['runs'] = [
                Run.query.filter_by(**r).one() for r in args['runs']]
        if 'predictors' in args:
            args['predictors'] = [
                Predictor.query.filter_by(**r).one()
                for r in args['predictors']]
        return args

    class Meta:
        strict = True
class AddScheduleSchema(Schema):
    """Input schema for creating a schedule entry."""

    # validate.OneOf reports the allowed choices in its error message; the
    # previous bare lambda produced only a generic "Invalid value." error.
    str_type = fields.String(
        required=True,
        validate=validate.OneOf(('start', 'end', 'eat', 'rest')),
    )
    time_start = fields.Time(required=True)
    time_end = fields.Time(required=True)
def test_invalid_time_field_deserialization(self, in_data):
    """Deserializing an invalid value through fields.Time raises ValidationError."""
    time_field = fields.Time()
    with pytest.raises(ValidationError) as caught:
        time_field.deserialize(in_data)
    message = caught.value.args[0]
    assert message == 'Not a valid time.'
class SessionLogsSchema(Schema):
    """Schema for a session log entry: which URL was hit, and when."""
    # Fixed 36-character identifier (UUID-sized string).
    uid = fields.String(required=True, validate=validate.Length(min=36, max=36))
    url = fields.String(required=True, validate=validate.Length(min=2, max=255))
    date = fields.Date(required=True)
    time = fields.Time(required=True)
class TestSchema(Schema):
    """Minimal schema with one field of each date/time flavour."""
    f_date = fields.Date()
    f_datetime = fields.DateTime()
    f_time = fields.Time()
class Food(Entry):
    """Entry subclass recording an integer value at a time of day."""
    value = fields.Integer()
    time = fields.Time()

    class Meta:
        strict = True
class ReportSchema(Schema):
    """ Schema for report results """
    # NOTE(review): fields.Time carries a time-of-day only; DateTime may be
    # intended for a generation timestamp — confirm.
    generated_at = fields.Time(description='Time report was generated')
    result = fields.Dict(description='Links to report resources')
    status = fields.Str(description='Report status')
    traceback = fields.Str(description='Traceback of generation error.')
class PeriodSchema(Schema):
    """A required from/to time-of-day interval."""
    period_from = fields.Time(required=True)
    period_to = fields.Time(required=True)
def test_time_field(self):
    """fields.Time serializes via isoformat(), truncated to 12 characters."""
    time_field = fields.Time()
    want = self.user.time_registered.isoformat()[:12]
    got = time_field.output("time_registered", self.user)
    assert got == want
class NeurovaultCollectionSchema(Schema):
    """ Schema for report results """
    # NOTE(review): fields.Time keeps time-of-day only; DateTime may be
    # intended for a creation timestamp — confirm.
    uploaded_at = fields.Time(description='Time collections was created')
    collection_id = fields.Dict(description='NeuroVault collection id')
class DateTimeRangeSchema(BaseSchema):
    """A date plus a from/to time range, with localized field titles."""
    date = fields.Date(title=get_preflabel_lazy('date'))
    time_from = fields.Time(title=get_preflabel_lazy('time'))
    time_to = fields.Time(title=get_preflabel_lazy('time'))
    # Model class this schema materializes into.
    __model__ = DateTimeLocationModel
class AlertSchema(Schema):
    """Schema for alerts"""
    uid = fields.String(dump_only=True, dump_to='id')
    name = fields.String(required=True)
    type = fields.String(required=True)
    sensor_type = fields.String(required=True)
    value = fields.String(allow_none=True, missing='0')
    field = fields.String(missing=None)
    # Daily window in which the alert may fire; defaults to the whole day.
    between_start = fields.Time(missing="00:00:00")
    between_end = fields.Time(missing="23:59:59")
    snooze = fields.Integer(missing=None)
    threshold_duration = fields.Integer(missing=0)
    alert_text = fields.String(missing=None)
    recipients = fields.List(fields.Email, missing=[])
    web_hooks = fields.List(fields.Dict, missing=[])
    alert_if = fields.String(missing=None)
    polygon = fields.List(fields.List(fields.Float), missing=[],
                          allow_none=True)
    is_active = fields.Boolean(missing=False)
    created_at = fields.DateTime(dump_only=True)
    # NOTE(review): the constant name looks like a typo for ..._NOTIFICATION;
    # it is defined elsewhere, so the spelling must stay consistent here.
    action_type = fields.String(missing=ALERT_ACTION_NITIFICATION)
    actuator_type = fields.String(missing=None)
    config_field = fields.String(missing=None)
    config_value = fields.String(missing=None)

    #
    # @pre_load
    # def check_action_type(self, data):
    #     if 'action_type' in list(data.keys()) and data['action_type'] == ALERT_ACTION_TRIGGER:
    #         self.fields['snooze'].required = False
    #         self.fields['alert_text'].required = False
    #
    #         self.fields['config_field'].required = True
    #         self.fields['config_field'].allow_none = False
    #         self.fields['actuator_type'].allow_none = False
    #         self.fields['config_value'].allow_none = False
    #     return data

    @pre_dump
    def add_action_type(self, alert):
        # Backfill a default action_type on legacy rows before dumping.
        if not alert.action_type:
            alert.action_type = ALERT_ACTION_NITIFICATION
        return alert

    @validates_schema
    def valida_types(self, data):
        """Validate sensor type with alert type and field"""
        all_sensor_types = get_all_types()
        sensor_type = data['sensor_type']
        if data['type'] not in ALERT_TYPES_ALL:
            raise ValidationError("Invalid alert type", "type")
        if sensor_type == 'all':
            # "all"-sensor alerts only support the common alert types.
            if data["type"] not in ALERT_TYPES_COMMON:
                raise ValidationError(
                    'Invalid alert type for sensor type all. '
                    'Valid values are: {}'
                    .format(", ".join(ALERT_TYPES_COMMON)), "type")
        elif sensor_type in all_sensor_types.keys():
            if not data['field'] and data["type"] not in ALERT_TYPES_COMMON:
                raise ValidationError("'field' is required", 'field')
            # NOTE(review): this check also runs when 'field' is None/empty
            # and the type IS common, which then fails as "field is invalid".
            # Confirm that is the intended behaviour.
            if data['field'] not in all_sensor_types[sensor_type][
                    'fields'].keys():
                raise ValidationError(
                    "field is invalid for sensor type: {}".format(sensor_type),
                    "field")
        else:
            raise ValidationError(
                'Invalid sensor_type. Valid values are: all, {}'.format(
                    ", ".join(all_sensor_types.keys())), 'sensor_type')
        if not "action_type" in data.keys():
            data["action_type"] = ALERT_ACTION_NITIFICATION
        # Trigger alerts need actuator configuration; notification alerts
        # need snooze/alert_text instead.
        if data["action_type"] == ALERT_ACTION_TRIGGER:
            required_fields = ['actuator_type', 'config_field', 'config_value']
        else:
            required_fields = ['snooze', 'alert_text']
        for f in required_fields:
            if f not in data.keys() or data[f] is None:
                raise ValidationError(
                    "%s is required for action_type = %s" %
                    (f, data['action_type']), f)
fields.List(fields.Date(), validate=validate.Length( equal=2)) if operation == 'between' else fields.Date(), *arr[1:] ] for operation, arr in FIELD_EXTENSIONS[fields.Integer].items() } FIELD_EXTENSIONS[fields.DateTime] = { operation: [ fields.List(fields.DateTime(), validate=validate.Length( equal=2)) if operation == 'between' else fields.DateTime(), *arr[1:] ] for operation, arr in FIELD_EXTENSIONS[fields.Integer].items() } FIELD_EXTENSIONS[fields.Time] = { operation: [ fields.List(fields.Time(), validate=validate.Length( equal=2)) if operation == 'between' else fields.Time(), *arr[1:] ] for operation, arr in FIELD_EXTENSIONS[fields.Integer].items() } def extend_schema_for_extra_search(schema_cls): extended_fields_values = {} for coln, colv in dict(schema_cls._declared_fields).items(): colname = colv.attribute or coln field_ext = {} for base in colv.__class__.__mro__: field_ext = FIELD_EXTENSIONS.get(base, {}) if field_ext: break
class NeurovaultCollectionSchema(Schema):
    """ Schema for report results """
    # NOTE(review): fields.Time keeps time-of-day only; DateTime may be
    # intended for an upload timestamp — confirm.
    uploaded_at = fields.Time(description='Time images upload began')
    collection_id = fields.Dict(description='NeuroVault collection id')
    status = fields.Str(description='Upload status')
    traceback = fields.Str(description='Traceback of upload error.')
def test_invalid_time_field_deserialization(self):
    """An unparseable string raises UnmarshallingError with a descriptive message."""
    bad_value = 'badvalue'
    time_field = fields.Time()
    with pytest.raises(UnmarshallingError) as caught:
        time_field.deserialize(bad_value)
    expected = 'Could not deserialize {0!r} to a time object.'.format(bad_value)
    assert expected in str(caught)
class ScoreSchema(Schema):
    """Score record with two required integer measures."""
    id = fields.Int(dump_only=True)
    # NOTE(review): fields.Time stores a time-of-day only; DateTime may be
    # intended for a timestamp — confirm.
    timestamp = fields.Time(required=True)
    iq = fields.Int(required=True)
    dog = fields.Int(required=True)
def test_time_field(self, user):
    """fields.Time serializes via isoformat(), truncated to 12 characters."""
    time_field = fields.Time()
    want = user.time_registered.isoformat()[:12]
    got = time_field.serialize("time_registered", user)
    assert got == want
def test_time_field_format(self, user):
    """A custom strftime format string is honoured on serialization."""
    custom_fmt = "%H:%M:%S"
    time_field = fields.Time(format=custom_fmt)
    expected = user.birthtime.strftime(custom_fmt)
    assert time_field.serialize("birthtime", user) == expected
(str, fields.String(required=True), "string", "string"), (float, fields.Float(required=True), 5.0, 5.0), (bool, fields.Boolean(required=True), "true", True), ( dt.datetime, fields.DateTime(required=True), "2019-02-15T12:03:14", dt.datetime(2019, 2, 15, 12, 3, 14), ), ( uuid.UUID, fields.UUID(required=True), "ec367d2b-53ac-44cc-9db1-45b81cf3b78b", uuid.UUID("ec367d2b-53ac-44cc-9db1-45b81cf3b78b"), ), (dt.time, fields.Time(required=True), "00:00:00", dt.time(0, 0)), (dt.date, fields.Date(required=True), "2019-02-15", dt.date( 2019, 2, 15)), ( dt.timedelta, fields.TimeDelta(precision="seconds", required=True), 5, dt.timedelta(seconds=5), ), (decimal.Decimal, fields.Decimal(required=True), 5.0, decimal.Decimal("5.0")), ], ) def test_basic_typing(attr_type, field, data, loaded): class Response(Schema): foo: attr_type
class ResponseScheduleSchema(Schema):
    """Output schema mirroring a schedule entry (all fields optional)."""
    str_type = fields.String()
    time_start = fields.Time()
    time_end = fields.Time()
def test_invalid_time_field_deserialization(self, in_data):
    """Invalid input raises ValidationError carrying a descriptive message."""
    time_field = fields.Time()
    with pytest.raises(ValidationError) as caught:
        time_field.deserialize(in_data)
    expected = 'Could not deserialize {0!r} to a time object.'.format(in_data)
    assert expected in str(caught)
class _Articles(Schema):
    """Article serializer; dump_to renames keys in the dumped payload."""
    id = fields.Integer(dump_to='_id', required=True)
    title = fields.String(dump_to='title', required=True)
    # NOTE(review): fields.Time drops the date component; DateTime may be
    # intended for a creation timestamp — confirm.
    create_at = fields.Time(dump_to='createTime', required=True)
    tags = fields.List(fields.String(), dump_to='tags', required=True)
class TimeOptionSchema(Schema):
    """A selectable time option attached to an event."""
    id = fields.Int(required=True)
    event = fields.Nested(EventSchema)
    timestamp = fields.Time()
class ViolationsSchema(pl.BaseSchema):
    """Schema normalizing inspection/violation rows for an ETL load."""
    encounter = fields.String(allow_none=True)
    id = fields.String(allow_none=True)
    placard_st = fields.String(allow_none=True)
    # placard_desc = fields.String(allow_none=True)
    facility_name = fields.String(allow_none=True)
    bus_st_date = fields.Date(allow_none=True)
    # category_cd = fields.String(allow_none=True)
    description = fields.String(allow_none=True)
    description_new = fields.String(allow_none=True)
    num = fields.String(allow_none=True)
    street = fields.String(allow_none=True)
    city = fields.String(allow_none=True)
    state = fields.String(allow_none=True)
    zip = fields.String(allow_none=True)  # This was (for no obvious
    # reason) a Float in the predecessor of this ETL job.
    inspect_dt = fields.Date(allow_none=True)
    start_time = fields.Time(allow_none=True)
    end_time = fields.Time(allow_none=True)
    municipal = fields.String(allow_none=True)
    rating = fields.String(allow_none=True)
    low = fields.String(allow_none=True)
    medium = fields.String(allow_none=True)
    high = fields.String(allow_none=True)
    url = fields.String(allow_none=True)

    class Meta():
        ordered = True

    @pre_load
    def strip_strings(self, data):
        # Repair mojibake and strip whitespace on the known-text columns.
        fields_to_recode = ['facility_name', 'description']
        for field in fields_to_recode:
            data[field] = fix_encoding_errors(data[field].strip())
        fields_to_strip = ['num']
        for field in fields_to_strip:
            if type(data[field]) == str:
                data[field] = fix_encoding_errors(data[field].strip())
        # NOTE(review): this pre_load hook does not return `data`; depending
        # on the marshmallow version that may discard the row — confirm.

    def fix_dates_times(self, data):
        # Normalize source date/time strings to ISO format in place.
        # NOTE(review): unlike strip_strings this has no @pre_load decorator,
        # so it only runs if called explicitly — confirm intent.
        if data['bus_st_date']:
            #data['bus_st_date'] = datetime.strptime(data['bus_st_date'], "%m/%d/%Y %H:%M").date().isoformat()
            data['bus_st_date'] = parser.parse(
                data['bus_st_date']).date().isoformat()
        if data['inspect_dt']:
            #data['inspect_dt'] = datetime.strptime(data['inspect_dt'], "%m/%d/%Y %H:%M").date().isoformat()
            data['inspect_dt'] = parser.parse(
                data['inspect_dt']).date().isoformat()
        if data['start_time']:
            data['start_time'] = datetime.strptime(
                data['start_time'], "%I:%M %p").time().isoformat()
        if data['end_time']:
            data['end_time'] = datetime.strptime(
                data['end_time'], "%I:%M %p").time().isoformat()
        # Coerce numeric ids read from the source into plain strings.
        to_string = ['encounter', 'id']
        for field in to_string:
            if type(data[field]) != str:
                data[field] = str(int(data[field]))
class ChargerConfigSchema(Schema):
    """ ChargerConfigModel Schema """
    # Only charger_name is writable; every other field is dump_only and is
    # therefore exposed read-only through this schema.
    charger_name = fields.Str(required=True)
    charger_tariff = fields.Float(dump_only=True)
    modified_at = fields.DateTime(dump_only=True)
    secret_key = fields.Str(dump_only=True)
    wtf_csrf_secret_key = fields.Str(dump_only=True)
    use_reloader = fields.Bool(dump_only=True)
    factor_whkm = fields.Float(dump_only=True)
    modbus_interval = fields.Integer(dump_only=True)
    # autosession_* settings
    autosession_enabled = fields.Bool(dump_only=True)
    autosession_minutes = fields.Int(dump_only=True)
    autosession_energy = fields.Float(dump_only=True)
    autosession_condense_same_odometer = fields.Bool(dump_only=True)
    # LED / GPIO pin assignments
    pulseled_min = fields.Int(dump_only=True)
    pulseled_max = fields.Int(dump_only=True)
    gpio_mode = fields.Str(dump_only=True)
    pin_led_red = fields.Int(dump_only=True)
    pin_led_green = fields.Int(dump_only=True)
    pin_led_blue = fields.Int(dump_only=True)
    pin_buzzer = fields.Int(dump_only=True)
    pin_evse_switch = fields.Int(dump_only=True)
    pin_evse_led = fields.Int(dump_only=True)
    # Access / policy flags
    peakhours_offpeak_enabled = fields.Bool(dump_only=True)
    peakhours_allow_peak_one_period = fields.Bool(dump_only=True)
    webcharge_on_dashboard = fields.Bool(dump_only=True)
    wakeup_vehicle_on_data_request = fields.Bool(dump_only=True)
    auth_webcharge = fields.Bool(dump_only=True)
    restrict_dashboard_access = fields.Bool(dump_only=True)
    restrict_menu = fields.Bool(dump_only=True)
    allow_local_dashboard_access = fields.Bool(dump_only=True)
    router_ip_address = fields.Str(dump_only=True)
    receipt_prefix = fields.Str(dump_only=True)
    # backup_* / smb_backup_* settings
    backup_enabled = fields.Bool(dump_only=True)
    backup_interval = fields.Str(dump_only=True)
    backup_interval_weekday = fields.Str(dump_only=True)
    backup_interval_calday = fields.Str(dump_only=True)
    backup_time_of_day = fields.Time(dump_only=True)
    backup_local_history = fields.Int(dump_only=True)
    backup_success_timestamp = fields.DateTime(dump_only=True)
    os_backup_enabled = fields.Bool(dump_only=True)
    os_backup_type = fields.Str(dump_only=True)
    os_backup_history = fields.Int(dump_only=True)
    smb_backup_servername_or_ip_address = fields.Str(dump_only=True)
    smb_backup_username = fields.Str(dump_only=True)
    smb_backup_password = fields.Str(dump_only=True)
    smb_backup_service_name = fields.Str(dump_only=True)
    smb_backup_remote_path = fields.Str(dump_only=True)
    vehicle_data_on_dashboard = fields.Bool(dump_only=True)
def test_time_field(self):
    """fields.Time output matches isoformat() truncated to 12 characters."""
    time_field = fields.Time()
    expected = self.user.time_registered.isoformat()[:12]
    actual = time_field.output("time_registered", self.user)
    assert_equal(actual, expected)