class ServerSchema(Schema):
    """Serializer schema for server JSON representation."""

    # Dump-only aggregate fields that get human-readable formatting on output.
    time_stats_fields = ['min_match_time', 'max_match_time', 'avg_match_time']

    endpoint = fields.Str(required=True)
    title = fields.Str(required=True)
    total_matches = fields.Int(dump_only=True)
    total_players = fields.Int(dump_only=True)
    min_match_time = fields.TimeDelta(dump_only=True)
    max_match_time = fields.TimeDelta(dump_only=True)
    avg_match_time = fields.TimeDelta(dump_only=True)

    @validates('endpoint')
    def validate_endpoint(self, endpoint):
        """Validate that the endpoint looks like ``domain-port``.

        Raises:
            ValidationError: if the value does not match the template.
        """
        if not re.fullmatch(r'^.+-\d{4,5}', endpoint):
            message = 'Endpoint must match template: domain-port.'
            raise ValidationError(message)

    @post_dump
    def format_time_output(self, data):
        """Format time fields for output.

        Bug fixes versus the previous version:
        * ``if all(self.time_stats_fields) in data`` evaluated to
          ``True in data`` (``all()`` of non-empty strings is ``True``), so
          the intended "all stats keys present" check never worked; it is
          now an explicit per-key membership test.
        * The hook did not return ``data``; a ``@post_dump`` hook must
          return the (possibly modified) output dict.
        """
        if all(field in data for field in self.time_stats_fields):
            data['min_match_time'] = format_time(data.get('min_match_time', 0))
            data['max_match_time'] = format_time(data.get('max_match_time', 0))
            data['avg_match_time'] = format_time(data.get('avg_match_time', 0))
        return data
class UserSerializer(Schema):
    """Serialize a user together with open-task and time-total aggregates."""

    name = fields.String()
    open_tasks = fields.Integer()
    total_today = fields.TimeDelta()
    total_week = fields.TimeDelta()

    class Meta:
        # Raise on validation errors rather than collecting them (marshmallow 2.x).
        strict = True
class PureFAPGroupSchedule(Schema):
    """Protection-group schedule as reported by the FlashArray REST API.

    ``load_from`` maps the API's snake_case keys onto camelCase attributes.
    """

    name = fields.Str()
    snapFrequency = fields.TimeDelta(load_from="snap_frequency")
    replicateFrequency = fields.TimeDelta(load_from="replicate_frequency")
    replicateEnabled = fields.Bool(load_from="replicate_enabled")
    snapEnabled = fields.Bool(load_from="snap_enabled")
    snapAt = fields.TimeDelta(load_from="snap_at", allow_none=True)
    replicateAt = fields.TimeDelta(load_from="replicate_at", allow_none=True)
    replicateBlackout = fields.Nested(
        PureFAWindow,
        allow_none=True,
        load_from="replicate_blackout",
        many=True,
    )
class RouteSchema(Schema):
    """A priced route: an onward leg plus an optional return leg."""

    price = fields.Nested(PriceSchema)

    # Onward leg.
    onward_info = fields.Nested(PathInformationSchema, allow_none=True)
    onward_flights = fields.List(fields.Nested(FlightSchema))
    onward_is_direct = fields.Bool()
    onward_time = fields.TimeDelta(precision=fields.TimeDelta.MINUTES)

    # Return leg (mirrors the onward fields).
    return_info = fields.Nested(PathInformationSchema, allow_none=True)
    return_flights = fields.List(fields.Nested(FlightSchema))
    return_is_direct = fields.Bool()
    return_time = fields.TimeDelta(precision=fields.TimeDelta.MINUTES)
class TaskSchema(TaskMethodsMixin, ObjectSchema):
    """Schema for serializing a ``prefect.core.Task``."""

    class Meta:
        object_class = lambda: prefect.core.Task
        exclude_fields = ["type", "inputs", "outputs"]

    # Dump the fully-qualified class name; loading keeps the string as-is.
    type = fields.Function(lambda task: to_qualified_name(type(task)), lambda x: x)
    name = fields.String(allow_none=True)
    slug = fields.String(allow_none=True)
    description = fields.String(allow_none=True)
    tags = fields.List(fields.String())
    max_retries = fields.Integer(allow_none=True)
    retry_delay = fields.TimeDelta(allow_none=True)
    inputs = fields.Method("load_inputs", allow_none=True)
    outputs = fields.Method("load_outputs", allow_none=True)
    timeout = fields.Integer(allow_none=True)
    trigger = StatefulFunctionReference(
        valid_functions=[
            prefect.triggers.all_finished,
            prefect.triggers.manual_only,
            prefect.triggers.always_run,
            prefect.triggers.all_successful,
            prefect.triggers.all_failed,
            prefect.triggers.any_successful,
            prefect.triggers.any_failed,
            prefect.triggers.some_failed,
            prefect.triggers.some_successful,
        ],
        # don't reject custom functions, just leave them as strings
        reject_invalid=False,
        allow_none=True,
    )
    skip_on_upstream_skip = fields.Boolean(allow_none=True)
    cache_for = fields.TimeDelta(allow_none=True)
    cache_key = fields.String(allow_none=True)
    cache_validator = StatefulFunctionReference(
        valid_functions=[
            prefect.engine.cache_validators.never_use,
            prefect.engine.cache_validators.duration_only,
            prefect.engine.cache_validators.all_inputs,
            prefect.engine.cache_validators.all_parameters,
            prefect.engine.cache_validators.partial_inputs_only,
            prefect.engine.cache_validators.partial_parameters_only,
        ],
        # don't reject custom functions, just leave them as strings
        reject_invalid=False,
        allow_none=True,
    )
    auto_generated = fields.Boolean(allow_none=True)
class ContributionResultSchema(ResultSchemaBase):
    """Search result describing a single event contribution."""

    type = EnumField(SearchTarget, validate=require_search_target(SearchTarget.contribution))
    event_id = fields.Int(required=True)
    contribution_id = fields.Int(required=True)
    title = fields.String(required=True)
    description = fields.String(required=True)
    start_dt = fields.DateTime(missing=None)
    end_dt = fields.DateTime(missing=None)
    persons = fields.List(fields.Nested(PersonSchema), required=True)
    location = fields.Nested(LocationResultSchema, required=True)
    duration = fields.TimeDelta(precision=fields.TimeDelta.MINUTES)
    highlight = fields.Nested(HighlightSchema, missing={})
    # extra fields that are not taken from the data returned by the search engine
    url = fields.Method('_get_url')
    event_path = fields.Method('_get_event_path', dump_only=True)

    def _get_url(self, data):
        """Build the display URL for the contribution."""
        return url_for('contributions.display_contribution',
                       event_id=data['event_id'],
                       contrib_id=data['contribution_id'])

    def _get_event_path(self, data):
        """Return the breadcrumb path (just the event), or [] if it's gone."""
        event = Event.get(data['event_id'])
        if not event:
            return []
        return [
            {'type': 'event', 'id': event.id, 'title': event.title, 'url': event.url}
        ]
class IntervalScheduleSchema(marshmallow.Schema):
    """Deserialize an interval definition into a ``schedules.schedule``."""

    run_every = fields.TimeDelta(required=True)
    relative = fields.Bool(default=False, missing=False)

    @marshmallow.post_load
    def make_schedule(self, data):
        """Build the schedule object from the validated payload."""
        return schedules.schedule(**data)
class TaskSerializer(Schema):
    """(De)serialize a Task, including nested time entries and totals."""

    id = fields.Integer()
    title = fields.String(required=True, validate=validate.Length(min=3, max=100))
    url = fields.Url(required=False, allow_none=True)
    description = fields.String(validate=validate.Length(max=1000), allow_none=True)
    project_id = fields.Integer(required=True, validate=validate_project_id)
    created_at = fields.DateTime(dump_only=True)
    updated_at = fields.DateTime(dump_only=True)
    time_entries = fields.Nested(TimeEntrySerializer, many=True, dump_only=True)
    total_time = fields.TimeDelta(default=0, dump_only=True)
    is_closed = fields.Boolean(dump_only=True)

    @pre_load
    def clean_url(self, data):
        """Normalize a falsy url ('' etc.) to an explicit None before validation."""
        if not data.get('url'):
            data['url'] = None
        return data

    @post_load
    def make_task(self, data):
        """Build a Task; stamp timestamps only for brand-new (id-less) tasks."""
        if 'id' not in data:
            for key in ('created_at', 'updated_at'):
                data[key] = tz_now()
        return Task(**data)
class UserSchema(Schema):
    """Kitchen-sink user schema exercising most marshmallow field types."""

    name = fields.String(required=True, validate=validate.Length(min=1, max=255))
    age = fields.Float()
    created = fields.DateTime()
    # Alternate renderings of the same 'created' attribute.
    created_formatted = fields.DateTime(format="%Y-%m-%d", attribute="created")
    created_iso = fields.DateTime(format="iso", attribute="created")
    updated = fields.DateTime()
    updated_local = fields.LocalDateTime(attribute="updated")
    species = fields.String(attribute="SPECIES")
    id = fields.String(default='no-id')
    homepage = fields.Url()
    email = fields.Email()
    balance = fields.Decimal()
    registered = fields.Boolean()
    hair_colors = fields.List(fields.Raw)
    sex_choices = fields.List(fields.Raw)
    finger_count = fields.Integer()
    uid = fields.UUID()
    time_registered = fields.Time()
    birthdate = fields.Date()
    since_created = fields.TimeDelta()
    sex = fields.Str(validate=validate.OneOf(['male', 'female']))
    various_data = fields.Dict()
    addresses = fields.Nested(Address, many=True,
                              validate=validate.Length(min=1, max=3))
    github = fields.Nested(GithubProfile)
    const = fields.String(validate=validate.Length(equal=50))
class TrainingTaskSchema(BaseModelSchema):
    """Serialize a training task with its configuration, statuses and children."""

    datasource_configuration_id = fields.Integer()
    datasource_configuration = fields.Nested(DataSourceConfigurationSchema)
    datasources = fields.Nested('DataSourceSchema', many=True,
                                only=('id', 'name', 'upload_code'))
    name = fields.String()
    company_id = fields.Integer()
    company_configuration_id = fields.Integer()
    company_configuration = fields.Nested(CompanyConfigurationSchema)
    user_id = fields.Integer()
    statuses = fields.Nested(TrainingTaskStatusSchema, many=True)
    task_code = fields.String()
    configuration = fields.Raw()
    is_completed = fields.Boolean()
    running_time = fields.TimeDelta(allow_none=True)
    status = fields.String(allow_none=True)
    detection_task_list = fields.Nested(
        'DetectionTaskSchema',
        many=True,
        only=('id', 'name', 'task_code', 'upload_code', 'created_at',
              'statuses', 'datasource'),
    )
    # Optional link to the training task this one was derived from.
    parent_training_id = fields.Integer(allow_none=True)
    parent_task = fields.Nested('self', only=('id', 'name', 'task_code'),
                                allow_none=True, many=None)
    has_fft_enabled = fields.Bool()
class IntervalTriggerSchema(Schema):
    """(De)serialize an interval trigger; dumps split the interval into units."""

    weeks = fields.Integer()
    days = fields.Integer()
    hours = fields.Integer()
    minutes = fields.Integer()
    seconds = fields.Integer()
    interval = fields.TimeDelta(dump_only=True)
    start_date = fields.DateTime()
    end_date = fields.DateTime()
    jitter = fields.Integer()

    @post_load
    def make(self, data, **_):
        """Build the trigger from the loaded keyword arguments."""
        return triggers.IntervalTrigger(**data)

    @post_dump
    def post_dump(self, output, **_):
        """Break the dumped ``interval`` (total seconds) into calendar units.

        NOTE(review): the method shares its name with the ``post_dump``
        decorator; this works because the decorator is resolved before the
        class attribute is bound, but the shadowing is easy to misread.
        """
        remainder = output['interval']
        remainder, seconds = divmod(remainder, 60)
        remainder, minutes = divmod(remainder, 60)
        remainder, hours = divmod(remainder, 24)
        weeks, days = divmod(remainder, 7)
        output.update(weeks=weeks, days=days, hours=hours,
                      minutes=minutes, seconds=seconds)
        return output
class IntervalClockSchema(ObjectSchema):
    """Schema for ``prefect.schedules.clocks.IntervalClock``.

    Both the dump and load paths enforce a minimum interval of one minute.
    """

    class Meta:
        object_class = prefect.schedules.clocks.IntervalClock

    start_date = DateTimeTZ(allow_none=True)
    end_date = DateTimeTZ(allow_none=True)
    # Serialized as an integer number of microseconds.
    interval = fields.TimeDelta(precision="microseconds", required=True)
    parameter_defaults = fields.Dict(keys=fields.Str(), values=JSONCompatible(),
                                     allow_none=True)
    labels = fields.List(fields.Str(), allow_none=True)

    @post_dump
    def _interval_validation(self, data: dict, **kwargs: Any) -> dict:
        """
        Ensures interval is at least one minute in length
        """
        # data["interval"] is in microseconds here; convert to seconds.
        if data["interval"] / 1e6 < 60:
            raise ValueError(
                "Interval can not be less than one minute when deploying to Prefect Cloud."
            )
        return data

    @post_load
    def create_object(self, data: dict, **kwargs: Any):
        """Validate the minimum interval, then delegate object creation."""
        if data["interval"].total_seconds() < 60:
            raise ValueError(
                "Interval can not be less than one minute when deploying to Prefect Cloud."
            )
        return super().create_object(data)
def test_invalid_timedelta_field_deserialization(self, in_value):
    """An uninterpretable value must raise ValidationError with a clear message.

    Fix: assert against ``str(excinfo.value)`` (the exception's own message)
    instead of ``str(excinfo)``, which renders the ExceptionInfo repr with
    file/line noise and is discouraged by pytest.
    """
    field = fields.TimeDelta(fields.TimeDelta.DAYS)
    with pytest.raises(ValidationError) as excinfo:
        field.deserialize(in_value)
    msg = '{0!r} cannot be interpreted as a valid period of time.'.format(
        in_value)
    assert msg in str(excinfo.value)
class CDRSchema(Schema):
    """Serialize a call detail record; pre-dump hooks derive computed fields."""

    id = fields.Integer()
    start = fields.DateTime(attribute='date')
    answer = fields.DateTime(attribute='date_answer')
    end = fields.DateTime(attribute='date_end')
    source_name = fields.String()
    source_extension = fields.String(attribute='source_exten')
    call_direction = fields.String(attribute='direction')
    destination_name = fields.String()
    destination_extension = fields.String(attribute='destination_exten')
    duration = fields.TimeDelta(default=None, attribute='marshmallow_duration')
    answered = fields.Boolean(attribute='marshmallow_answered')
    tags = fields.List(fields.String(), attribute='marshmallow_tags')

    @pre_dump
    def _compute_fields(self, data):
        """Derive 'answered' and 'duration' from the raw call timestamps."""
        data.marshmallow_answered = bool(data.date_answer)
        if data.date_answer and data.date_end:
            data.marshmallow_duration = data.date_end - data.date_answer
        return data

    @post_dump
    def fix_negative_duration(self, data):
        """Clamp a negative serialized duration to zero."""
        if data['duration'] is not None:
            data['duration'] = max(data['duration'], 0)
        return data

    @pre_dump
    def _populate_tags_field(self, data):
        """Collect the union of all participants' tags."""
        tags = set()
        for participant in data.participants:
            tags.update(participant.tags)
        data.marshmallow_tags = tags
        return data
class BulkCommandSchema(AutoSchema):
    """The schema of faraday/server/api/modules/commandsrun.py has a lot of
    ugly things because of the Web UI backwards compatibility.
    I don't need that here, so I'll write a schema from scratch."""

    # Duration is received as a number of microseconds.
    duration = fields.TimeDelta('microseconds', required=True)

    class Meta:
        model = Command
        fields = (
            'command', 'duration', 'start_date', 'ip', 'hostname',
            'params', 'user', 'creator', 'tool', 'import_source',
        )

    @post_load
    def load_end_date(self, data):
        """Replace the transient ``duration`` with a concrete ``end_date``.

        Bug fix: the hook previously returned ``None``. A marshmallow
        ``@post_load`` hook must return the (possibly modified) data,
        otherwise the whole deserialized result becomes ``None``.
        """
        duration = data.pop('duration')
        data['end_date'] = data['start_date'] + duration
        return data
class UserSchema(Schema):
    """User schema exercising a broad range of marshmallow field types."""

    name = fields.String(required=True, validate=validate.Length(min=1, max=255))
    age = fields.Float()
    created = fields.DateTime()
    # Read-only alternate renderings of the same 'created' attribute.
    created_formatted = fields.DateTime(format="%Y-%m-%d", attribute="created",
                                        dump_only=True)
    created_iso = fields.DateTime(format="iso", attribute="created",
                                  dump_only=True)
    updated = fields.DateTime()
    species = fields.String(attribute="SPECIES")
    id = fields.String(default="no-id")
    homepage = fields.Url()
    email = fields.Email()
    balance = fields.Decimal()
    registered = fields.Boolean()
    hair_colors = fields.List(fields.Raw)
    sex_choices = fields.List(fields.Raw)
    finger_count = fields.Integer()
    uid = fields.UUID()
    time_registered = fields.Time()
    birthdate = fields.Date()
    since_created = fields.TimeDelta()
    sex = fields.Str(validate=validate.OneOf(
        choices=["male", "female", "non_binary", "other"],
        labels=["Male", "Female", "Non-binary/fluid", "Other"],
    ))
    various_data = fields.Dict()
    addresses = fields.Nested(Address, many=True,
                              validate=validate.Length(min=1, max=3))
    github = fields.Nested(GithubProfile)
    const = fields.String(validate=validate.Length(equal=50))
class IntervalScheduleSchema(ObjectSchema):
    """Schema for ``prefect.schedules.IntervalSchedule``."""

    class Meta:
        object_class = prefect.schedules.IntervalSchedule

    start_date = DateTimeTZ(required=True)
    end_date = DateTimeTZ(allow_none=True)
    # Serialized as an integer number of microseconds.
    interval = fields.TimeDelta(precision="microseconds", required=True)
class RecipeSchema(Schema):
    """Serialize a recipe with its metadata, timings and ingredient list."""

    id = fields.UUID()
    name = fields.String()
    description = fields.String()
    author = fields.Nested('AuthorSchema')
    # Timings are all timedeltas.
    prep_time = fields.TimeDelta()
    cook_time = fields.TimeDelta()
    total_time = fields.TimeDelta()
    date_published = fields.DateTime()
    url = CustomStringField()
    category = fields.Nested('CategorySchema')
    vendor = fields.Nested('VendorSchema')
    language = fields.Nested('LanguageSchema')
    aggregate_rating = fields.Nested('AggregateRatingSchema')
    image = CustomStringField()
    ingredients = fields.List(fields.Nested('IngredientSchema'))
class UserSchema(Schema):
    """User schema (legacy marshmallow API: ``Select``, ``make_object``)."""

    name = fields.String()
    age = fields.Float()
    created = fields.DateTime()
    created_formatted = fields.DateTime(format="%Y-%m-%d", attribute="created")
    created_iso = fields.DateTime(format="iso", attribute="created")
    updated = fields.DateTime()
    updated_local = fields.LocalDateTime(attribute="updated")
    species = fields.String(attribute="SPECIES")
    id = fields.String(default="no-id")
    uppername = Uppercased(attribute='name')
    homepage = fields.Url()
    email = fields.Email()
    balance = fields.Price()
    is_old = fields.Method("get_is_old")
    lowername = fields.Function(lambda obj: obj.name.lower())
    registered = fields.Boolean()
    hair_colors = fields.List(fields.Raw)
    sex_choices = fields.List(fields.Raw)
    finger_count = fields.Integer()
    uid = fields.UUID()
    time_registered = fields.Time()
    birthdate = fields.Date()
    since_created = fields.TimeDelta()
    sex = fields.Select(['male', 'female'])

    def get_is_old(self, obj):
        """A user is 'old' above age 80; non-numeric ages become errors."""
        try:
            return obj.age > 80
        except TypeError as te:
            raise ValidationError(text_type(te))

    def make_object(self, data):
        """Materialize a User from the deserialized dict (marshmallow 1.x hook)."""
        return User(**data)
class NewNewdleSchema(mm.Schema):
    """Payload schema for creating a newdle."""

    title = fields.String(validate=validate.Length(min=3, max=80), required=True)
    # Duration must be a whole multiple of 15 minutes (900 seconds).
    duration = fields.TimeDelta(
        precision=fields.TimeDelta.MINUTES,
        required=True,
        validate=lambda x: x.total_seconds() % 900 == 0,
    )
    timezone = fields.String(validate=lambda x: x in common_timezones_set,
                             required=True)
    timeslots = fields.List(
        fields.DateTime(required=True, format=DATETIME_FORMAT),
        validate=bool,  # reject an empty list
        required=True,
    )
    participants = fields.List(
        fields.Nested(NewKnownParticipantSchema, unknown=EXCLUDE),
        missing=[],
    )
    private = fields.Boolean(required=True)
    notify = fields.Boolean(required=True)

    @validates('timeslots')
    def validate_timeslots(self, v):
        """Reject duplicate time slots."""
        if len(v) != len(set(v)):
            raise ValidationError('Time slots are not unique')
class PureFASnap(Schema):
    """Volume snapshot as reported by the FlashArray REST API."""

    source = fields.Str()
    serial = fields.Str()
    created = fields.DateTime()
    name = fields.Str()
    size = fields.Int()
    timeRemaining = fields.TimeDelta(
        load_from="time_remaining",
        allow_none=True,
    )  # in seconds
class PureFAVolume(Schema):
    """Volume as reported by the FlashArray REST API."""

    created = fields.DateTime(load_from="created")
    serial = fields.Str()
    source = fields.Str(allow_none=True)
    name = fields.Str()
    size = fields.Int()  # size in bytes
    timeRemaining = fields.TimeDelta(
        load_from="time_remaining",
        allow_none=True,
    )  # in seconds
class TestingFieldsSchema(Schema):
    """One field of each common type, for exercising field handling."""

    integer = fields.Integer()
    float = fields.Float()
    boolean = fields.Boolean()
    datetime = fields.DateTime()
    timedelta = fields.TimeDelta()
    dictionary = fields.Dict()
    url = fields.Url()
    email = fields.Email()
class PureFAPGroup(Schema):
    """Protection group as reported by the FlashArray REST API."""

    name = fields.Str()
    hgroups = fields.List(fields.Str(), allow_none=True)
    hosts = fields.List(fields.Str(), allow_none=True)
    volumes = fields.List(fields.Str(), allow_none=True)
    source = fields.Str()
    targets = fields.Nested(PureFAPGroupTarget, allow_none=True, many=True)
    timeRemaining = fields.TimeDelta(load_from="time_remaining",
                                     allow_none=True)
class PureFAPod(Schema):
    """Pod (stretched storage container) as reported by the FlashArray API."""

    name = fields.Str()
    source = fields.Str(allow_none=True)
    arrays = fields.Nested(PureFAPodArray, many=True)
    failoverPreference = fields.List(fields.Str(),
                                     load_from="failover_preference",
                                     allow_none=True)
    timeRemaining = fields.TimeDelta(load_from="time_remaining",
                                     allow_none=True)
class AuthSchema(Schema):
    """Deserialize an OAuth token response into an ``Auth`` object."""

    access_token = fields.Str()
    token_type = fields.Str()
    expires_in = fields.TimeDelta()
    scope = fields.Str()

    @post_load
    def load_auth(self, data, **kwargs):
        """Build the Auth object from the validated payload."""
        return Auth(**data)
def test_timedelta_field_deserialization(self):
    """Strings and numbers deserialize into ``datetime.timedelta`` values."""
    field = fields.TimeDelta()

    positive = field.deserialize('42')
    assert isinstance(positive, dt.timedelta)
    assert total_seconds(positive) == 42.0

    negative = field.deserialize('-42')
    assert total_seconds(negative) == -42.0

    fractional = field.deserialize(12.3)
    assert_almost_equal(total_seconds(fractional), 12.3)
class IntervalScheduleSchema(BaseCamelSchema):
    """Camel-cased schema for an interval schedule definition."""

    kind = fields.Str(allow_none=True, validate=validate.Equal("interval"))
    start_at = fields.DateTime(required=True)
    end_at = fields.DateTime(allow_none=True)
    frequency = fields.TimeDelta(required=True)
    depends_on_past = fields.Bool(allow_none=True)

    @staticmethod
    def schema_config():
        """Return the config class this schema maps onto."""
        return V1IntervalSchedule
class ConfigVariablesSchema(Schema):
    """Data Model for Application Configuration Environment Variables.

    Each ``data_key`` names the ``APPUSERS_*`` environment variable the
    setting is loaded from; unknown variables are ignored.
    """

    class Meta:
        unknown = EXCLUDE

    DEBUG = fields.Boolean(data_key='APPUSERS_DEBUG')
    ENV = fields.Str(data_key='APPUSERS_ENV')
    SERVER_NAME = fields.Str(data_key='APPUSERS_SERVER_NAME')
    APPLICATION_ROOT = fields.Str(data_key='APPUSERS_APPLICATION_ROOT')
    SQLALCHEMY_DATABASE_URI = fields.Str(data_key='APPUSERS_DATABASE_URI')
    API_KEY = fields.Str(data_key='APPUSERS_API_KEY')
    JWT_SECRET_KEY = fields.Str(data_key='APPUSERS_SECRET_KEY')
    # Expiry/timeout values are given in seconds.
    JWT_ACCESS_TOKEN_EXPIRES = fields.TimeDelta(
        precision='seconds', data_key='APPUSERS_ACCESS_TOKEN_EXPIRES')
    MAX_FAILED_LOGIN_ATTEMPTS = fields.Integer(
        validate=validate.Range(
            min=1,
            error='Max failed login attempts must be greater or equal {min}, is {input}'),
        data_key='APPUSERS_MAX_FAILED_LOGIN_ATTEMPTS')
    LOCK_TIMEOUT = fields.TimeDelta(
        precision='seconds', data_key='APPUSERS_LOCK_TIMEOUT')
class IntervalScheduleSchema(BaseSchema):
    """Interval schedule whose values may be references or literal objects."""

    kind = fields.Str(allow_none=True, validate=validate.Equal("interval"))
    start_at = RefOrObject(fields.LocalDateTime(required=True), required=True)
    end_at = RefOrObject(fields.LocalDateTime(allow_none=True))
    frequency = RefOrObject(fields.TimeDelta(required=True), required=True)
    depends_on_past = RefOrObject(fields.Bool(allow_none=True))

    @staticmethod
    def schema_config():
        """Return the config class this schema maps onto."""
        return IntervalScheduleConfig