class DeploymentTargetListResponseSchema(Schema):
    """Serialize a DeploymentTarget entry for list responses."""

    id = fields.Integer(required=True)
    name = fields.String(required=True)
    description = fields.String(required=False, allow_none=True)
    enabled = fields.Boolean(required=True)
    # target_type is restricted to the names declared on DeploymentType.
    target_type = fields.String(
        required=True,
        validate=[OneOf(list(DeploymentType.__dict__.keys()))])
    descriptor = fields.String(required=False, allow_none=True)

    # noinspection PyUnresolvedReferences
    @post_load
    def make_object(self, data):
        """Build a DeploymentTarget instance from the loaded dict."""
        return DeploymentTarget(**data)

    class Meta:
        ordered = True
class QobjSchema(BaseSchema):
    """Schema for Qobj."""

    # Identification fields every Qobj carries.
    qobj_id = String(required=True)
    schema_version = String(required=True)

    # Nested structure; shape of config/experiments depends on the Qobj type.
    config = Nested(QobjConfigSchema, required=True)
    experiments = Nested(QobjExperimentSchema, required=True, many=True)
    header = Nested(QobjHeaderSchema, required=True)
    type = String(required=True,
                  validate=OneOf(choices=(QobjType.QASM, QobjType.PULSE)))

    @pre_load
    def add_schema_version(self, data, **_):
        """Stamp the current schema version onto incoming data."""
        data['schema_version'] = QOBJ_VERSION
        return data
class JobStepLogCreateRequestSchema(Schema):
    """ JSON serialization schema """
    id = fields.Integer(allow_none=True)
    level = fields.String(required=True)
    status = fields.String(
        required=True,
        validate=[OneOf(list(StatusExecution.__dict__.keys()))])
    date = fields.DateTime(required=True)
    message = fields.String(required=True)
    # BUG FIX: the field declared both required=True and missing='TEXT';
    # marshmallow rejects that combination with
    # ValueError("'missing' must not be set for required fields.").
    # Since a default is supplied, the field is effectively optional.
    type = fields.String(required=False, missing='TEXT', default='TEXT')

    # noinspection PyUnresolvedReferences
    @post_load
    def make_object(self, data):
        """ Deserialize data into an instance of JobStepLog"""
        return JobStepLog(**data)

    class Meta:
        ordered = True
class InvitationSchema(BaseSchema):
    """Validation schema for tenant user invitations."""

    userIntID = EmqInteger(allow_none=True)
    inviteEmail = EmqEmail(required=True)
    roleIntID = EmqInteger(required=True)
    tenantID = EmqString(allow_none=True, len_max=9)
    # 0/1 presumably encodes pending/accepted — TODO confirm against model.
    inviteStatus = EmqInteger(allow_none=True, validate=OneOf([0, 1]))

    @validates('inviteEmail')
    def validate_invite_email(self, value):
        # Reject if an invitation for this address already exists
        # within the current tenant...
        email = Invitation.query \
            .filter_tenant(tenant_uid=g.tenant_uid)\
            .filter(Invitation.inviteEmail == value).first()
        if email:
            raise DataExisted(field='inviteEmail')
        # ...or if any user account already uses the address.
        email = User.query.filter(User.email == value).first()
        if email:
            raise DataExisted(field='email')
class UserSchema(ma.SQLAlchemyAutoSchema):
    """Auto schema for the User model; password is load-only, admin dump-only."""

    class Meta:
        model = User
        load_only = ["password"]
        dump_only = ["admin"]

    email = ma.String(required=True, validate=Length(min=4))
    password = ma.String(required=True, validate=Length(min=6))
    # ISO 3166-1 alpha-2 style two-letter code — TODO confirm with callers.
    country = ma.String(validate=Length(equal=2))
    display_name = ma.String(validate=Length(max=30))
    href = ma.String()
    product = ma.String(
        validate=[Length(max=20),
                  OneOf(["free", "premium", "student", "duo", "family"])])
    object_type = ma.String(validate=Equal("user"))
    uri = ma.String()
    admin = ma.Boolean()
class PermissionListResponseSchema(Schema):
    """Serialize a Permission entry for list responses."""

    id = fields.Integer(required=True)
    name = fields.String(required=True)
    description = fields.String(required=True)
    # Optional: names declared on AssetType restrict the value.
    applicable_to = fields.String(
        required=False,
        allow_none=True,
        validate=[OneOf(list(AssetType.__dict__.keys()))])

    # noinspection PyUnresolvedReferences
    @post_load
    def make_object(self, data):
        """Build a Permission instance from the loaded dict."""
        return Permission(**data)

    class Meta:
        ordered = True
class ChangeCitizenSchema(Schema):
    """Validate a partial citizen-update payload (at least one field)."""

    town = fields.Str(validate=validateStringFileds)
    street = fields.Str(validate=validateStringFileds)
    building = fields.Str(validate=validateStringFileds)
    apartment = fields.Number(strict=True, validate=isPositiveNumber)
    name = fields.Str(validate=validateName)
    gender = fields.Str(validate=OneOf(['male', 'female']))
    birth_date = fields.DateTime(strict=True, format='%d.%m.%Y',
                                 validate=isPastDate)
    relatives = fields.List(fields.Integer())

    class Meta:
        strict = True
        unknown = RAISE

    @validates_schema
    def validate_notnull(self, data, **kwargs):
        """Reject an empty update payload."""
        if not data:
            raise ValidationError('Data should contains more then 0 fields')
class JobResultItemResponseSchema(BaseSchema):
    """Serialize a single job result item."""

    title = fields.String(required=False, allow_none=True)
    type = fields.String(
        required=True,
        validate=[OneOf(list(ResultType.__dict__.keys()))])
    content = fields.String(required=False, allow_none=True)
    # Related entities are exposed as {"id": ...} dicts on dump.
    task = fields.Function(lambda x: {"id": x.task_id})
    operation = fields.Function(lambda x: {"id": x.operation_id})

    # noinspection PyUnresolvedReferences
    @post_load
    def make_object(self, data, **kwargs):
        """Build a JobResult instance from the loaded dict."""
        return JobResult(**data)

    class Meta:
        ordered = True
        unknown = EXCLUDE
class BaseRequestSchema(BaseSchema):
    """Common query parameters shared by location-based requests."""

    # Address components.
    area = fields.String()
    street = fields.String()
    town = fields.String()
    postcode = fields.String()
    county = fields.String()
    country = fields.String()

    # Point coordinates and bounding-box limits.
    latitude = fields.String()
    longitude = fields.String()
    lat_min = fields.String()
    lat_max = fields.String()
    lon_min = fields.String()
    lon_max = fields.String()

    output_type = fields.String(allow_none=False)
    area_type = fields.String(
        allow_none=False,
        validate=OneOf(choices=('streets', 'postcodes', 'outcodes',
                                'areas', 'towns', 'counties')))
class Environment:
    """Application configuration, read from the environment at import time.

    NOTE: all attributes below are evaluated when the class body runs, so a
    missing required variable or Docker secret fails fast at import.
    """

    # Load a local .env file (if present) into the process environment.
    env = Env()
    env.read_env()

    PREFIX = env.str("PREFIX")
    PYTHON_ENV = env.str("PYTHON_ENV", validate=OneOf(
        ["production", "development"],
        error="PYTHON_ENV must be one of: {choices}"))
    TEST_DATA = env.bool("TEST_DATA", False)
    # Secrets are read via Docker secrets, not plain environment variables.
    DISCORD_TOKEN = get_docker_secret("DISCORD_TOKEN", autocast_name=False)

    # Database connection settings.
    DB_NAME = env.str("DB_NAME")
    DB_HOST = env.str("DB_HOST")
    DB_PORT = env.int("DB_PORT")
    DB_USER = env.str("DB_USER")
    DB_USER_PASS = get_docker_secret("DB_USER_PASS", autocast_name=False)

    # External CR API access (token via Docker secret).
    CR_API_TOKEN = get_docker_secret("CR_API_TOKEN", autocast_name=False)
    CR_API_URL = env.str("CR_API_URL")
class RelocateSchema(Schema):
    """Validate a call-relocate request; call fields map to channel attrs."""

    uuid = fields.Str(required=True, validate=Length(equal=36))
    relocated_call = fields.Str(required=True,
                                attribute='relocated_channel',
                                validate=Length(min=1))
    initiator_call = fields.Str(required=True,
                                attribute='initiator_channel',
                                validate=Length(min=1))
    recipient_call = fields.Str(required=True,
                                attribute='recipient_channel',
                                validate=Length(min=1))
    completions = fields.List(fields.Str(validate=OneOf(VALID_COMPLETIONS)),
                              missing=['answer'])
    initiator = fields.Str(required=True, validate=Length(equal=36))
    timeout = fields.Integer(validate=Range(min=1), missing=30)

    class Meta:
        strict = True
class MeaningfulLocationsBetweenLabelODMatrixSchema(AggregationUnitMixin,
                                                    BaseSchema):
    """Parameter schema for the between-label OD-matrix query."""

    # query_kind is needed here for claims validation.
    query_kind = fields.String(
        validate=OneOf(["meaningful_locations_between_label_od_matrix"]))
    start_date = ISODateTime(required=True)
    end_date = ISODateTime(required=True)
    label_a = fields.String(required=True)
    label_b = fields.String(required=True)
    # TODO: use custom field here for stricter validation!
    labels = fields.Dict(keys=fields.String(), values=fields.Dict())
    tower_hour_of_day_scores = TowerHourOfDayScores(required=True)
    tower_day_of_week_scores = TowerDayOfWeekScores(required=True)
    tower_cluster_radius = fields.Float(required=False, default=1.0)
    tower_cluster_call_threshold = fields.Integer(required=False, default=0)
    event_types = EventTypes()
    subscriber_subset = SubscriberSubset(required=False)

    __model__ = MeaningfulLocationsBetweenLabelODMatrixExposed
class HangupDestinationSchema(BaseDestinationSchema):
    """Destination schema for hangup actions.

    The API exposes cause 'normal' while storage uses subtype 'hangup';
    the dump/load hooks below translate between the two representations.
    """

    cause = fields.String(
        validate=OneOf(['busy', 'congestion', 'normal']),
        attribute='subtype',
        required=True,
    )

    @post_dump
    def convert_cause_to_user(self, data, **kwargs):
        # Stored subtype 'hangup' is presented to API users as 'normal'.
        if data['cause'] == 'hangup':
            data['cause'] = 'normal'
        return data

    @post_load
    def convert_cause_to_database(self, data, **kwargs):
        # API value 'normal' is persisted as subtype 'hangup'.
        if data['subtype'] == 'normal':
            data['subtype'] = 'hangup'
        return data
class GetDataSchema(Schema):
    """Schema for the `/api/v1.0/data?...` endpoint.

    * dataset: dataset key (e.g. giops_day)
    * variable: variable key (e.g. votemper)
    * time: time index (e.g. 0)
    * depth: depth index (e.g. 49)
    * geometry_type: the "shape" of the data being requested
    """

    dataset = fields.Str(required=True)
    variable = fields.Str(required=True)
    time = fields.Integer(required=True)
    depth = fields.Integer(required=True, validate=Range(min=0))
    geometry_type = fields.Str(required=True,
                               validate=OneOf({"point", "line", "area"}))
class ContentShareSchema(marshmallow.Schema):
    """Serialization schema for a content share entry."""

    email = RFCEmail(example="*****@*****.**",
                     required=True,
                     validate=share_email_validator)
    share_token = marshmallow.fields.String(
        description="token of the content_share",
        example="444b026a068d42d6ab5e12fde08efb7b")
    has_password = marshmallow.fields.Boolean(required=True)
    share_group_uuid = marshmallow.fields.String(required=True)
    share_id = marshmallow.fields.Int(
        example=4,
        required=True,
        description="id of this share",
        validate=strictly_positive_int_validator,
    )
    content_id = marshmallow.fields.Integer(
        example=6,
        validate=strictly_positive_int_validator,
        description="content id of the content shared.",
    )
    created = marshmallow.fields.DateTime(
        format=DATETIME_FORMAT,
        description="Share creation date")
    disabled = marshmallow.fields.DateTime(
        format=DATETIME_FORMAT,
        description="Share disabled date",
        allow_none=True)
    is_disabled = marshmallow.fields.Boolean(
        required=True,
        description="is this share disabled ?")
    url = marshmallow.fields.URL(
        example=
        "http://localhost:6543/ui/guest-download/444b026a068d42d6ab5e12fde08efb7b"
    )
    direct_url = marshmallow.fields.URL(
        allow_none=True,
        example=
        "http://localhost:6543/api/public/guest-download/444b026a068d42d6ab5e12fde08efb7b/myfile.txt",
    )
    author_id = marshmallow.fields.Integer(
        example=3,
        validate=strictly_positive_int_validator,
        required=True)
    author = marshmallow.fields.Nested(UserDigestSchema)
    # Value restricted to ContentShareType enum values (e.g. 'email').
    type = marshmallow.fields.String(
        validate=OneOf([share_type.value for share_type in ContentShareType]),
        example=ContentShareType.EMAIL.value,
        description="type of sharing",
    )
class WorkflowItemResponseSchema(Schema):
    """ JSON serialization schema """
    # BUG FIX: several fields declared both required=True and missing=...;
    # marshmallow rejects that combination with
    # ValueError("'missing' must not be set for required fields.").
    # Fields that carry a ``missing`` default are now declared optional,
    # which preserves the intended behavior (absent -> default value).
    id = fields.Integer(required=True)
    name = fields.String(required=True)
    description = fields.String(required=False, allow_none=True)
    enabled = fields.Boolean(required=False, missing=True)
    created = fields.DateTime(required=False,
                              missing=datetime.datetime.utcnow)
    updated = fields.DateTime(required=False,
                              missing=datetime.datetime.utcnow)
    version = fields.Integer(required=True)
    image = fields.String(required=False, allow_none=True)
    is_template = fields.Boolean(required=False, missing=False)
    is_system_template = fields.Boolean(required=False, missing=False)
    is_public = fields.Boolean(required=False, missing=False)
    forms = fields.Function(lambda x: load_json(x.forms))
    deployment_enabled = fields.Boolean(required=False, missing=False)
    type = fields.String(required=False, missing=WorkflowType.WORKFLOW,
                         validate=[OneOf(list(WorkflowType.__dict__.keys()))])
    tasks = fields.Nested('tahiti.schema.TaskItemResponseSchema',
                          allow_none=True,
                          many=True)
    flows = fields.Nested('tahiti.schema.FlowItemResponseSchema',
                          allow_none=True,
                          many=True)
    platform = fields.Nested('tahiti.schema.PlatformItemResponseSchema',
                             required=True)
    # Owner information flattened into a small dict on dump.
    user = fields.Function(lambda x: {
        "id": x.user_id,
        "name": x.user_name,
        "login": x.user_login
    })
    permissions = fields.Nested(
        'tahiti.schema.WorkflowPermissionItemResponseSchema',
        allow_none=True,
        many=True)

    # noinspection PyUnresolvedReferences
    @post_load
    def make_object(self, data):
        """ Deserialize data into an instance of Workflow"""
        return Workflow(**data)

    class Meta:
        ordered = True
class TrackSchema(MobileClientSchema):
    """Combination of :class:`StoreTrackSchema` and :class:`UploadedTrackSchema`."""

    album = fields.Str(required=True)
    albumArtRef = fields.Nested(ImageRefSchema, many=True)
    albumArtist = fields.Str()
    albumAvailableForPurchase = fields.Bool()
    albumId = fields.Str()
    artist = fields.Str(required=True)
    artistArtRef = fields.Nested(ImageRefSchema, many=True)
    artistId = fields.List(fields.Str)
    beatsPerMinute = fields.Integer()
    clientId = fields.Str()
    comment = fields.Str()
    composer = fields.Str()
    creationTimestamp = fields.Str()
    deleted = fields.Bool()
    discNumber = fields.Int()
    # Durations and timestamps arrive as strings from the API.
    durationMillis = fields.Str(required=True)
    estimatedSize = fields.Str()
    explicitType = fields.Str()
    genre = fields.Str()
    id = fields.Str()
    # Object-type marker; must always be the literal 'sj#track'.
    kind = fields.Str(required=True, validate=Equal('sj#track'))
    lastModifiedTimestamp = fields.Str()
    lastRatingChangeTimestamp = fields.Str()
    nid = fields.Str()
    playCount = fields.Int()
    primaryVideo = fields.Nested(VideoSchema)
    # '0' = Not Rated, '1' = Thumbs Down, '5' = Thumbs Up.
    rating = fields.Str(validate=OneOf(
        ['0', '1', '5'],
        labels=('Not Rated', 'Thumbs Down', 'Thumbs Up'),
        error="rating is not one of {choices} ({labels}).",
    ))
    recentTimestamp = fields.Str()
    storeId = fields.Str()
    title = fields.Str(required=True)
    totalDiscCount = fields.Int()
    totalTrackCount = fields.Int()
    trackAvailableForPurchase = fields.Bool()
    trackAvailableForSubscription = fields.Bool()
    trackNumber = fields.Int()
    trackType = fields.Str()
    year = fields.Int()
class UserSchema(UserDigestSchema):
    """ Complete user schema """
    email = marshmallow.fields.Email(required=True, example="*****@*****.**")
    created = marshmallow.fields.DateTime(
        format=DATETIME_FORMAT,
        description="Date of creation of the user account"
    )
    is_active = marshmallow.fields.Bool(
        example=True,
        description="true if the user is active, "
        "false if the user has been deactivated"
        " by an admin. Default is true",
    )
    is_deleted = marshmallow.fields.Bool(
        example=False,
        description="true if the user account has been deleted. "
        "Default is false"
    )
    # TODO - G.M - 17-04-2018 - Restrict timezone values
    timezone = StrippedString(
        description=FIELD_TIMEZONE_DESC,
        example="Europe/Paris",
        validate=user_timezone_validator
    )
    profile = StrippedString(
        attribute="profile",
        validate=user_profile_validator,
        example="trusted-users",
        description=FIELD_PROFILE_DESC,
    )
    lang = StrippedString(
        description=FIELD_LANG_DESC,
        example="en",
        required=False,
        validate=user_lang_validator,
        allow_none=True,
        default=None,
    )
    # Value restricted to AuthType enum values (e.g. 'internal').
    auth_type = marshmallow.fields.String(
        validate=OneOf([auth_type_en.value for auth_type_en in AuthType]),
        example=AuthType.INTERNAL.value,
        description="authentication system of the user",
    )

    class Meta:
        description = "Representation of a tracim user account"
class ConnectionSchema(BaseSchema):
    """Serialization schema for a session Connection.

    Most fields are dump-only: they are populated by the server and
    exposed to clients; only ``data`` is accepted on load.
    """

    id = ma.fields.String(dump_only=True, description="Identifier of the Connection")
    data = ma.fields.String(
        load_only=True,
        description="Metadata associated to this Connection. This populates property `server_data` of the Connection object",
    )
    status = ma.fields.String(
        dump_only=True,
        validate=OneOf(["pending", "active"]),
        description="Status of the Connection",
    )
    sessionId = SessionId(
        data_key="session_id",
        dump_only=True,
        description="Identifier of the Session to which the user is connected",
    )
    createdAt = Timestamp(
        data_key="created_at",
        dump_only=True,
        description="Time when the connection was created",
    )
    activeAt = Timestamp(
        data_key="active_at",
        dump_only=True,
        allow_none=True,
        description="Time when the Connection was taken by a user by calling method Session.connect with the Connection's token property",
    )
    platform = ma.fields.String(
        dump_only=True,
        allow_none=True,
        description="Complete description of the platform used by the participant to connect to the Session",
    )
    token = ma.fields.String(dump_only=True, description="Token of the Connection")
    serverData = ma.fields.String(
        data_key="server_data",
        dump_only=True,
        description="Data assigned to the Connection in your application's server-side when creating the Connection",
    )
    clientData = ma.fields.String(
        data_key="client_data",
        dump_only=True,
        allow_none=True,
        description="Data assigned to the Connection in your application's client-side when calling Session.connect",
    )
class SimpleDenoiseInputSchema(argschema.ArgSchema, DenoiseBaseSchema):
    """Input parameters for the simple temporal-denoise job."""

    log_level = argschema.fields.LogLevel(default="INFO")
    size = argschema.fields.Float(
        required=True,
        description=("filter size for the time axis. "
                     "If filter_type is 'uniform' this value will be cast "
                     "to an integer and used as a boxcar width. If "
                     "filter_type is 'gaussian', this value remains a float "
                     "and is the sigma for the Gaussian filter."))
    filter_type = argschema.fields.Str(
        required=True,
        validate=OneOf(["uniform", "gaussian"]),
        description=("the type of temporal filter to apply to each pixel's "
                     "trace."))
    # With the default of 1 the job runs single-process.
    n_parallel_workers = argschema.fields.Int(
        required=False,
        default=1,
        description=("how many multiprocessing workers to use. If set to "
                     "1, multiprocessing is not invoked."))
class RevisionSchema(ContentDigestSchema):
    """Content digest extended with revision metadata."""

    # Ids of comments attached to this revision.
    comment_ids = marshmallow.fields.List(
        marshmallow.fields.Int(
            example=4,
            validate=Range(min=1, error="Value must be greater than 0"),
        ))
    revision_id = marshmallow.fields.Int(
        example=12,
        validate=Range(min=1, error="Value must be greater than 0"),
    )
    # Value restricted to the ActionDescription allowed set.
    revision_type = marshmallow.fields.String(
        example=ActionDescription.CREATION,
        validate=OneOf(ActionDescription.allowed_values()),
    )
    created = marshmallow.fields.DateTime(
        format=DATETIME_FORMAT,
        description='Content creation date',
    )
    author = marshmallow.fields.Nested(UserDigestSchema)
class MetadataConfig(Schema):
    """Configuration schema for acquisition-framework (AF) metadata settings.

    The French string defaults below are user-facing values and are kept
    verbatim.
    """

    # Page size for the AF list; limited to fixed choices.
    NB_AF_DISPLAYED = fields.Integer(missing=50,
                                     validate=OneOf([10, 25, 50, 100]))
    ENABLE_CLOSE_AF = fields.Boolean(missing=False)
    AF_SHEET_CLOSED_LINK_NAME = fields.String(
        missing="Lien du certificat de dépôt")
    CLOSED_AF_TITLE = fields.String(missing="")
    AF_PDF_TITLE = fields.String(missing="Cadre d'acquisition: ")
    DS_PDF_TITLE = fields.String(missing="")
    # Pieces of the "AF closed" notification mail.
    MAIL_SUBJECT_AF_CLOSED_BASE = fields.String(missing="")
    MAIL_CONTENT_AF_CLOSED_ADDITION = fields.String(missing="")
    MAIL_CONTENT_AF_CLOSED_PDF = fields.String(missing="")
    MAIL_CONTENT_AF_CLOSED_URL = fields.String(missing="")
    MAIL_CONTENT_AF_CLOSED_GREETINGS = fields.String(missing="")
    # Texts shown in the "close AF" confirmation modal.
    CLOSED_MODAL_LABEL = fields.String(missing="Fermer un cadre d'acquisition")
    CLOSED_MODAL_CONTENT = fields.String(
        missing="""L'action de fermeture est irréversible. Il ne sera plus possible d'ajouter des jeux de données au cadre d'acquisition par la suite."""
    )
def __init__(self, required=False, validate=None, allow_none=True, missing=None, **kwargs):
    """Initialise the event-types list field.

    Validation is fixed to the known event types, so a caller-supplied
    'validate' argument is rejected.

    Raises:
        ValueError: if 'validate' is passed.
    """
    if validate is not None:
        # BUG FIX: error message read "a the 'validate' argument".
        raise ValueError(
            "The EventTypes field provides its own validation "
            "and thus does not accept the 'validate' argument.")
    # List of event-type strings; the outer Length(min=1) forbids an
    # empty list while allow_none still permits None.
    super().__init__(
        fields.String(validate=OneOf(["calls", "sms", "mds", "topups"])),
        required=required,
        validate=Length(min=1),
        allow_none=allow_none,
        missing=missing,
        **kwargs,
    )
class FilterSetPutSchema(FilterSetSchema):
    """Validation schema for updating a filter set."""

    name = fields.String(required=False, allow_none=False,
                         validate=Length(0, 500))
    description = fields.String(required=False, allow_none=False,
                                validate=[Length(1, 1000)])
    json_metadata = fields.String(required=False, allow_none=False)
    owner_type = fields.String(required=False, allow_none=False,
                               validate=OneOf([DASHBOARD_OWNER_TYPE]))

    @post_load
    def validate(  # pylint: disable=unused-argument
        self, data: Mapping[Any, Any], *, many: Any, partial: Any
    ) -> Dict[str, Any]:
        """Validate the embedded JSON metadata when it is present."""
        if JSON_METADATA_FIELD in data:
            self._validate_json_meta_data(data[JSON_METADATA_FIELD])
        return cast(Dict[str, Any], data)
class PaymentValidator(Schema):
    """ validates order creation request """

    order_id = fields.Str(required=True)
    amount = fields.Integer(required=True)
    currency = fields.Str(required=True)
    type = fields.Str(required=True,
                      validate=OneOf(["creditcard", "debitcard"]))
    card = fields.Nested(Card, required=True)

    @validates('order_id')
    def order_exists(self, value):
        """Ensure the referenced order exists in the database."""
        if not order_table.query.filter_by(order_id=value).first():
            raise ValidationError("Order does not exist!")

    def handle_error(self, exc, data, **kwargs):
        # todo: log error to splunk, ES etc
        logging.error(exc.messages)
class MeaningfulLocationsAggregateSchema(
    StartAndEndField,
    EventTypesField,
    SubscriberSubsetField,
    AggregationUnitMixin,
    BaseSchema,
):
    """Parameter schema for the meaningful-locations aggregate query."""

    # query_kind parameter is required here for claims validation
    query_kind = fields.String(
        validate=OneOf(["meaningful_locations_aggregate"]))
    label = fields.String(required=True)
    # TODO: use custom field here for stricter validation!
    labels = fields.Dict(required=True,
                         keys=fields.String(),
                         values=fields.Dict())
    tower_hour_of_day_scores = TowerHourOfDayScores(required=True)
    tower_day_of_week_scores = TowerDayOfWeekScores(required=True)
    tower_cluster_radius = fields.Float(required=False, default=1.0)
    tower_cluster_call_threshold = fields.Integer(required=False, default=0)

    __model__ = MeaningfulLocationsAggregateExposed
class EventSchema(Schema):
    """Event loader schema."""

    # Maps the raw 'EventType' string to its enum member.
    EVENT_TYPE_MAP = {
        'RelationshipCreated': EventType.RelationshipCreated,
        'RelationshipDeleted': EventType.RelationshipDeleted,
    }

    id = fields.UUID(required=True, load_from='ID')
    event_type = fields.Method(deserialize='get_event_type', required=True,
                               validate=OneOf(EventType),
                               load_from='EventType')
    description = fields.Str(load_from='Description')
    creator = fields.Str(required=True, load_from='Creator')
    source = fields.Str(required=True, load_from='Source')
    payload = fields.Method(deserialize='get_payload', required=True,
                            load_from='Payload')
    time = fields.Method(deserialize='get_time', required=True,
                         load_from='Time')

    @pre_load
    def store_original_payload(self, data):
        """Store a copy the entire original payload."""
        # Kept in the schema context so get_payload can return the
        # untouched input later in the load.
        self.context['original_payload'] = data

    def get_event_type(self, obj):
        """Get the enum value for type of the event."""
        # Unknown type names resolve to `missing`, which the
        # required=True field then rejects.
        return self.EVENT_TYPE_MAP.get(obj, missing)

    def get_time(self, obj):
        """Parse the time value of the event."""
        try:
            return arrow.get(obj).datetime
        except ParserError as e:
            raise ValidationError("Invalid time format: {0}. ISO 8601 UTC "
                                  "timestamp required.".format(obj))

    def get_payload(self, obj):
        """Get the previously stored original payload."""
        return self.context['original_payload']
class ClusterCreateRequestSchema(BaseSchema):
    """Request schema for creating a Cluster."""

    name = fields.String(required=True)
    description = fields.String(required=True)
    enabled = fields.Boolean(required=True)
    # Cluster type defaults to local Spark when omitted.
    type = fields.String(required=False,
                         allow_none=True,
                         missing=ClusterType.SPARK_LOCAL,
                         default=ClusterType.SPARK_LOCAL,
                         validate=[OneOf(list(ClusterType.__dict__.keys()))])
    address = fields.String(required=True)
    # Executor sizing, with conservative defaults.
    executors = fields.Integer(required=False, allow_none=True,
                               missing=1, default=1)
    executor_cores = fields.Integer(required=False, allow_none=True,
                                    missing=1, default=1)
    executor_memory = fields.String(required=False, allow_none=True,
                                    missing='1M', default='1M')
    auth_token = fields.String(required=False, allow_none=True)
    ui_parameters = fields.String(required=False, allow_none=True)
    general_parameters = fields.String(required=False, allow_none=True)
    flavors = fields.Nested('stand.schema.ClusterFlavorCreateRequestSchema',
                            allow_none=True,
                            many=True)
    platforms = fields.Nested(
        'stand.schema.ClusterPlatformCreateRequestSchema',
        allow_none=True,
        many=True)

    # noinspection PyUnresolvedReferences
    @post_load
    def make_object(self, data, **kwargs):
        """Build a Cluster instance from the loaded dict."""
        return Cluster(**data)

    class Meta:
        ordered = True
        unknown = EXCLUDE
class _CommonLTILaunchSchema(PyramidRequestSchema):
    """Fields common to different types of LTI launches."""

    locations = ["form"]

    # Mandatory launch parameters.
    context_id = fields.Str(required=True)
    context_title = fields.Str(required=True)
    lti_version = fields.Str(required=True, validate=OneOf(["LTI-1p0"]))
    oauth_consumer_key = fields.Str(required=True)
    tool_consumer_instance_guid = fields.Str(required=True)
    user_id = fields.Str(required=True)

    # Optional parameters.
    custom_canvas_api_domain = fields.Str()
    custom_canvas_course_id = fields.Str()
    launch_presentation_return_url = fields.Str()
    lis_person_name_full = fields.Str()
    lis_person_name_family = fields.Str()
    lis_person_name_given = fields.Str()
    tool_consumer_info_product_family_code = fields.Str()
class MeaningfulLocationsAggregateSchema(Schema):
    """Parameter schema for the meaningful-locations aggregate query."""

    # query_kind parameter is required here for claims validation
    query_kind = fields.String(
        validate=OneOf(["meaningful_locations_aggregate"]))
    start_date = fields.Date(required=True)
    end_date = fields.Date(required=True)
    aggregation_unit = AggregationUnit(required=True)
    label = fields.String(required=True)
    # TODO: use custom field here for stricter validation!
    labels = fields.Dict(required=True,
                         keys=fields.String(),
                         values=fields.Dict())
    tower_hour_of_day_scores = TowerHourOfDayScores(required=True)
    tower_day_of_week_scores = TowerDayOfWeekScores(required=True)
    tower_cluster_radius = fields.Float(required=False, default=1.0)
    tower_cluster_call_threshold = fields.Integer(required=False, default=0)
    subscriber_subset = SubscriberSubset(required=False)

    @post_load
    def make_query_object(self, params, **kwargs):
        """Build the exposed query object from the validated params."""
        return MeaningfulLocationsAggregateExposed(**params)