class SynthSettingSchema(ma.ModelSchema, BaseSchema):
    """Model schema for a synth setting.

    Embeds only the identifying fields of the related synth.
    """

    # Trim the nested synth down to its identifying columns.
    synth = fields.Nested('SynthSchema', only=('synth_name', 'id', 'jam_id'))

    class Meta:
        model = SynthSetting
class EditingRevisionSignedSchema(EditingRevisionSchema):
    """Revision schema variant whose file entries carry signing data."""

    # One signed-file entry per file attached to the revision.
    files = fields.List(fields.Nested(EditingRevisionSignedFileSchema))
class ServiceActionResultSchema(mm.Schema):
    """Payload returned by a review service action."""

    # Whether the action resulted in publication.
    publish = fields.Boolean()
    # Review comments produced by the action.
    comments = fields.List(fields.Nested(ReviewCommentSchema))
    # Numeric tag ids attached by the action.
    tags = fields.List(fields.Int())
    # Optional URL the client should redirect to.
    redirect = fields.String()
class Job_SubcategorySchema(ma.Schema):
    """Serializes a job subcategory together with its parent category."""

    # Primary key; never accepted on load.
    id = fields.Integer(dump_only=True)
    name = fields.String()
    # Parent category, resolved lazily by name to avoid import cycles.
    category = fields.Nested('Job_CategorySchema')
class FunctionListSchema(Schema):
    """Envelope holding a list of serialized functions."""

    items = fields.List(fields.Nested(FunctionSchema()))
class AuthOutputSchema(OutputSchema):
    """Successful-authentication response: the user plus an auth token."""

    user = fields.Nested(UserOutputSchema, required=True)
    token = fields.String(required=True)
# NOTE(review): cookiecutter template fragment (``{{ cookiecutter.pid_name }}``
# placeholders) — not literal Python until rendered.  The leading contributor
# fields (name/role/affiliations/email) belong to a class whose ``class``
# header is not visible in this chunk; left untouched.
# NOTE(review): ``keywords = fields.Nested(fields.Str(), many=True)`` nests a
# plain field rather than a Schema — presumably ``fields.List(fields.Str())``
# was intended; confirm against the marshmallow version in use.
name = fields.Str(required=True) role = fields.Str() affiliations = fields.List(fields.Str()) email = fields.Str() class MetadataSchemaV1(StrictKeysMixin): """Schema for the record metadata.""" def get_{{ cookiecutter.pid_name }}(self, obj): """Get record id.""" pid = self.context.get('pid') return pid.pid_value if pid else missing {{ cookiecutter.pid_name }} = fields.Method(deserialize='get_{{ cookiecutter.pid_name }}') title = SanitizedUnicode(required=True) keywords = fields.Nested(fields.Str(), many=True) publication_date = DateString() contributors = fields.Nested(ContributorSchemaV1, many=True, required=True) class RecordSchemaV1(StrictKeysMixin): """Record schema.""" metadata = fields.Nested(MetadataSchemaV1) created = fields.Str(dump_only=True) revision = fields.Integer(dump_only=True) updated = fields.Str(dump_only=True) id = fields.Number( required=True, attribute='metadata.{{ cookiecutter.pid_name }}')
class v1alpha1_ToolConfigAzureSchema(Schema):
    """Azure tool configuration: auth, cloud-partner, image and storage."""

    auth = fields.Nested(v1alpha1_ToolConfigAzureAuthSchema)
    cloudpartner = fields.Nested(v1alpha1_ToolConfigAzureCloudpartnerSchema)
    image = fields.Nested(v1alpha1_ToolConfigAzureImageSchema)
    storage = fields.Nested(v1alpha1_ToolConfigAzureStorageSchema)
class v1alpha1_ToolConfigEc2Schema(Schema):
    """EC2 tool configuration: auth, image and storage sections."""

    auth = fields.Nested(v1alpha1_ToolConfigEc2AuthSchema)
    image = fields.Nested(v1alpha1_ToolConfigEc2ImageSchema)
    storage = fields.Nested(v1alpha1_ToolConfigEc2StorageSchema)
class LiteratureReferencesSchemaJSONUIV1(JSONSchemaUIV1):
    """Schema for references."""

    # Dump-only: references metadata is never accepted on load.
    metadata = fields.Nested(MetadataReferencesSchemaUIV1, dump_only=True)
# Serializes a literature record's metadata for the UI.  Declarative fields
# pass most keys through raw; a handful are nested sub-schemas or computed
# via ``fields.Method`` helpers defined at the bottom of the class.
# NOTE(review): the many ``# field = fields.Raw()`` fragments are
# commented-out field declarations preserved from the original source; the
# line collapse makes them swallow the text that follows — reflow before
# editing this class.
# NOTE(review): ``authors`` is capped at 10 entries via ``ListWithLimit``;
# ``collaborations`` and ``collaborations_with_suffix`` both read the same
# ``collaborations`` attribute on purpose (different rendering).
class RecordMetadataSchemaV1(Schema): _collections = fields.Raw() abstracts = fields.Raw() accelerator_experiments = fields.Nested(AcceleratorExperimentSchemaV1, dump_only=True, many=True) acquisition_source = fields.Raw() arxiv_eprints = fields.Raw() authors = ListWithLimit(fields.Nested( AuthorSchemaV1, dump_only=True), limit=10) book_series = fields.Raw() # citeable = fields.Raw() citation_count = fields.Raw() collaborations = fields.List(fields.Nested(CollaborationSchemaV1, dump_only=True), attribute="collaborations") collaborations_with_suffix = fields.List(fields.Nested(CollaborationWithSuffixSchemaV1, dump_only=True), attribute="collaborations") conference_info = fields.Nested( ConferenceInfoItemSchemaV1, dump_only=True, attribute='publication_info', many=True) control_number = fields.Raw() # copyright = fields.Raw() # core = fields.Raw() corporate_author = fields.Raw() # curated = fields.Raw() date = fields.Method('get_formatted_date') # deleted = fields.Raw() # deleted_records = fields.Raw() document_type = fields.Raw() # documents = fields.Raw() dois = fields.Nested(DOISchemaV1, dump_only=True, many=True) # editions = fields.Raw() # energy_ranges = fields.Raw() external_system_identifiers = fields.Nested( ExternalSystemIdentifierSchemaV1, dump_only=True, many=True) # figures = fields.Raw() # funding_info = fields.Raw() imprints = fields.Raw() inspire_categories = fields.Raw() isbns = fields.List(fields.Nested(IsbnSchemaV1, dump_only=True)) keywords = fields.Raw() languages = fields.Raw() # legacy_creation_date = fields.Raw() # license = fields.Raw() # new_record = fields.Raw() number_of_authors = fields.Method('get_number_of_authors') number_of_pages = fields.Raw() number_of_references = fields.Method('get_number_of_references') persistent_identifiers = fields.Raw() preprint_date = fields.Raw() # public_notes = fields.Raw() publication_info = fields.Nested( PublicationInfoItemSchemaV1, dump_only=True, many=True) # publication_type = fields.Raw() # 
# Method helpers: ``get_formatted_date`` formats ``earliest_date`` (missing
# when absent); ``get_number_of_authors``/``get_number_of_references`` return
# a count or ``missing`` via ``get_len_or_missing``; ``strip_empty`` is a
# ``@post_dump`` hook that drops empty values from the final dict.
record_affiliations = fields.Raw() # refereed = fields.Raw() # related_records = fields.Raw() report_numbers = fields.Raw() # self = fields.Raw() texkeys = fields.Raw() thesis_info = fields.Nested(ThesisInfoSchemaV1, dump_only=True) # title_translations = fields.Raw() titles = fields.Raw() # urls = fields.Raw() # withdrawn = fields.Raw() def get_formatted_date(self, data): earliest_date = data.get('earliest_date') if earliest_date is None: return missing return format_date(earliest_date) def get_number_of_authors(self, data): authors = data.get('authors') return self.get_len_or_missing(authors) def get_number_of_references(self, data): number_of_references = data.get('number_of_references') if number_of_references is not None: return number_of_references references = data.get('references') return self.get_len_or_missing(references) @staticmethod def get_len_or_missing(maybe_none_list): if maybe_none_list is None: return missing return len(maybe_none_list) @post_dump def strip_empty(self, data): return strip_empty_values(data)
class LiteratureAuthorsSchemaJSONUIV1(JSONSchemaUIV1):
    """Schema for literature authors."""

    # Dump-only: authors metadata is never accepted on load.
    metadata = fields.Nested(MetadataAuthorsSchemaV1, dump_only=True)
class LiteratureRecordSchemaJSONUIV1(JSONSchemaUIV1):
    """Schema for record UI."""

    # Dump-only: record metadata is never accepted on load.
    metadata = fields.Nested(RecordMetadataSchemaV1, dump_only=True)
class UserSchema(ma.Schema):
    """Serializes a user together with the user's activity entries."""

    activity = fields.List(fields.Nested(ActivitySchema))

    class Meta:
        # NOTE(review): 'password' is listed among the serialized fields —
        # confirm this is intentional; dumping passwords (even hashes) is
        # normally a security risk.
        fields = ('id', 'username', 'password', 'email', 'activity')
class TacInfo(Schema):
    """Defines the schema for TAC API(version 2) response."""

    # The TAC code itself plus its mandatory GSMA device record.
    tac = fields.String(required=True)
    gsma = fields.Nested(GSMA, required=True)
class v1alpha1_ToolConfigGceSchema(Schema):
    """GCE tool configuration: auth, image and storage sections."""

    auth = fields.Nested(v1alpha1_ToolConfigGceAuthSchema)
    image = fields.Nested(v1alpha1_ToolConfigGceImageSchema)
    storage = fields.Nested(v1alpha1_ToolConfigGceStorageSchema)
class BatchTacInfo(Schema):
    """Defines schema for Batch TAC API version 2 response."""

    # One TacInfo entry per TAC looked up in the batch.
    results = fields.List(fields.Nested(TacInfo, required=True))
# NOTE(review): fragment is incomplete at both edges — it starts inside an
# earlier request handler (the ``logging.error``/commit tail) and ends inside
# the ``for option in column["options"]:`` loop of the new handler; left
# byte-identical rather than guessing at the missing parts.
# NOTE(review): ``logging.error`` is used here for what look like debug/info
# messages ("HERE ARE THE ROWS ...") — presumably leftovers; confirm and
# downgrade to ``logging.debug`` or remove.
# NOTE(review): ``marshal_with`` uses ``missing=[]`` — a shared mutable
# default; verify the marshalling library copies it per request.
logging.error(nation.name) if columns: nation.columns = columns nation.reset_order() logging.error("HERE ARE THE ROWS %s", nation.order.rows) logging.error(nation.columns) db.session.commit() return "OK", 200 @app.route("/api/order/<int:nation_id>", methods=["GET"]) @marshal_with({ 'nation': fields.Nested(NationSchema, required=True), 'rows': fields.List(fields.Dict(), missing=[]), 'chat': fields.List(fields.Dict(), missing=[]) }) def get_order_by_nation_by_id(nation_id): nation = db.session.query(Nation).filter(Nation.id == nation_id).first() returned_rows = [] column_aggregations = [] for column in nation.columns: if column["type"] == MenuColumnType.MULTI.name: aggregation = {} for option in column["options"]:
class IngredientSchema(ma.ModelSchema, BaseSchema):
    """Model schema for an ingredient and the recipes that use it.

    Excludes the back-reference to ``ingredients`` on the nested recipe to
    avoid infinite recursion when dumping.
    """

    # BUG FIX: ``exclude`` must be an iterable of field *names*; the original
    # ``exclude=('ingredients')`` is a bare string (missing trailing comma),
    # which marshmallow iterates character by character.
    recipe = fields.Nested('RecipeSchema', many=True, exclude=('ingredients',))

    class Meta:
        model = Ingredient
# Top-level schema for a Polyaxon deployment config: deployment selection
# (type/chart/version/namespace), secrets, services (gateway/api/streams/
# scheduler/worker/beat/agent/operator/init/sidecar/hooks), backing stores
# (postgresql/redis/rabbitmq/broker), connections, UI/auth and operator
# toggles.  Cross-field constraints are enforced by ``validate_deployment``.
# NOTE(review): this span is collapsed onto three lines; the trailing
# ``# Pending validation`` comment on the ``istio`` field swallows the
# following ``dns`` declaration in this flattened form — reflow before
# editing.  Left byte-identical because the validator call order and the
# large declarative field list are easy to corrupt in a rewrite.
class DeploymentSchema(BaseCamelSchema): deployment_type = fields.Str(allow_none=True, validate=validate.OneOf( DeploymentTypes.VALUES)) deployment_chart = fields.Str( allow_none=True, validate=validate.OneOf(DeploymentCharts.VALUES), default=DeploymentCharts.PLATFORM, ) deployment_version = fields.Str(allow_none=True) namespace = fields.Str(allow_none=True) rbac = fields.Nested(RBACSchema, allow_none=True) polyaxon_secret = fields.Str(allow_none=True) internal_token = fields.Str(allow_none=True) password_length = fields.Int(allow_none=True) ssl = fields.Nested(SSLSchema, allow_none=True) encryption_secret = fields.Str(allow_none=True) platform_secret = fields.Str(allow_none=True) agent_secret = fields.Str(allow_none=True) timezone = fields.Str(allow_none=True) environment = fields.Str(allow_none=True) ingress = fields.Nested(IngressSchema, allow_none=True) user = fields.Nested(RootUserSchema, allow_none=True) node_selector = fields.Dict(allow_none=True) tolerations = fields.List(fields.Dict(allow_none=True), allow_none=True) affinity = fields.Dict(allow_none=True) limit_resources = fields.Bool(allow_none=True) global_replicas = fields.Int(allow_none=True) global_concurrency = fields.Int(allow_none=True) gateway = fields.Nested(ApiServiceSchema, allow_none=True) api = fields.Nested(ApiServiceSchema, allow_none=True) streams = fields.Nested(ApiServiceSchema, allow_none=True) scheduler = fields.Nested(WorkerServiceSchema, allow_none=True) worker = fields.Nested(WorkerServiceSchema, allow_none=True) beat = fields.Nested(ServiceSchema, allow_none=True) agent = fields.Nested(AgentServiceSchema, allow_none=True) operator = fields.Nested(ServiceSchema, allow_none=True) init = fields.Nested(HelperServiceSchema, allow_none=True) sidecar = fields.Nested(HelperServiceSchema, allow_none=True) tables_hook = fields.Nested(ServiceSchema, allow_none=True) clean_hooks = fields.Nested(ServiceSchema, allow_none=True) hooks = fields.Nested(HooksSchema, allow_none=True) postgresql = 
# (continuation) remaining field declarations, the ``schema_config`` hook
# returning the config class, and the ``@validates_schema`` entry point.
fields.Nested(PostgresqlSchema, allow_none=True) redis = fields.Nested(RedisSchema, allow_none=True) rabbitmq = fields.Nested(RabbitmqSchema, data_key="rabbitmq-ha", allow_none=True) broker = fields.Str(allow_none=True, validate=validate.OneOf(["redis", "rabbitmq"])) email = fields.Nested(EmailSchema, allow_none=True) ldap = fields.Raw(allow_none=True) metrics = fields.Raw(allow_none=True) image_pull_secrets = fields.List(fields.Str(), allow_none=True) host_name = fields.Str(allow_none=True) allowed_hosts = fields.List(fields.Str(), allow_none=True) intervals = fields.Nested(IntervalsSchema, allow_none=True) artifacts_store = fields.Nested(ConnectionTypeSchema, allow_none=True) connections = fields.List(fields.Nested(ConnectionTypeSchema), allow_none=True) notification_connections = fields.List( fields.Nested(ConnectionTypeSchema), allow_none=True, ) log_level = fields.Str(allow_none=True) security_context = fields.Nested(SecurityContextSchema, allow_none=True) external_services = fields.Nested(ExternalServicesSchema, allow_none=True) debug_mode = fields.Bool(allow_none=True) organization_key = fields.Str(allow_none=True) auth = fields.Nested(AuthSchema, allow_none=True) ui = fields.Nested(UISchema, allow_none=True) include_chart_revision = fields.Bool(allow_none=True) operators = fields.Nested(OperatorsSchema, allow_none=True) istio = fields.Dict(allow_none=True) # Pending validation dns = fields.Raw(allow_none=True) @staticmethod def schema_config(): return DeploymentConfig @validates_schema def validate_deployment(self, data, **kwargs): validate_deployment_chart( deployment_chart=data.get("deployment_chart"), agent=data.get("agent"), environment=data.get("environment"), ) validate_platform_deployment( postgresql=data.get("postgresql"), redis=data.get("redis"), rabbitmq=data.get("rabbitmq"), broker=data.get("broker"), scheduler=data.get("scheduler"), worker=data.get("worker"), beat=data.get("beat"), external_services=data.get("external_services"), ) 
# (continuation) gateway validation, then agent-chart deployments reject
# platform-only keys via ``wrong_agent_deployment_keys``.
validate_gateway(data.get("gateway")) if data.get("deployment_chart") == DeploymentCharts.AGENT: wrong_agent_deployment_keys( polyaxon_secret=data.get("polyaxon_secret"), internal_token=data.get("internal_token"), password_length=data.get("password_length"), user=data.get("user"), global_replicas=data.get("global_replicas"), global_concurrency=data.get("global_concurrency"), api=data.get("api"), scheduler=data.get("scheduler"), worker=data.get("worker"), beat=data.get("beat"), tables_hook=data.get("tables_hook"), hooks=data.get("hooks"), postgresql=data.get("postgresql"), redis=data.get("redis"), rabbitmq=data.get("rabbitmq"), broker=data.get("broker"), email=data.get("email"), ldap=data.get("ldap"), intervals=data.get("intervals"), metrics=data.get("metrics"), organization_key=data.get("organization_key"), ui=data.get("ui"), )
class Schema(validation.RequestSchema):
    """Request schema: a required integer, an email and a nested payload."""

    foo = fields.Integer(required=True)
    bar = fields.Email()
    nested = fields.Nested(NestedSchema)
# Configuration schema for the Atlas application: display toggles, TaxHub
# media settings, attribute ids for taxon media/descriptions, static/portal
# page definitions and map behaviour.  ``validate_url_taxhub`` enforces that
# TAXHUB_URL is set whenever image resizing is enabled.
# NOTE(review): several defaults deserve a second look —
#   * ``RANG_STAT = fields.List(fields.Dict, missing=[...])`` passes the
#     ``Dict`` *class* (not an instance) and a shared mutable ``missing``
#     list; confirm the marshmallow version tolerates both.
#   * ``SPLIT_NOM_VERN = fields.Integer(missing=True)`` uses a boolean
#     default on an Integer field — presumably intentional (truthy flag),
#     but verify.
#   * ``raise ValidationError({...})`` passes a *set* literal, not a dict.
class AtlasConfig(Schema): modeDebug = fields.Boolean(missing=False) STRUCTURE = fields.String(missing="Nom de la structure") NOM_APPLICATION = fields.String(missing="Nom de l'application") URL_APPLICATION = fields.String(missing="") ID_GOOGLE_ANALYTICS = fields.String(missing="UA-xxxxxxx-xx") GLOSSAIRE = fields.Boolean(missing=False) IGNAPIKEY = fields.String(missing="") AFFICHAGE_INTRODUCTION = fields.Boolean(missing=True) AFFICHAGE_FOOTER = fields.Boolean(missing=False) AFFICHAGE_STAT_GLOBALES = fields.Boolean(missing=True) AFFICHAGE_DERNIERES_OBS = fields.Boolean(missing=True) AFFICHAGE_EN_CE_MOMENT = fields.Boolean(missing=True) AFFICHAGE_RANG_STAT = fields.Boolean(missing=True) RANG_STAT = fields.List( fields.Dict, missing=[ { "phylum": ["Arthropoda", "Mollusca"] }, { "phylum": ["Chordata"] }, { "regne": ["Plantae"] }, ], ) RANG_STAT_FR = fields.List( fields.String, missing=["Faune invertébrée", "Faune vertébrée", "Flore"]) LIMIT_RANG_TAXONOMIQUE_HIERARCHIE = fields.Integer(missing=13) LIMIT_FICHE_LISTE_HIERARCHY = fields.Integer(missing=28) REMOTE_MEDIAS_URL = fields.String(missing="http://mondomaine.fr/taxhub/") REMOTE_MEDIAS_PATH = fields.String(missing="static/medias/") REDIMENSIONNEMENT_IMAGE = fields.Boolean(missing=False) TAXHUB_URL = fields.String(required=False, missing=None) ATTR_DESC = fields.Integer(missing=100) ATTR_COMMENTAIRE = fields.Integer(missing=101) ATTR_MILIEU = fields.Integer(missing=102) ATTR_CHOROLOGIE = fields.Integer(missing=103) ATTR_MAIN_PHOTO = fields.Integer(missing=1) ATTR_OTHER_PHOTO = fields.Integer(missing=2) ATTR_LIEN = fields.Integer(missing=3) ATTR_PDF = fields.Integer(missing=4) ATTR_AUDIO = fields.Integer(missing=5) ATTR_VIDEO_HEBERGEE = fields.Integer(missing=6) ATTR_YOUTUBE = fields.Integer(missing=7) ATTR_DAILYMOTION = fields.Integer(missing=8) ATTR_VIMEO = fields.Integer(missing=9) PROTECTION = fields.Boolean(missing=False) DISPLAY_PATRIMONIALITE = fields.Boolean(missing=False) PATRIMONIALITE = fields.Dict( 
# (continuation) remaining defaults (patrimonial labels, static/portal pages,
# map/cluster tuning) and the ``validate_url_taxhub`` schema validator.
missing={ "label": "Patrimonial", "config": { "oui": { "icon": "custom/images/logo_patrimonial.png", "text": "Ce taxon est patrimonial", } }, }) STATIC_PAGES = fields.Dict( missing={ "presentation": { "title": "Présentation de l'atlas", "picto": "glyphicon-question-sign", "order": 0, "template": "static/custom/templates/presentation.html", } }) PORTAL_PAGES = fields.Dict( missing={ 'mammiferes': { 'title': "Mammifères", 'picto': 'glyphicon-book', 'order': 0, 'cd_ref': '186206' }, }) AFFICHAGE_MAILLE = fields.Boolean(missing=False) ZOOM_LEVEL_POINT = fields.Integer(missing=11) LIMIT_CLUSTER_POINT = fields.Integer(missing=1000) NB_DAY_LAST_OBS = fields.String(missing="7 day") NB_LAST_OBS = fields.Integer(missing=100) TEXT_LAST_OBS = fields.String( missing="Les observations des agents ces 7 derniers jours |") TYPE_DE_REPRESENTATION_MAILLE = fields.String( validate=OneOf(["LAST_OBS", "NB_OBS"])) MAP = fields.Nested(MapConfig, missing=dict()) # Specify how communes are ordered # if true by length else by name ORDER_COMMUNES_BYLENGTH = fields.Boolean(missing=False) # coupe le nom_vernaculaire à la 1ere virgule sur les fiches espèces SPLIT_NOM_VERN = fields.Integer(missing=True) @validates_schema def validate_url_taxhub(self, data): """ TAXHHUB_URL doit être rempli si REDIMENSIONNEMENT_IMAGE = True """ if data["REDIMENSIONNEMENT_IMAGE"] and data["TAXHUB_URL"] is None: raise ValidationError({ "Le champ TAXHUB_URL doit être rempli si REDIMENSIONNEMENT_IMAGE = True" })
class ReportsResponse(BaseSerializer):
    """Paginated-style response: the reports plus the total item count."""

    reports = fields.List(fields.Nested(Report, required=True))
    totalItems = fields.Int(required=True)
class ReleveCruvedSchema(MA.Schema):
    """Pairs a GeoJSON releve with its CRUVED permission set (dump only)."""

    releve = fields.Nested(GeojsonReleveSchema, dump_only=True)
    cruved = fields.Nested(CruvedSchema, dump_only=True)
# Validates an incoming Web Push request: nested subscription and header
# schemas, a PolyField that picks the crypto-header schema from the request,
# plus body/token_info passthroughs.  ``token_prep`` (@pre_load) assembles
# ``token_info`` from path kwargs and headers; ``validate_data`` caps the
# body at ``conf.max_data``; ``fixup_output`` (@post_load) runs VAPID auth
# (deliberately *after* nested-schema loading — see its comment), merges the
# crypto headers back, base64url-encodes the body and builds the
# WebPushNotification.
# NOTE(review): ``validate_auth`` raises InvalidRequest(401, errno=109) for
# missing/invalid/expired/too-far-future JWTs; the exact exception-tuple
# selection depends on ``conf.use_cryptography``.  Statement order here is
# load-order sensitive — left byte-identical.
class WebPushRequestSchema(Schema): subscription = fields.Nested(WebPushSubscriptionSchema, load_from="token_info") headers = fields.Nested(WebPushBasicHeaderSchema) crypto_headers = PolyField( load_from="headers", deserialization_schema_selector=conditional_crypto_deserialize, ) body = fields.Raw() token_info = fields.Raw() vapid_version = fields.String(required=False, missing=None) @validates('body') def validate_data(self, value): max_data = self.context["conf"].max_data if value and len(value) > max_data: raise InvalidRequest( "Data payload must be smaller than {}".format(max_data), errno=104, ) @pre_load def token_prep(self, d): d["token_info"] = dict( api_ver=d["path_kwargs"].get("api_ver"), token=d["path_kwargs"].get("token"), ckey_header=d["headers"].get("crypto-key", ""), auth_header=d["headers"].get("authorization", ""), ) return d def validate_auth(self, d): crypto_exceptions = [ KeyError, ValueError, TypeError, VapidAuthException ] if self.context['conf'].use_cryptography: crypto_exceptions.append(InvalidSignature) else: crypto_exceptions.extend([JOSEError, JWTError, AssertionError]) auth = d["headers"].get("authorization") needs_auth = d["token_info"]["api_ver"] == "v2" if not needs_auth and not auth: return try: vapid_auth = parse_auth_header(auth) token = vapid_auth['t'] d["vapid_version"] = "draft{:0>2}".format(vapid_auth['version']) if vapid_auth['version'] == 2: public_key = vapid_auth['k'] else: public_key = d["subscription"].get("public_key") jwt = extract_jwt(token, public_key, is_trusted=self.context['conf'].enable_tls_auth, use_crypto=self.context['conf'].use_cryptography) except tuple(crypto_exceptions): raise InvalidRequest("Invalid Authorization Header", status_code=401, errno=109, headers={"www-authenticate": PREF_SCHEME}) if "exp" not in jwt: raise InvalidRequest("Invalid bearer token: No expiration", status_code=401, errno=109, headers={"www-authenticate": PREF_SCHEME}) try: jwt_expires = int(jwt['exp']) except ValueError: raise 
# (continuation of validate_auth: the ``raise InvalidRequest`` statement is
# split across the line break above, then expiry-window checks and the
# ``fixup_output`` post-load hook follow.)
InvalidRequest("Invalid bearer token: Invalid expiration", status_code=401, errno=109, headers={"www-authenticate": PREF_SCHEME}) now = time.time() jwt_has_expired = now > jwt_expires if jwt_has_expired: raise InvalidRequest("Invalid bearer token: Auth expired", status_code=401, errno=109, headers={"www-authenticate": PREF_SCHEME}) jwt_too_far_in_future = (jwt_expires - now) > (60 * 60 * 24) if jwt_too_far_in_future: raise InvalidRequest( "Invalid bearer token: Auth > 24 hours in " "the future", status_code=401, errno=109, headers={"www-authenticate": PREF_SCHEME}) jwt_crypto_key = base64url_encode(public_key) d["jwt"] = dict(jwt_crypto_key=jwt_crypto_key, jwt_data=jwt) @post_load def fixup_output(self, d): # Verify authorization # Note: This has to be done here, since schema validation takes place # before nested schemas, and in this case we need all the nested # schema logic to run first. self.validate_auth(d) # Merge crypto headers back in if d["crypto_headers"]: d["headers"].update({ k.replace("_", "-"): v for k, v in d["crypto_headers"].items() }) # Base64-encode data for Web Push d["body"] = base64url_encode(d["body"]) # Set the notification based on the validated request schema data d["notification"] = WebPushNotification.from_webpush_request_schema( data=d, fernet=self.context["conf"].fernet, legacy=self.context["conf"]._notification_legacy, ) return d
class ArticleGenerateRequest(Schema):
    """Request body for article generation: the items to include."""

    items = fields.List(fields.Nested(Item))
class ServiceReviewEditableSchema(mm.Schema):
    """Editable portion of a service review."""

    # Publishing defaults to True when the client omits the flag.
    publish = fields.Boolean(missing=True)
    comment = fields.String()
    comments = fields.List(fields.Nested(ReviewCommentSchema))
    tags = fields.List(fields.Int())
class AptSchema(BaseSchema):
    """Schema for an apt configuration: the packages to install."""

    packages = fields.List(fields.Nested(AptPackageSchema()), required=True)

    @post_load
    def make_apt(self, data):
        """Build an ``Apt`` instance from the validated payload."""
        return Apt(**data)
class DimensionSubmissionSchema(Schema):
    """A submitted dimension: its id plus all question submissions."""

    id = fields.Integer(required=True)
    questions = fields.Nested(QuestionSubmissionSchema, many=True,
                              required=True)
class docSchema(sDocPrj):
    """Document schema pairing the job input with its result."""

    # NOTE(review): 'input' shadows the builtin as an attribute name; kept
    # for interface compatibility.
    input = fields.Nested(docInput)
    result = fields.Nested(docResult)