class PATCH(Schema):
    """Request body for a partial update."""

    id = fields.Int()  # identifier of the record being patched
    name = fields.Str()  # replacement display name
class UserSchema(Schema):
    """User representation whose ``name`` is serialize-only (see Meta)."""

    name = fields.Str()

    class Meta:
        # 'name' appears in dumped output but is ignored on load.
        dump_only = ('name',)
def test_field_with_additional_metadata(self):
    """Extra keyword metadata (minLength/maxLength) must pass straight through
    to the generated property dict."""
    field = fields.Str(minLength=6, maxLength=100)
    prop = swagger.field2property(field)
    assert prop['minLength'] == 6
    assert prop['maxLength'] == 100
class CredentialDefinitionSendResultsSchema(OpenAPISchema):
    """Results schema for a credential definition send request."""

    credential_definition_id = fields.Str(
        description="Credential definition identifier",
        **INDY_CRED_DEF_ID,
    )
def test_fields_default_location_mapping(self):
    """A plural ``default_in`` ('headers') is normalized to the singular
    OpenAPI location name 'header'."""
    field_dict = {'field': fields.Str()}
    params = swagger.fields2parameters(field_dict, default_in='headers')
    assert params[0]['in'] == 'header'
class V10PresentationExchangeSchema(BaseExchangeSchema):
    """Schema for de/serialization of v1.0 presentation exchange records."""

    class Meta:
        """V10PresentationExchangeSchema metadata."""

        model_class = V10PresentationExchange

    presentation_exchange_id = fields.Str(
        required=False,
        description="Presentation exchange identifier",
        example=UUIDFour.EXAMPLE,  # typically a UUID4 but not necessarily
    )
    connection_id = fields.Str(
        required=False,
        description="Connection identifier",
        example=UUIDFour.EXAMPLE,  # typically a UUID4 but not necessarily
    )
    thread_id = fields.Str(
        required=False,
        description="Thread identifier",
        example=UUIDFour.EXAMPLE,  # typically a UUID4 but not necessarily
    )
    initiator = fields.Str(
        required=False,
        description="Present-proof exchange initiator: self or external",
        example=V10PresentationExchange.INITIATOR_SELF,
        validate=OneOf(["self", "external"]),
    )
    role = fields.Str(
        required=False,
        description="Present-proof exchange role: prover or verifier",
        example=V10PresentationExchange.ROLE_PROVER,
        validate=OneOf(["prover", "verifier"]),
    )
    state = fields.Str(
        required=False,
        description="Present-proof exchange state",
        example=V10PresentationExchange.STATE_VERIFIED,
    )
    presentation_proposal_dict = fields.Dict(
        required=False,
        description="Serialized presentation proposal message",
    )
    presentation_request = fields.Dict(
        required=False,
        description="(Indy) presentation request (also known as proof request)",
    )
    presentation = fields.Dict(
        required=False,
        description="(Indy) presentation (also known as proof)",
    )
    verified = fields.Str(  # tag: must be a string
        required=False,
        description="Whether presentation is verified: true or false",
        example="true",
        validate=OneOf(["true", "false"]),
    )
    auto_present = fields.Bool(
        required=False,
        description="Prover choice to auto-present proof as verifier requests",
        example=False,
    )
    error_msg = fields.Str(
        required=False,
        description="Error message",
        example="Invalid structure",
    )
class ConsentContentSchema(Schema):
    """Content of a consent record; every attribute is mandatory."""

    expiration = fields.Str(required=True)
    limitation = fields.Str(required=True)
    dictatedBy = fields.Str(required=True)  # upstream camelCase kept: it is the wire key
    validityTTL = fields.Str(required=True)
class AuthSchema(Schema):
    """Auth schema: authentication status plus issued token."""

    status = fields.Str()
    token = fields.Str()
class DEBUGServiceDiscoveryRecordSchema(BaseRecordSchema):
    """Schema for service-discovery debug records."""

    class Meta:
        # Model class referenced by name (string), matching BaseRecordSchema usage.
        model_class = "DEBUGServiceDiscoveryRecord"

    services = fields.List(fields.Nested(ServiceRecordSchema()))
    connection_id = fields.Str()
class PhaseDiffFmapFileSchema(BaseFmapFileSchema):
    """Fieldmap file whose suffix is pinned to 'phasediff'."""

    suffix = fields.Str(dump_default="phasediff", validate=validate.Equal("phasediff"))
    metadata = fields.Nested(PhaseDiffMetadataSchema())
class PhaseFmapFileSchema(BaseFmapFileSchema):
    """Fieldmap file for either of the two phase images."""

    suffix = fields.Str(validate=validate.OneOf(["phase1", "phase2"]))
    metadata = fields.Nested(TEMetadataSchema())
class EPIFmapFileSchema(BaseFmapFileSchema):
    """Fieldmap file whose suffix is pinned to 'epi'."""

    suffix = fields.Str(dump_default="epi", validate=validate.Equal("epi"))
    # Pass the callable rather than dict(): marshmallow calls it per
    # serialization, so each dump gets a fresh mapping instead of all
    # instances sharing one mutable default dict.
    tags = fields.Nested(EPIFmapTagsSchema(), dump_default=dict)
    metadata = fields.Nested(PEDirMetadataSchema())
class LoginSchema(Schema):
    """Login request: both credentials are required."""

    email = fields.Str(required=True)
    password = fields.Str(required=True)
class POST(Schema):
    """Request schema that builds an Animal from the validated payload."""

    name = fields.Str(required=True)

    @post_load
    def post_load(self, data, **kwargs):
        # Rebinding the name 'post_load' here is safe: the decorator was
        # already resolved before this method definition shadows it.
        return Animal(**data)
class BookSchema(Schema):
    """A book: title and author."""

    title = fields.Str()
    author = fields.Str()
class ClassSchedualSchema(SchedualeSchema):
    """Schedule entry for a class (upstream spelling of the base name kept)."""

    time = fields.Str()
    class_id = fields.Int()
class MeetingSchema(Schema):
    """Meeting record with per-party review notes and signatures."""

    id = fields.Number()
    project_id = fields.Number()
    date = fields.DateTime()
    time = fields.Str()
    subject = fields.Str()
    # Free-text review fields default to '' when absent on load.
    # NOTE(review): ``missing=`` is the marshmallow-2 spelling (renamed
    # ``load_default=`` in marshmallow 3) — confirm the pinned version
    # before migrating.
    project_bilan1 = fields.Str(required=False, missing="")
    project_bilan2 = fields.Str(required=False, missing="")
    adentis_bilan1 = fields.Str(required=False, missing="")
    adentis_bilan2 = fields.Str(required=False, missing="")
    adentis_bilan3 = fields.Str(required=False, missing="")
    manager_signature = fields.Str(required=False, missing="")
    consultant_signature = fields.Str(required=False, missing="")
    client_signature = fields.Str(required=False, missing="")
    created_at = fields.DateTime()
    updated_at = fields.DateTime()
    last_updated_by = fields.Str()
class PicassoQuerySchema(Schema):
    """Query request: the query string is mandatory."""

    query = fields.Str(required=True)
class T1wFileSchema(BaseFileSchema):
    """Anatomical T1-weighted image file description."""

    # Use dump_default= for consistency with the sibling file schemas in
    # this module (PhaseDiffFmapFileSchema, EPIFmapFileSchema); the bare
    # default= spelling is deprecated in marshmallow 3.13+.
    datatype = fields.Str(dump_default="anat", validate=validate.Equal("anat"))
    suffix = fields.Str(dump_default="T1w", validate=validate.Equal("T1w"))
    extension = fields.Str(validate=validate.OneOf([".nii", ".nii.gz"]))
    # Callable default: a fresh dict per serialization instead of one shared
    # mutable dict across all instances.
    tags = fields.Nested(AnatTagsSchema, dump_default=dict)
class ClientSchema(BaseSchema):
    """Configuration schema for the Polyaxon client.

    Every field is optional (``allow_none=True``) and maps to an environment
    key via ``data_key``; ``schema_config`` links this schema to its config
    object.
    """

    # API endpoint and identity
    host = fields.Str(allow_none=True, data_key=POLYAXON_KEYS_API_HOST)
    version = fields.Str(allow_none=True, data_key=POLYAXON_KEYS_API_VERSION)
    debug = fields.Bool(allow_none=True, data_key=POLYAXON_KEYS_DEBUG)
    log_level = fields.Str(allow_none=True, data_key=POLYAXON_KEYS_LOG_LEVEL)
    authentication_type = fields.Str(
        allow_none=True, data_key=POLYAXON_KEYS_AUTHENTICATION_TYPE
    )
    # Runtime mode flags
    is_managed = fields.Bool(allow_none=True, data_key=POLYAXON_KEYS_IS_MANAGED)
    is_service = fields.Bool(allow_none=True, data_key=POLYAXON_KEYS_IS_SERVICE)
    is_local = fields.Bool(allow_none=True, data_key=POLYAXON_KEYS_IS_LOCAL)
    is_offline = fields.Bool(allow_none=True, data_key=POLYAXON_KEYS_IS_OFFLINE)
    is_ops = fields.Bool(allow_none=True, data_key=POLYAXON_KEYS_IS_OPS)
    in_cluster = fields.Bool(allow_none=True, data_key=POLYAXON_KEYS_K8S_IN_CLUSTER)
    no_op = fields.Bool(allow_none=True, data_key=POLYAXON_KEYS_NO_OP)
    # Timing
    timeout = fields.Float(allow_none=True, data_key=POLYAXON_KEYS_TIMEOUT)
    tracking_timeout = fields.Float(
        allow_none=True, data_key=POLYAXON_KEYS_TRACKING_TIMEOUT
    )
    timezone = fields.Str(
        allow_none=True, data_key=POLYAXON_KEYS_TIME_ZONE, default="UTC"
    )
    watch_interval = fields.Int(allow_none=True, data_key=POLYAXON_KEYS_WATCH_INTERVAL)
    interval = fields.Float(allow_none=True, data_key=POLYAXON_KEYS_INTERVAL)
    # TLS / connection
    verify_ssl = fields.Bool(allow_none=True, data_key=POLYAXON_KEYS_VERIFY_SSL)
    ssl_ca_cert = fields.Str(allow_none=True, data_key=POLYAXON_KEYS_SSL_CA_CERT)
    cert_file = fields.Str(allow_none=True, data_key=POLYAXON_KEYS_CERT_FILE)
    key_file = fields.Str(allow_none=True, data_key=POLYAXON_KEYS_KEY_FILE)
    assert_hostname = fields.Bool(
        allow_none=True, data_key=POLYAXON_KEYS_ASSERT_HOSTNAME
    )
    connection_pool_maxsize = fields.Int(
        allow_none=True, data_key=POLYAXON_KEYS_CONNECTION_POOL_MAXSIZE
    )
    # Upload limits and paths
    upload_size_warn = fields.Int(
        allow_none=True, data_key=POLYAXON_KEYS_UPLOAD_SIZE_WARN
    )
    upload_size_max = fields.Int(
        allow_none=True, data_key=POLYAXON_KEYS_UPLOAD_SIZE_MAX
    )
    archive_root = fields.Str(allow_none=True, data_key=POLYAXON_KEYS_ARCHIVE_ROOT)
    # Headers and k8s context
    header = fields.Str(allow_none=True, data_key=POLYAXON_KEYS_HEADER)
    header_service = fields.Str(allow_none=True, data_key=POLYAXON_KEYS_HEADER_SERVICE)
    pod_id = fields.Str(allow_none=True, data_key=POLYAXON_KEYS_K8S_POD_ID)
    namespace = fields.Str(allow_none=True, data_key=POLYAXON_KEYS_K8S_NAMESPACE)
    no_api = fields.Bool(allow_none=True, data_key=POLYAXON_KEYS_NO_API)
    agent_path = fields.Str(allow_none=True, data_key=POLYAXON_KEYS_AGENT_PATH)
    set_agent = fields.Bool(allow_none=True, data_key=POLYAXON_KEYS_SET_AGENT)

    @staticmethod
    def schema_config():
        """Return the config class this schema deserializes into."""
        return ClientConfig
class AddServiceSchema(Schema):
    """Request to register a service with its consent terms."""

    label = fields.Str(required=True)
    service_schema = fields.Nested(ServiceSchema())
    consent_schema = fields.Nested(ConsentSchema())
class ArticleSchema(Schema):
    """An article: required body text plus optional taxonomy lists."""

    title = fields.Str(required=True)
    content = fields.Str(required=True)
    categories = fields.List(fields.Str())
    tags = fields.List(fields.Str())
class CredentialDefinitionsCreatedResultsSchema(OpenAPISchema):
    """Results schema for cred-defs-created request."""

    credential_definition_ids = fields.List(
        fields.Str(
            description="Credential definition identifiers",
            **INDY_CRED_DEF_ID,
        )
    )
class DataContextConfigSchema(Schema):
    """Schema validating the top-level DataContext configuration mapping."""

    config_version = fields.Number(
        validate=lambda x: 0 < x < 100,
        error_messages={"invalid": "config version must be a number."},
    )
    datasources = fields.Dict(
        keys=fields.Str(), values=fields.Nested(DatasourceConfigSchema)
    )
    expectations_store_name = fields.Str()
    validations_store_name = fields.Str()
    evaluation_parameter_store_name = fields.Str()
    plugins_directory = fields.Str(allow_none=True)
    validation_operators = fields.Dict(keys=fields.Str(), values=fields.Dict())
    stores = fields.Dict(keys=fields.Str(), values=fields.Dict())
    notebooks = fields.Nested(NotebooksConfigSchema, allow_none=True)
    data_docs_sites = fields.Dict(
        keys=fields.Str(), values=fields.Dict(), allow_none=True
    )
    config_variables_file_path = fields.Str(allow_none=True)
    anonymous_usage_statistics = fields.Nested(AnonymizedUsageStatisticsConfigSchema)

    # noinspection PyMethodMayBeStatic
    # noinspection PyUnusedLocal
    def handle_error(self, exc, data, **kwargs):
        """Log and raise our custom exception when (de)serialization fails."""
        logger.error(exc.messages)
        raise ge_exceptions.InvalidDataContextConfigError(
            "Error while processing DataContextConfig.", exc
        )

    @validates_schema
    def validate_schema(self, data, **kwargs):
        """Enforce presence, numeric type, and supported range of config_version."""
        if "config_version" not in data:
            raise ge_exceptions.InvalidDataContextConfigError(
                "The key `config_version` is missing; please check your config file.",
                validation_error=ValidationError("no config_version key"),
            )

        if not isinstance(data["config_version"], (int, float)):
            raise ge_exceptions.InvalidDataContextConfigError(
                "The key `config_version` must be a number. Please check your config file.",
                validation_error=ValidationError("config version not a number"),
            )

        # When migrating from 0.7.x to 0.8.0
        if data["config_version"] == 0 and (
            "validations_store" in list(data.keys())
            or "validations_stores" in list(data.keys())
        ):
            raise ge_exceptions.UnsupportedConfigVersionError(
                "You appear to be using a config version from the 0.7.x series. "
                "This version is no longer supported."
            )
        elif data["config_version"] < MINIMUM_SUPPORTED_CONFIG_VERSION:
            raise ge_exceptions.UnsupportedConfigVersionError(
                "You appear to have an invalid config version ({}).\n The version "
                "number must be at least {}. Please see the migration guide at "
                "https://docs.greatexpectations.io/en/latest/guides/how_to_guides/migrating_versions.html".format(
                    data["config_version"], MINIMUM_SUPPORTED_CONFIG_VERSION
                ),
            )
        elif data["config_version"] > CURRENT_CONFIG_VERSION:
            raise ge_exceptions.InvalidDataContextConfigError(
                "You appear to have an invalid config version ({}).\n The maximum valid version is {}.".format(
                    data["config_version"], CURRENT_CONFIG_VERSION
                ),
                validation_error=ValidationError("config version too high"),
            )
class UserSchema(Schema):
    """User representation; ``name`` is serialized but never loaded."""

    name = fields.Str(dump_only=True)
class UserPersonalSchema(Schema):
    """Personal profile attributes for a user."""

    nickname = fields.Str()
    send_receive = fields.Str()
    beans = fields.Str()
    email = fields.Str()
def test_field_with_choices(self):
    """A OneOf validator should surface as the property's ``enum`` list."""
    field = fields.Str(validate=validate.OneOf(['freddie', 'brian', 'john']))
    prop = swagger.field2property(field)
    # Order is not guaranteed, so compare as sets.
    assert set(prop['enum']) == {'freddie', 'brian', 'john'}
class TradeSchema(Schema):
    """A trade entry: who, when, and its numeric id."""

    user_name = fields.Str()
    time = fields.Str()
    id = fields.Int()
def test_field_with_allow_none(self):
    """``allow_none=True`` must emit the Swagger 2.0 ``x-nullable`` extension."""
    field = fields.Str(allow_none=True)
    prop = swagger.field2property(field)
    assert prop['x-nullable'] is True
class RHUserSearch(RHProtected):
    """Search for users based on given criteria"""

    def _serialize_pending_user(self, entry):
        # Normalize identity-provider data; any missing piece becomes ''.
        first_name = entry.data.get('first_name') or ''
        last_name = entry.data.get('last_name') or ''
        full_name = '{} {}'.format(first_name, last_name).strip() or 'Unknown'
        affiliation = entry.data.get('affiliation') or ''
        email = entry.data['email'].lower()
        ext_id = '{}:{}'.format(entry.provider.name, entry.identifier)
        # detailed data to put in redis to create a pending user if needed
        self.externals[ext_id] = {
            'first_name': first_name,
            'last_name': last_name,
            'email': email,
            'affiliation': affiliation,
            'phone': entry.data.get('phone') or '',
            'address': entry.data.get('address') or '',
        }
        # simple data for the search results
        return {
            '_ext_id': ext_id,
            'id': None,
            'identifier': 'ExternalUser:{}'.format(ext_id),
            'email': email,
            'affiliation': affiliation,
            'full_name': full_name,
        }

    def _serialize_entry(self, entry):
        # Known users go through the result schema; anything else is an
        # external (pending) identity.
        if isinstance(entry, User):
            return search_result_schema.dump(entry)
        return self._serialize_pending_user(entry)

    def _process_pending_users(self, results):
        # Stash the detailed external-user data for one day so a follow-up
        # request can promote the entry to a pending user.
        cache = GenericCache('external-user')
        for result in results:
            ext_id = result.pop('_ext_id', None)
            if ext_id is not None:
                cache.set(ext_id, self.externals[ext_id], 86400)

    @use_kwargs(
        {
            'first_name': fields.Str(validate=validate.Length(min=1)),
            'last_name': fields.Str(validate=validate.Length(min=1)),
            'email': fields.Str(validate=lambda s: len(s) > 3 and '@' in s),
            'affiliation': fields.Str(validate=validate.Length(min=1)),
            'exact': fields.Bool(missing=False),
            'external': fields.Bool(missing=False),
            'favorites_first': fields.Bool(missing=False),
        },
        validate=validate_with_message(
            lambda args: args.viewkeys() & {'first_name', 'last_name', 'email', 'affiliation'},
            'No criteria provided',
        ),
    )
    def _process(self, exact, external, favorites_first, **criteria):
        matches = search_users(
            exact=exact, include_pending=True, external=external, **criteria
        )
        self.externals = {}
        results = sorted(
            (self._serialize_entry(entry) for entry in matches),
            key=itemgetter('full_name'),
        )
        if favorites_first:
            # Stable sort: favorites bubble up while each group keeps its
            # alphabetical order.
            favorites = {u.id for u in session.user.favorite_users}
            results.sort(key=lambda x: x['id'] not in favorites)
        total = len(results)
        results = results[:10]
        self._process_pending_users(results)
        return jsonify(users=results, total=total)