def sort_by_annotations_position(cls: Type, elts: Collection[T], key: Callable[[T], str]) -> List[T]:
    """Sort *elts* to match the declaration order of ``cls``'s annotations.

    Annotations are gathered over the whole MRO (base classes first, so
    subclasses override position of redefined names). Elements whose key is
    not annotated anywhere sort after all annotated ones.
    """
    merged: Dict[str, Any] = OrderedDict()
    for base in reversed(cls.__mro__):
        merged.update(getattr(base, "__annotations__", ()))
    position_of = {name: index for index, name in enumerate(merged)}
    # unknown names share the largest rank, keeping their relative order
    fallback = len(position_of)
    return sorted(elts, key=lambda elt: position_of.get(key(elt), fallback))
def object(self, tp: AnyType, fields: Sequence[ObjectField]) -> JsonSchema:
    """Build the JSON schema of an object type from its fields.

    Fields are routed into four buckets: flattened sub-schemas, pattern
    properties, an additional-properties schema, or plain named properties.
    Flattened schemas are combined with the base schema via ``allOf`` plus
    ``unevaluatedProperties: false``.
    """
    cls = get_origin_or_type(tp)
    flattened: List[JsonSchema] = []
    patterns = {}
    extra: Union[bool, JsonSchema] = self.additional_properties
    props = {}
    required_aliases = []
    for field in fields:
        if field.flattened:
            self._check_flattened_schema(cls, field)
            flattened.append(self.visit_field(field))
        elif field.pattern_properties is not None:
            if field.pattern_properties is ...:
                # Ellipsis means "infer the pattern from the field type"
                pattern = infer_pattern(field.type, self.default_conversion)
            else:
                assert isinstance(field.pattern_properties, Pattern)
                pattern = field.pattern_properties
            patterns[pattern] = self._properties_schema(field)
        elif field.additional_properties:
            extra = self._properties_schema(field)
        else:
            alias = self.aliaser(field.alias)
            # TypedDict requiredness comes straight from the field
            is_required = (
                field.required if is_typed_dict(cls) else self._field_required(field)
            )
            props[alias] = self.visit_field(field, is_required)
            if is_required:
                required_aliases.append(alias)
    alias_of = {f.name: f.alias for f in fields}.__getitem__
    dependent = get_dependent_required(cls)
    result = json_schema(
        type=JsonType.OBJECT,
        properties=props,
        required=required_aliases,
        additionalProperties=extra,
        patternProperties=patterns,
        dependentRequired=OrderedDict(
            (alias_of(name), sorted(map(alias_of, dependent[name])))
            for name in sorted(dependent, key=alias_of)
        ),
    )
    if not flattened:
        return result
    return json_schema(
        type=JsonType.OBJECT,
        allOf=[result, *flattened],
        unevaluatedProperties=False,
    )
def object_fields(
    tp: AnyType, deserialization: bool = False, serialization: bool = False
) -> Mapping[str, ObjectField]:
    """Return the object fields of *tp* keyed by field name, in visit order.

    A throwaway ``ObjectVisitor`` subclass collects the fields; fields marked
    as skipped for the requested operation are filtered out by
    ``_skip_field``. Raises ``TypeError`` when *tp* has no fields (i.e. the
    visitor raises ``Unsupported``).
    """

    class GetFields(ObjectVisitor[Sequence[ObjectField]]):
        def _skip_field(self, field: ObjectField) -> bool:
            # NOTE(review): the flags look crossed here — ``skip.deserialization``
            # is paired with the *serialization* argument and vice versa.
            # Confirm against the ObjectVisitor contract whether this
            # inversion is intentional.
            return (field.skip.deserialization and serialization) or (
                field.skip.serialization and deserialization
            )

        def object(
            self, cls: Type, fields: Sequence[ObjectField]
        ) -> Sequence[ObjectField]:
            # The visitor's result is simply the field sequence itself.
            return fields

    try:
        return OrderedDict((f.name, f) for f in GetFields().visit(tp))
    except Unsupported:
        raise TypeError(f"{tp} doesn't have fields")
def _visit(self, cls: AnyType) -> Return:
    """Dispatch *cls* to the matching ``self.<kind>`` visit method.

    Check order matters: exact primitives and dataclasses first, then
    NewType (``__supertype__``), ``Any``, bare collections/mappings, and
    only then class-based checks (Enum, primitive subclasses, NamedTuple),
    finishing with Literal and TypedDict metaclass checks. Anything left
    goes to ``self.unsupported``.
    """
    if cls in PRIMITIVE_TYPES:
        return self.primitive(cls)
    if is_dataclass(cls):
        return self.dataclass(cls, *dataclass_types_and_fields(cls))  # type: ignore
    # NewType instances expose the wrapped type as __supertype__
    if hasattr(cls, "__supertype__"):
        return self.new_type(cls, cls.__supertype__)
    if cls is Any:
        return self.any()
    if cls in COLLECTION_TYPES:
        return self.collection(cls, Any)
    if cls in MAPPING_TYPES:
        return self.mapping(cls, Any, Any)
    # issubclass raises TypeError on non-classes (e.g. generic aliases);
    # skip the class-based checks for those.
    try:
        issubclass(cls, object)
    except TypeError:
        pass
    else:
        if issubclass(cls, Enum):
            return self.enum(cls)
        for primitive in PRIMITIVE_TYPES:
            if issubclass(cls, primitive):
                return self.subprimitive(cls, primitive)
        # NamedTuple: detected structurally via tuple subclass + _fields
        if issubclass(cls, tuple) and hasattr(cls, "_fields"):
            if hasattr(cls, "__annotations__"):
                types = type_hints_cache(cls)
            elif hasattr(cls, "_field_types"):  # pragma: no cover
                # Fixed: the guard previously tested "__field_types", which
                # never matches the "_field_types" attribute read below,
                # leaving this legacy-NamedTuple branch unreachable.
                types = cls._field_types  # type: ignore
            else:  # pragma: no cover
                types = OrderedDict((f, Any) for f in cls._fields)  # type: ignore
            return self.named_tuple(cls, types, cls._field_defaults)  # type: ignore
    if isinstance(cls, _LiteralMeta):  # pragma: no cover py36
        return self.literal(cls.__values__)  # type: ignore
    # cannot use issubclass(..., TypedDict)
    if isinstance(cls, _TypedDictMeta):
        total = cls.__total__  # type: ignore
        assert isinstance(cls, type)
        return self.typed_dict(cls, type_hints_cache(cls), total)
    return self.unsupported(cls)
def _fields_and_init(
    cls: type, fields_and_methods: Union[Iterable[Any], Callable[[], Iterable[Any]]]
) -> Tuple[Sequence[ObjectField], Callable[[Any, Any], None]]:
    """Resolve a user-supplied mix of fields and methods for *cls*.

    *fields_and_methods* may be an iterable (or a callable returning one) of:
    ``...`` (include all of the class's own fields), ``Field`` instances or
    field-name strings, methods/callables (become computed fields named after
    the callable), each optionally wrapped in a ``(elt, metadata)`` tuple.

    Returns the resolved fields plus an ``__init__``-like function that
    copies plain fields from a source object and evaluates method-backed
    fields on it.
    """
    fields = object_fields(cls)
    output_fields: Dict[str, ObjectField] = OrderedDict()
    methods = []
    if callable(fields_and_methods):
        # Lazy form: call to obtain the actual iterable.
        fields_and_methods = fields_and_methods()
    for elt in fields_and_methods:
        if elt is ...:
            # Ellipsis pulls in every field of the class.
            output_fields.update(fields)
            continue
        if isinstance(elt, tuple):
            elt, metadata = elt
        else:
            metadata = empty_dict
        if not isinstance(metadata, Mapping):
            raise TypeError(f"Invalid metadata {metadata}")
        # Normalize: Field -> name -> ObjectField, method -> wrapper.
        if isinstance(elt, Field):
            elt = elt.name
        if isinstance(elt, str) and elt in fields:
            elt = fields[elt]
        if is_method(elt):
            elt = method_wrapper(elt)
        if isinstance(elt, ObjectField):
            if metadata:
                # Extra metadata overrides the field's own; default is reset.
                output_fields[elt.name] = replace(
                    elt, metadata={**elt.metadata, **metadata}, default=MISSING_DEFAULT
                )
            else:
                output_fields[elt.name] = elt
            continue
        elif callable(elt):
            # Computed field: infer its type from the callable's hints,
            # substituting type vars from the first parameter against cls.
            types = get_type_hints(elt)
            first_param = next(iter(inspect.signature(elt).parameters))
            substitution, _ = subtyping_substitution(
                types.get(first_param, with_parameters(cls)), cls
            )
            ret = substitute_type_vars(types.get("return", Any), substitution)
            output_fields[elt.__name__] = ObjectField(
                elt.__name__, ret, metadata=metadata
            )
            methods.append((elt, output_fields[elt.__name__]))
        else:
            raise TypeError(f"Invalid serialization member {elt} for class {cls}")
    # A method only survives if its generated field was not later overridden.
    serialized_methods = [m for m, f in methods if output_fields[f.name] is f]
    serialized_fields = list(
        output_fields.keys() - {m.__name__ for m in serialized_methods}
    )

    def __init__(self, obj):
        # Copy plain fields, then evaluate method-backed ones on *obj*.
        for field in serialized_fields:
            setattr(self, field, getattr(obj, field))
        for method in serialized_methods:
            setattr(self, method.__name__, method(obj))

    return tuple(output_fields.values()), __init__
def object(
    self,
    tp: AnyType,
    fields: Sequence[ObjectField],
    resolvers: Sequence[ResolverField] = (),
) -> TypeFactory[graphql.GraphQLOutputType]:
    """Build a GraphQL object/interface type factory for *tp*.

    Collects plain (non-aggregate) fields, the explicitly passed resolvers,
    and the type's registered resolvers; sorts them by annotation position;
    then returns a ``TypeFactory`` producing either a ``GraphQLInterfaceType``
    or a ``GraphQLObjectType`` depending on ``is_interface(cls)``.
    """
    cls = get_origin_or_type(tp)
    all_fields = {
        f.alias: self._field(f) for f in fields if not f.is_aggregate
    }
    # Track the original name behind each alias for position-based sorting.
    name_by_aliases = {f.alias: f.name for f in fields}
    all_fields.update({r.alias: self._resolver(r) for r in resolvers})
    name_by_aliases.update(
        {r.alias: r.resolver.func.__name__ for r in resolvers}
    )
    # Registered resolvers of the type are added on top.
    for alias, (resolver, types) in get_resolvers(tp).items():
        resolver_field = ResolverField(
            alias,
            resolver,
            types,
            resolver.parameters,
            resolver.parameters_metadata,
        )
        all_fields[alias] = self._resolver(resolver_field)
        name_by_aliases[alias] = resolver.func.__name__
    # Field order follows the class's annotation declaration order.
    sorted_fields = sort_by_annotations_position(
        cls, all_fields, name_by_aliases.__getitem__
    )
    visited_fields = OrderedDict(
        (self.aliaser(a), all_fields[a]) for a in sorted_fields
    )
    flattened_types = {
        f.name: self._visit_flattened(f) for f in fields if f.flattened
    }

    def field_thunk() -> graphql.GraphQLFieldMap:
        # Thunk: merging is deferred until GraphQL resolves the fields.
        return merge_fields(cls, visited_fields, flattened_types)

    interfaces = list(map(self.visit, get_interfaces(cls)))
    interface_thunk = None
    if interfaces:

        def interface_thunk() -> Collection[graphql.GraphQLInterfaceType]:
            # Own interfaces plus those contributed by flattened members,
            # deduplicated and sorted by name for deterministic output.
            result = {
                cast(graphql.GraphQLInterfaceType, i.raw_type) for i in interfaces
            }
            for flattened_factory in flattened_types.values():
                flattened = cast(
                    Union[graphql.GraphQLObjectType, graphql.GraphQLInterfaceType],
                    flattened_factory.raw_type,
                )
                result.update(flattened.interfaces)
            return sorted(result, key=lambda i: i.name)

    def factory(
        name: Optional[str], description: Optional[str]
    ) -> Union[graphql.GraphQLObjectType, graphql.GraphQLInterfaceType]:
        name = unwrap_name(name, cls)
        if is_interface(cls):
            return graphql.GraphQLInterfaceType(
                name, field_thunk, interface_thunk, description=description
            )
        else:
            return graphql.GraphQLObjectType(
                name,
                field_thunk,
                interface_thunk,
                # Lets GraphQL map runtime instances back to this type.
                is_type_of=lambda obj, _: isinstance(obj, cls),
                description=description,
            )

    return TypeFactory(factory)
def object_fields(tp: AnyType) -> Mapping[str, ObjectField]:
    """Return the object fields of *tp* keyed by field name, in visit order.

    Raises ``TypeError`` when the visitor deems *tp* unsupported (i.e. it
    has no fields).
    """
    try:
        return OrderedDict(
            (field.name, field) for field in GetFields().visit(tp)
        )
    except Unsupported:
        raise TypeError(f"{tp} doesn't have fields")
def method(data: Any) -> Any:
    """Deserialize the dict *data* into an instance of the enclosing ``cls``.

    Closure over deserialization state built by the enclosing factory
    (``normal_fields``, ``flattened_fields``, ``pattern_fields``,
    ``additional_field``, ``validators``, ...). Processes fields in phases:
    plain fields, then flattened/pattern/additional aggregates, then
    unexpected-key detection, validator filtering, and finally construction
    of ``cls`` followed by post-init validation. All field errors are
    accumulated and raised together as one ``ValidationError``.
    """
    if not isinstance(data, dict):
        raise bad_type(data, dict)
    values: Dict[str, Any] = {}
    # Aliases actually consumed from *data*; used to find leftover keys.
    aliases: List[str] = []
    errors = list(constraint_errors(data)) if constraint_errors else []
    field_errors: Dict[ErrorKey, ValidationError] = OrderedDict()
    for (
        name,
        alias,
        field_method,
        required,
        fall_back_on_default,
    ) in normal_fields:
        if alias in data:
            aliases.append(alias)
            try:
                values[name] = field_method(data[alias])
            except ValidationError as err:
                # fall_back_on_default swallows the error (default is used).
                if not fall_back_on_default:
                    field_errors[alias] = err
        elif not required:
            pass
        elif required is True:
            field_errors[alias] = MISSING_PROPERTY
        else:
            # ``required`` is a set of co-requiring aliases: the field is
            # only missing if one of them is present.
            assert isinstance(required, AbstractSet)
            requiring = required & data.keys()
            if requiring:
                msg = f"missing property (required by {sorted(requiring)})"
                field_errors[alias] = ValidationError([msg])
    if has_aggregate_field:
        for (
            name,
            flattened_alias,
            field_method,
            fall_back_on_default,
        ) in flattened_fields:
            # Flattened field: gather its sub-aliases from the top level.
            flattened = {
                alias: data[alias] for alias in flattened_alias if alias in data
            }
            aliases.extend(flattened)
            try:
                values[name] = field_method(flattened)
            except ValidationError as err:
                if not fall_back_on_default:
                    errors.extend(err.messages)
                    field_errors.update(err.children)
        # Keys not consumed by any field so far.
        if len(data) != len(aliases):
            remain = data.keys() - set(aliases)
        else:
            remain = set()
        for (
            name,
            pattern,
            field_method,
            fall_back_on_default,
        ) in pattern_fields:
            # Pattern-properties field: claim remaining keys matching pattern.
            matched = {
                key: data[key] for key in remain if pattern.match(key)
            }
            remain -= matched.keys()
            try:
                values[name] = field_method(matched)
            except ValidationError as err:
                if not fall_back_on_default:
                    errors.extend(err.messages)
                    field_errors.update(err.children)
        if additional_field is not None:
            # Additional-properties field absorbs everything left over.
            name, field_method, fall_back_on_default = additional_field
            additional = {key: data[key] for key in remain}
            try:
                values[name] = field_method(additional)
            except ValidationError as err:
                if not fall_back_on_default:
                    errors.extend(err.messages)
                    field_errors.update(err.children)
        elif remain and not additional_properties:
            for key in remain:
                field_errors[key] = UNEXPECTED_PROPERTY
    elif len(data) != len(aliases) and not additional_properties:
        # No aggregate fields: any unconsumed key is unexpected.
        for key in data.keys() - set(aliases):
            field_errors[key] = UNEXPECTED_PROPERTY
    validators2: Sequence[Validator]
    if validators:
        # Build the init-kwargs view (deserialized value or default).
        init: Dict[str, Any] = {}
        for name, default_factory in init_defaults:
            if name in values:
                init[name] = values[name]
            elif name not in field_errors:
                assert default_factory is not None
                init[name] = default_factory()
        # Don't keep validators when all dependencies are default
        validators2 = [
            v for v in validators if v.dependencies & values.keys()
        ]
        if field_errors or errors:
            error = ValidationError(errors, field_errors)
            # Drop validators depending on fields that failed or are
            # modified post-init — their inputs are unreliable.
            invalid_fields = field_errors.keys() | post_init_modified
            validators2 = [
                v for v in validators2 if not v.dependencies & invalid_fields
            ]
            try:
                # Run surviving validators on a mock to collect extra errors.
                validate(
                    ValidatorMock(cls, values),
                    validators2,
                    init,
                    aliaser=aliaser,
                )
            except ValidationError as err:
                error = merge_errors(error, err)
            raise error
    elif field_errors or errors:
        raise ValidationError(errors, field_errors)
    else:
        validators2, init = (), ...  # type: ignore # only for linter
    try:
        res = cls(**values)
    except (AssertionError, ValidationError):
        raise
    except TypeError as err:
        # A signature mismatch means this deserializer doesn't fit cls;
        # any other TypeError from __init__ is reported as validation.
        if str(err).startswith("__init__() got"):
            raise Unsupported(cls)
        else:
            raise ValidationError([str(err)])
    except Exception as err:
        raise ValidationError([str(err)])
    if validators2:
        validate(res, validators2, init, aliaser=aliaser)
    return res
def dataclass(
    self,
    cls: Type,
    types: Mapping[str, AnyType],
    fields: Sequence[Field],
    init_vars: Sequence[Field],
) -> JsonSchema:
    """Build the JSON schema of dataclass *cls*.

    Fields are dispatched by metadata: merged fields become ``allOf``
    members, properties-metadata fields become pattern/additional
    properties, everything else becomes a named property. In serialization
    mode, serialized resolvers are appended as read-only properties.
    """
    assert is_dataclass(cls)
    self._check_constraints(ObjectConstraints)
    properties = {}
    required: List[str] = []
    merged_schemas = []
    pattern_properties = {}
    additional_properties: Union[
        bool, JsonSchema] = settings.additional_properties
    for field in get_fields(fields, init_vars, self.operation):
        metadata = check_metadata(field)
        field_type = types[field.name]
        if MERGED_METADATA in metadata:
            # Merged fields are inlined into the parent via allOf below.
            self._check_merged_schema(cls, field, field_type)
            merged_schemas.append(self.visit_field(field, field_type))
        elif PROPERTIES_METADATA in metadata:
            pattern = metadata[PROPERTIES_METADATA]
            properties_schema = self._properties_schema(field, field_type)
            if pattern is None:
                # None -> schema for additionalProperties.
                additional_properties = properties_schema
            elif pattern is ...:
                # Ellipsis -> infer the key pattern from the field type.
                pattern_properties[infer_pattern(
                    field_type)] = properties_schema
            else:
                pattern_properties[pattern] = properties_schema
        else:
            alias = self.aliaser(get_alias(field))
            # Non-init fields are read-only; InitVars are write-only.
            properties[alias] = json_schema(
                readOnly=not field.init,
                writeOnly=field in init_vars,
                **self.visit_field(field, field_type),
            )
            if is_required(field):
                required.append(alias)
    if self.operation == Operation.SERIALIZATION:
        # Serialized resolvers only exist on output -> read-only.
        for name, resolver in get_serialized_resolvers(cls).items():
            with self._replace_conversions(resolver.conversions):
                properties[self.aliaser(name)] = json_schema(
                    readOnly=True,
                    **self.visit_with_schema(resolver.return_type,
                                             resolver.schema),
                )
    # dependentRequired, with both keys and values aliased and sorted
    # for deterministic schema output.
    dependent_required = {
        self.aliaser(get_alias(field)):
        sorted(self.aliaser(get_alias(req)) for req in required_by)
        for field, required_by in get_requirements(
            cls, DependentRequired.requiring, self.operation).items()
    }
    result = json_schema(
        type=JsonType.OBJECT,
        properties=properties,
        required=required,
        additionalProperties=additional_properties,
        patternProperties=pattern_properties,
        dependentRequired=OrderedDict((f, dependent_required[f])
                                      for f in sorted(dependent_required)),
    )
    if merged_schemas:
        result = json_schema(
            type=JsonType.OBJECT,
            allOf=[result, *merged_schemas],
            unevaluatedProperties=False,
        )
    return result