Example #1
def test_field_name_unchanged(name):
    field_def = {name: 'default_string'}
    MyRecord = create_model('MyRecord', __base__=KafkaRecord, **field_def)
    schema = MyRecord(a=True).schema()

    name_in_schema = schema['fields'][0]['name']
    assert name_in_schema == name
Example #2
def _get_parsing_type(type_: Any, *, type_name: Optional[NameFactory] = None) -> Any:
    from pydantic.main import create_model

    if type_name is None:
        type_name = _generate_parsing_type_name
    if not isinstance(type_name, str):
        type_name = type_name(type_)
    return create_model(type_name, __root__=(type_, ...))
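
A minimal usage sketch of the pattern above, assuming pydantic v1 (where __root__ models are supported); the parse_as helper name is hypothetical:

from typing import List
from pydantic import create_model

def parse_as(type_, value):
    # Wrap the target type in a single-field root model so pydantic can
    # validate and coerce an arbitrary value against it.
    wrapper = create_model('ParsingModel', __root__=(type_, ...))
    return wrapper(__root__=value).__root__

print(parse_as(List[int], ['1', 2, '3']))  # [1, 2, 3] after coercion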
Example #3
    def __class_getitem__(  # type: ignore
            cls: Type[GenericModelT],
            params: Union[Type[Any], Tuple[Type[Any],
                                           ...]]) -> Type[BaseModel]:
        cached = _generic_types_cache.get((cls, params))
        if cached is not None:
            return cached
        if cls.__concrete__:
            raise TypeError(
                'Cannot parameterize a concrete instantiation of a generic model'
            )
        if not isinstance(params, tuple):
            params = (params, )
        if any(isinstance(param, TypeVar) for param in params):  # type: ignore
            raise TypeError(
                'Type parameters should be placed on typing.Generic, not GenericModel'
            )
        if Generic not in cls.__bases__:
            raise TypeError(
                f'Type {cls.__name__} must inherit from typing.Generic before being parameterized'
            )

        check_parameters_count(cls, params)
        typevars_map: Dict[Any, Any] = dict(zip(cls.__parameters__,
                                                params))  # type: ignore
        type_hints = get_type_hints(cls).items()
        instance_type_hints = {
            k: v
            for k, v in type_hints
            if getattr(v, '__origin__', None) is not ClassVar
        }
        concrete_type_hints: Dict[str, Type[Any]] = {
            k: resolve_type_hint(v, typevars_map)
            for k, v in instance_type_hints.items()
        }

        model_name = concrete_name(cls, params)
        validators = gather_validators(cls)
        fields: Dict[str, Tuple[Type[Any], Any]] = {
            k: (v, cls.__fields__[k].default)
            for k, v in concrete_type_hints.items() if k in cls.__fields__
        }
        created_model = create_model(
            model_name=model_name,
            __module__=cls.__module__,
            __base__=cls,
            __config__=None,
            __validators__=validators,
            **fields,
        )
        created_model.Config = cls.Config
        created_model.__concrete__ = True  # type: ignore
        _generic_types_cache[(cls, params)] = created_model
        if len(params) == 1:
            _generic_types_cache[(cls, params[0])] = created_model
        return created_model
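
For context, a short sketch of how this method is exercised from user code, assuming pydantic v1's GenericModel; the Wrapper model here is hypothetical:

from typing import Generic, TypeVar
from pydantic.generics import GenericModel

T = TypeVar('T')

class Wrapper(GenericModel, Generic[T]):
    value: T

# Subscripting the generic triggers __class_getitem__, which resolves the
# type variables and builds a cached concrete model via create_model.
IntWrapper = Wrapper[int]
print(IntWrapper(value='3').value)  # 3, coerced to int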
Example #4
def build_model_from_pb(name, pb_model):
    from google.protobuf.json_format import MessageToDict

    dp = MessageToDict(pb_model(), including_default_value_fields=True)

    all_fields = {k: (name if k in ('chunks', 'matches') else type(v), Field(default=v)) for k, v in dp.items()}
    if pb_model == QueryLangProto:
        all_fields['parameters'] = (Dict, Field(default={}))

    return create_model(name, **all_fields)
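
The same idea without the protobuf dependency: derive each field's type from a plain dict of default values. This is only a sketch under that assumption; the defaults dict and the Document name are hypothetical:

from pydantic import Field, create_model

defaults = {'id': '', 'granularity': 0, 'tags': []}

# Every key becomes a field whose annotation is inferred from its default.
Document = create_model(
    'Document',
    **{k: (type(v), Field(default=v)) for k, v in defaults.items()},
)
print(Document(granularity='2').granularity)  # 2, coerced to int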
Example #5
    def EditSchema(cls):
        edit_fields = {**cls.__fields__}
        for key, value in edit_fields.items():
            if isinstance(value, ModelField):
                edit_fields[key] = (value.type_, Field(None))
        Schema = create_model("EditSchema", **edit_fields)

        class EditSchema(Schema, cls):
            pass
        return EditSchema
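
A standalone sketch of the same "edit schema" idea, assuming pydantic v1 (where ModelField exposes outer_type_); the Item model is hypothetical:

from typing import Optional
from pydantic import BaseModel, create_model

class Item(BaseModel):
    name: str
    price: float

# Rebuild the model with every field optional so a partial edit payload
# validates without requiring all fields.
EditItem = create_model(
    'EditItem',
    **{name: (Optional[field.outer_type_], None) for name, field in Item.__fields__.items()},
)
print(EditItem(price=9.5).dict(exclude_unset=True))  # {'price': 9.5}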
Example #6
def test_optional_field_type(python_type, avro_type):
    MyRecord = create_model('MyRecord',
                            a=(Optional[python_type], None),
                            __base__=KafkaRecord)

    record = MyRecord()
    schema = record.schema()
    field_type = schema['fields'][0]['type']

    assert field_type == ['null', avro_type]
Example #7
def test_field_type(python_type, avro_type):
    MyRecord = create_model('MyRecord',
                            a=(python_type, ...),
                            __base__=KafkaRecord)

    record = MyRecord(a=python_type(1))
    schema = record.schema()
    field_type = schema['fields'][0]['type']

    assert field_type == avro_type
Example #8
def test_top_level_record_schema():
    MyRecord = create_model('MyRecord', a=(int, ...), __base__=KafkaRecord)
    schema = MyRecord(a=10).schema()

    expected = {
        'type': 'record',
        'name': 'MyRecord',
        'namespace': 'python.kafka.myrecord',
        'fields': [
            {
                'type': 'int',
                'name': 'a'
            },
        ]
    }

    assert schema == expected
Example #9
    def arguments_to_pydantic(self) -> Type[pydantic.BaseModel]:
        """
        Convert the method arguments to a pydantic model that allows to validate a message body with pydantic
        """
        sig = inspect.signature(self.function)

        def to_tuple(param: Parameter):
            if param.annotation is Parameter.empty:
                return (Any, param.default
                        if param.default is not Parameter.empty else None)
            if param.default is not Parameter.empty:
                return (param.annotation, param.default)
            else:
                return (param.annotation, None)

        return create_model(
            f"{self.function.__name__}_arguments", **{
                param.name: to_tuple(param)
                for param in sig.parameters.values()
            })
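
A self-contained sketch of the same signature-to-model conversion, assuming pydantic v1; the handler function and signature_model helper are hypothetical:

import inspect
from inspect import Parameter
from typing import Any
from pydantic import create_model

def handler(user_id: int, name: str = 'anonymous'):
    ...

def signature_model(func):
    # Map each parameter to an (annotation, default) pair, mirroring
    # to_tuple above, and let create_model build the validator.
    fields = {}
    for param in inspect.signature(func).parameters.values():
        annotation = Any if param.annotation is Parameter.empty else param.annotation
        default = None if param.default is Parameter.empty else param.default
        fields[param.name] = (annotation, default)
    return create_model(f'{func.__name__}_arguments', **fields)

Model = signature_model(handler)
print(Model(user_id='7').dict())  # {'user_id': 7, 'name': 'anonymous'}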
Example #10
def _get_parsing_type(type_: Any, source: str) -> Any:
    from pydantic.main import create_model

    type_name = getattr(type_, "__name__", str(type_))
    return create_model(f"ParsingModel[{type_name}] (for {source})", obj=(type_, ...))
Example #11
import json
import platform
from enum import Enum
from typing import List

import pytest
from pydantic.main import create_model

from ..jsf.parser import JSF

Object = create_model("Object")

expected = [
    ("boolean", bool),
    ("enum", Enum),
    ("inner-ref", Object),
    ("integer", int),
    ("null", type(None)),
    ("number", float),
    ("object", Object),
    ("custom", Object),
    ("string-enum", Enum),
    ("string", str),
    ("tuple", tuple),
]
if int(platform.python_version_tuple()[1]) < 9:
    expected.append(("array", List))

else:
    from typing import _GenericAlias
Example #12
def test_field_with_default(python_value, avro_value):
    MyRecord = create_model('MyRecord', a=python_value, __base__=KafkaRecord)
    schema = MyRecord().schema()
    default_value = schema['fields'][0]['default']

    assert default_value == avro_value
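
This test relies on create_model accepting a bare value instead of a (type, default) tuple; in pydantic v1 the value becomes the field default and the annotation is inferred from it. A minimal sketch with hypothetical field names:

from pydantic import create_model

# A bare value sets the default and lets pydantic infer the annotation.
Settings = create_model('Settings', retries=3, host='localhost')
print(Settings().dict())  # {'retries': 3, 'host': 'localhost'}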