def test_base_model_reserved_name():
    """Reserved-word field names get a trailing underscore plus an alias."""
    # Case 1: no explicit alias — the raw reserved name becomes the alias.
    except_field = DataModelField(
        name='except', data_type=DataType(type='str'), required=True
    )
    model = BaseModel(
        fields=[except_field],
        reference=Reference(name='test_model', path='test_model'),
    )
    assert model.name == 'test_model'
    assert model.fields == [except_field]
    assert model.decorators == []
    assert model.render() == (
        "class test_model(BaseModel):\n"
        "    except_: str = Field(..., alias='except')"
    )

    # Case 2: an explicit alias wins over the reserved name.
    def_field = DataModelField(
        name='def', data_type=DataType(type='str'), required=True, alias='def-field'
    )
    model = BaseModel(
        fields=[def_field],
        reference=Reference(name='test_model', path='test_model'),
    )
    assert model.name == 'test_model'
    assert model.fields == [def_field]
    assert model.decorators == []
    assert model.render() == (
        "class test_model(BaseModel):\n"
        "    def_: str = Field(..., alias='def-field')"
    )
def test_data_type_type_hint():
    """type_hint renders plain types, no-arg funcs, and funcs with kwargs."""
    assert DataType(type='str').type_hint == 'str'
    assert DataType(type='constr', is_func=True).type_hint == 'constr()'
    constrained = DataType(type='constr', is_func=True, kwargs={'min_length': 10})
    assert constrained.type_hint == 'constr(min_length=10)'
def test_sort_data_models():
    """Models sort topologically; self/forward refs need update_forward_refs."""
    refs = {key: Reference(path=key, original_name=key, name=key) for key in 'ABC'}
    type_a = DataType(reference=refs['A'])
    type_b = DataType(reference=refs['B'])
    type_c = DataType(reference=refs['C'])
    models = [
        BaseModel(
            fields=[
                DataModelField(data_type=type_a),      # self reference
                DataModelFieldBase(data_type=type_c),  # forward reference
            ],
            reference=refs['A'],
        ),
        BaseModel(fields=[DataModelField(data_type=type_b)], reference=refs['B']),
        BaseModel(fields=[DataModelField(data_type=type_b)], reference=refs['C']),
    ]
    unresolved, resolved, require_update = sort_data_models(models)
    expected = OrderedDict([('B', models[1]), ('C', models[2]), ('A', models[0])])
    assert resolved == expected
    assert unresolved == []
    # B references itself; A references itself and forward-references C.
    assert require_update == ['B', 'A']
def test_get_data_type_array(schema_types, result_types):
    """Multi-type schemas produce a union DataType; 'null' adds Optional."""
    parser = JsonSchemaParser('')
    nullable = 'null' in schema_types
    expected = DataType(
        data_types=[DataType(type=result) for result in result_types],
        is_optional=nullable,
        imports=[IMPORT_OPTIONAL] if nullable else [],
    )
    assert parser.get_data_type(JsonSchemaObject(type=schema_types)) == expected
def get_data_float_type(types: Types, **kwargs: Any) -> DataType:
    """Map JSON Schema numeric constraints to a pydantic float type.

    JSON Schema ``maximum``/``minimum`` are inclusive bounds and
    ``exclusiveMaximum``/``exclusiveMinimum`` are exclusive ones, so they
    translate to pydantic's ``le``/``ge`` and ``lt``/``gt`` respectively.
    (The previous code had every one of these mappings inverted, and tested
    ``le``/``ge`` for the PositiveFloat/NegativeFloat shortcuts.)
    """
    data_type_kwargs: Dict[str, str] = {}
    if kwargs.get('maximum') is not None:
        data_type_kwargs['le'] = kwargs['maximum']
    if kwargs.get('exclusiveMaximum') is not None:
        data_type_kwargs['lt'] = kwargs['exclusiveMaximum']
    if kwargs.get('minimum') is not None:
        data_type_kwargs['ge'] = kwargs['minimum']
    if kwargs.get('exclusiveMinimum') is not None:
        data_type_kwargs['gt'] = kwargs['exclusiveMinimum']
    if kwargs.get('multipleOf') is not None:
        data_type_kwargs['multiple_of'] = kwargs['multipleOf']
    if data_type_kwargs:
        # A lone gt=0 is exactly PositiveFloat; a lone lt=0 is NegativeFloat.
        if data_type_kwargs == {'gt': 0}:
            return DataType(type='PositiveFloat')
        if data_type_kwargs == {'lt': 0}:
            return DataType(type='NegativeFloat')
        return DataType(
            type='confloat',
            is_func=True,
            kwargs=data_type_kwargs,
            imports_=[IMPORT_CONFLOAT],
        )
    return type_map[types]
def get_data_float_type(types: Types, **kwargs: Any) -> DataType:
    """Return the pydantic float type matching the given numeric constraints."""
    constraints = transform_kwargs(kwargs, number_kwargs)
    if not constraints:
        return type_map[types]
    # A lone zero bound maps onto pydantic's dedicated shorthand types.
    if constraints == {'gt': 0}:
        return DataType(type='PositiveFloat')
    if constraints == {'lt': 0}:
        return DataType(type='NegativeFloat')
    return DataType(
        type='confloat',
        is_func=True,
        kwargs=constraints,
        imports_=[IMPORT_CONFLOAT],
    )
def get_data_float_type(types: Types, **kwargs: Any) -> DataType:
    """Return the pydantic float type (with imports) for the constraints."""
    constraints = transform_kwargs(kwargs, number_kwargs)
    if not constraints:
        return type_map[types]
    if constraints == {'gt': 0}:
        return DataType(type='PositiveFloat', imports_=[IMPORT_POSITIVE_FLOAT])
    if constraints == {'lt': 0}:
        return DataType(type='NegativeFloat', imports_=[IMPORT_NEGATIVE_FLOAT])
    # confloat() expects float bounds, so coerce every constraint value.
    return DataType(
        type='confloat',
        is_func=True,
        kwargs={key: float(value) for key, value in constraints.items()},
        imports_=[IMPORT_CONFLOAT],
    )
def test_custom_root_type_required():
    """A required root field renders as a bare __root__ annotation."""
    root_field = DataModelFieldBase(data_type=DataType(type='str'), required=True)
    model = CustomRootType(
        fields=[root_field],
        reference=Reference(name='test_model', path='test_model'),
    )
    assert model.name == 'test_model'
    assert model.fields == [
        DataModelFieldBase(data_type=DataType(type='str'), required=True)
    ]
    assert model.render() == 'class test_model(BaseModel):\n    __root__: str'
def get_data_int_type(types: Types, **kwargs: Any) -> DataType:
    """Return the pydantic integer type matching the given numeric constraints."""
    constraints = transform_kwargs(kwargs, number_kwargs)
    if not constraints:
        return type_map[types]
    # A lone zero bound maps onto pydantic's dedicated shorthand types.
    if constraints == {'gt': 0}:
        return DataType(type='PositiveInt')
    if constraints == {'lt': 0}:
        return DataType(type='NegativeInt')
    # conint() expects int bounds, so coerce every constraint value.
    return DataType(
        type='conint',
        is_func=True,
        kwargs={key: int(value) for key, value in constraints.items()},
        imports_=[IMPORT_CONINT],
    )
def test_data_model():
    """Render a dataclass-style model from a custom template file.

    The original version created the template with ``delete=False`` and never
    removed it, leaking a temp file on every test run; the ``try/finally``
    below guarantees cleanup.
    """
    import os

    field = DataModelFieldBase(
        name='a', data_type=DataType(type='str'), default='abc', required=True
    )
    # delete=False is required: the file must survive close() so the model
    # can read it at render time (notably on Windows).
    with NamedTemporaryFile('w', delete=False) as dummy_template:
        dummy_template.write(template)
    try:
        B.TEMPLATE_FILE_PATH = dummy_template.name
        data_model = B(
            fields=[field],
            decorators=['@validate'],
            base_classes=[
                Reference(path='base', original_name='base', name='Base')
            ],
            reference=Reference(path='test_model', name='test_model'),
        )
        assert data_model.name == 'test_model'
        assert data_model.fields == [field]
        assert data_model.decorators == ['@validate']
        assert data_model.base_class == 'Base'
        assert data_model.render() == (
            '@validate\n' '@dataclass\n' 'class test_model:\n' '    a: str'
        )
    finally:
        os.unlink(dummy_template.name)
def test_data_model():
    """Render a model from a custom template (legacy name-based constructor)."""
    field = DataModelField(
        name='a', data_types=[DataType(type='str')], default='abc', required=True
    )
    with NamedTemporaryFile('w') as dummy_template:
        dummy_template.write(template)
        dummy_template.seek(0)
        B.TEMPLATE_FILE_PATH = dummy_template.name
        data_model = B(
            name='test_model',
            fields=[field],
            decorators=['@validate'],
            base_classes=['Base'],
        )
        assert data_model.name == 'test_model'
        assert data_model.fields == [field]
        assert data_model.decorators == ['@validate']
        assert data_model.base_class == 'Base'
        assert data_model.render() == (
            '@validate\n' '@dataclass\n' 'class test_model:\n' '    a: str'
        )
def test_data_model_exception():
    """Instantiating a model class without TEMPLATE_FILE_PATH set must raise."""
    str_field = DataModelFieldBase(
        name='a', data_type=DataType(type='str'), default='abc', required=True
    )
    with pytest.raises(Exception, match='TEMPLATE_FILE_PATH is undefined'):
        C(name='abc', fields=[str_field])
def test_get_data_type_array(schema_types, result_types):
    """Each schema type maps to its own DataType; 'null' marks them Optional."""
    parser = JsonSchemaParser(BaseModel, CustomRootType)
    nullable = 'null' in schema_types
    expected = [
        DataType(
            type=result,
            optional=nullable,
            imports_=[IMPORT_OPTIONAL] if nullable else None,
        )
        for result in result_types
    ]
    assert parser.get_data_type(JsonSchemaObject(type=schema_types)) == expected
def test_custom_root_type_decorator():
    """Decorators render above the class; base classes go in the header."""
    root_field = DataModelFieldBase(data_type=DataType(type='str'), required=True)
    model = CustomRootType(
        name='test_model',
        fields=[root_field],
        decorators=['@validate'],
        base_classes=['Base'],
    )
    assert model.name == 'test_model'
    assert model.fields == [
        DataModelFieldBase(data_type=DataType(type='str'), required=True)
    ]
    assert model.base_class == 'Base'
    assert model.render() == (
        '@validate\nclass test_model(Base):\n    __root__: str'
    )
def test_base_model():
    """Minimal BaseModel with a single required field."""
    required_field = DataModelField(
        name='a', data_types=[DataType(type='str')], required=True
    )
    model = BaseModel(name='test_model', fields=[required_field])
    assert model.name == 'test_model'
    assert model.fields == [required_field]
    assert model.decorators == []
    assert model.render() == 'class test_model(BaseModel):\n    a: str'
def test_sort_data_models_unresolved():
    """An unresolvable chain (D -> Z -> V, with V never defined) raises."""
    refs = {
        key: Reference(path=key, original_name=key, name=key)
        for key in ('A', 'B', 'C', 'D', 'V', 'Z')
    }
    # No DataType is built for D: only A, B, C, V, Z are referenced by fields.
    types = {key: DataType(reference=refs[key]) for key in ('A', 'B', 'C', 'V', 'Z')}
    models = [
        BaseModel(
            fields=[
                DataModelField(data_type=types['A']),
                DataModelFieldBase(data_type=types['C']),
            ],
            reference=refs['A'],
        ),
        BaseModel(
            fields=[DataModelField(data_type=types['B'])], reference=refs['B']
        ),
        BaseModel(
            fields=[DataModelField(data_type=types['B'])], reference=refs['C']
        ),
        BaseModel(
            fields=[
                DataModelField(data_type=types['A']),
                DataModelField(data_type=types['C']),
                DataModelField(data_type=types['Z']),
            ],
            reference=refs['D'],
        ),
        BaseModel(
            fields=[DataModelField(data_type=types['V'])], reference=refs['Z']
        ),
    ]
    with pytest.raises(Exception):
        sort_data_models(models)
def get_data_decimal_type(types: Types, **kwargs: Any) -> DataType:
    """Return condecimal(...) when constraints exist, else the plain type."""
    constraints = transform_kwargs(kwargs, number_kwargs)
    if not constraints:
        return type_map[types]
    # condecimal() wants Decimal bounds, so convert each constraint value.
    return DataType(
        type='condecimal',
        is_func=True,
        kwargs={key: Decimal(value) for key, value in constraints.items()},
        imports_=[IMPORT_CONDECIMAL],
    )
def get_data_str_type(types: Types, **kwargs: Any) -> DataType:
    """Return constr(...) when string constraints exist, else the plain type."""
    constraints = transform_kwargs(kwargs, string_kwargs)
    if not constraints:
        return type_map[types]
    return DataType(
        type='constr',
        is_func=True,
        kwargs=constraints,
        imports_=[IMPORT_CONSTR],
    )
def test_get_data_type(schema_type, schema_format, result_type, from_, import_):
    """type/format pairs resolve to the expected DataType and import list."""
    expected_imports: Optional[List[Import]] = (
        [Import(from_=from_, import_=import_)] if from_ and import_ else []
    )
    parser = JsonSchemaParser('')
    actual = parser.get_data_type(
        JsonSchemaObject(type=schema_type, format=schema_format)
    )
    assert actual == DataType(type=result_type, imports=expected_imports)
def test_data_class():
    """Minimal DataClass with a single required field."""
    required_field = DataModelFieldBase(
        name='a', data_types=[DataType(type='str')], required=True
    )
    data_class = DataClass(name='test_model', fields=[required_field])
    assert data_class.name == 'test_model'
    assert data_class.fields == [required_field]
    assert data_class.decorators == []
    assert data_class.render() == '@dataclass\nclass test_model:\n    a: str'
def get_data_str_type(types: Types, **kwargs: Any) -> DataType:
    """Map JSON Schema string constraints onto pydantic's constr().

    Attaches ``IMPORT_CONSTR`` so the generated model imports ``constr``,
    matching every other constrained-type factory in this module (the
    previous version omitted it, producing code with an unresolved name).
    """
    data_type_kwargs: Dict[str, str] = {}
    if kwargs.get('pattern') is not None:
        data_type_kwargs['regex'] = kwargs['pattern']
    if kwargs.get('minLength') is not None:
        data_type_kwargs['min_length'] = kwargs['minLength']
    if kwargs.get('maxLength') is not None:
        data_type_kwargs['max_length'] = kwargs['maxLength']
    if data_type_kwargs:
        return DataType(
            type='constr',
            is_func=True,
            kwargs=data_type_kwargs,
            imports_=[IMPORT_CONSTR],
        )
    return type_map[types]
def get_data_str_type(types: Types, **kwargs: Any) -> DataType:
    """Return constr(...) for string constraints, quoting the regex value."""
    constraints = transform_kwargs(kwargs, string_kwargs)
    if not constraints:
        return type_map[types]
    if 'regex' in constraints:
        # The pattern must appear as a quoted string literal in generated code.
        constraints['regex'] = "'{}'".format(constraints['regex'])
    return DataType(
        type='constr',
        is_func=True,
        kwargs=constraints,
        imports_=[IMPORT_CONSTR],
    )
def test_get_data_type(schema_type, schema_format, result_type, from_, import_):
    """type/format pairs resolve to the expected DataType (dict comparison)."""
    # Bind to a fresh local instead of reassigning the import_ parameter.
    expected_import: Optional[Import] = (
        Import(from_=from_, import_=import_) if from_ and import_ else None
    )
    parser = JsonSchemaParser('')
    actual = parser.get_data_type(
        JsonSchemaObject(type=schema_type, format=schema_format)
    )
    expected = DataType(type=result_type, import_=expected_import)
    assert actual.dict() == expected.dict()
def test_base_model_optional():
    """A non-required field renders as Optional[...] with its default."""
    optional_field = DataModelField(
        name='a', data_types=[DataType(type='str')], default="'abc'", required=False
    )
    model = BaseModel(name='test_model', fields=[optional_field])
    assert model.name == 'test_model'
    assert model.fields == [optional_field]
    assert model.decorators == []
    assert model.render() == (
        "class test_model(BaseModel):\n    a: Optional[str] = 'abc'"
    )
def test_data_class_optional():
    """A required field with a default renders as `a: str = 'abc'`."""
    defaulted_field = DataModelField(
        name='a', data_types=[DataType(type='str')], default="'abc'", required=True
    )
    data_class = DataClass(name='test_model', fields=[defaulted_field])
    assert data_class.name == 'test_model'
    assert data_class.fields == [defaulted_field]
    assert data_class.decorators == []
    assert data_class.render() == (
        "@dataclass\nclass test_model:\n    a: str = 'abc'"
    )
def test_custom_root_type():
    """An optional root field renders with Optional[] and its default."""
    root_field = DataModelFieldBase(
        name='a', data_type=DataType(type='str'), default='abc', required=False
    )
    model = CustomRootType(
        fields=[root_field],
        reference=Reference(name='test_model', path='test_model'),
    )
    assert model.name == 'test_model'
    assert model.fields == [
        DataModelFieldBase(
            name='a', data_type=DataType(type='str'), default='abc', required=False
        )
    ]
    assert model.render() == (
        "class test_model(BaseModel):\n    __root__: Optional[str] = 'abc'"
    )
def test_get_data_type(schema_type, schema_format, result_type, from_, import_):
    """type/format pairs resolve to a single-element DataType list."""
    if from_ and import_:
        expected_imports: Optional[List[Import]] = [
            Import(from_=from_, import_=import_)
        ]
    else:
        expected_imports = None
    parser = JsonSchemaParser(BaseModel, CustomRootType)
    actual = parser.get_data_type(
        JsonSchemaObject(type=schema_type, format=schema_format)
    )
    assert actual == [DataType(type=result_type, imports_=expected_imports)]
def parse_responses(
    self,
    name: str,
    responses: Dict[str, Union[ResponseObject, ReferenceObject]],
    path: List[str],
) -> Dict[str, Dict[str, DataType]]:
    """Parse an operation's responses and record its return type.

    Delegates parsing to the superclass, then derives the operation's return
    type from the '200' response (falling back to ``None`` when there is no
    200 entry) plus any other non-empty status codes. Side effects: appends
    each used DataType to ``self.data_types`` and stores the response /
    additional_responses / return_type hints on ``self._temporary_operation``.

    Returns the mapping produced by the superclass, keyed by status code.
    """
    data_types = super().parse_responses(name, responses, path)
    status_code_200 = data_types.get('200')
    if status_code_200:
        # Only the first media type of the 200 response is considered.
        data_type = list(status_code_200.values())[0]
        if data_type:
            self.data_types.append(data_type)
    else:
        # No 200 response: the operation is typed as returning None.
        data_type = DataType(type='None')
    type_hint = data_type.type_hint  # TODO: change to lazy loading
    self._temporary_operation['response'] = type_hint
    return_types = {type_hint: data_type}
    for status_code, additional_responses in data_types.items():
        if status_code != '200' and additional_responses:  # 200 is processed above
            # As for 200, only the first media type per status code is used.
            data_type = list(additional_responses.values())[0]
            if data_type:
                self.data_types.append(data_type)
            type_hint = data_type.type_hint  # TODO: change to lazy loading
            self._temporary_operation.setdefault('additional_responses', {})[
                status_code
            ] = {'model': type_hint}
            return_types[type_hint] = data_type
    if len(return_types) == 1:
        return_type = next(iter(return_types.values()))
    else:
        # Multiple distinct hints collapse into a single union DataType.
        return_type = DataType(data_types=list(return_types.values()))
    if return_type:
        self.data_types.append(return_type)
    self._temporary_operation['return_type'] = return_type.type_hint
    return data_types
def test_data_class_base_class():
    """Base class references render in the class header."""
    required_field = DataModelFieldBase(
        name='a', data_type=DataType(type='str'), required=True
    )
    data_class = DataClass(
        fields=[required_field],
        base_classes=[Reference(name='Base', original_name='Base', path='Base')],
        reference=Reference(name='test_model', path='test_model'),
    )
    assert data_class.name == 'test_model'
    assert data_class.fields == [required_field]
    assert data_class.decorators == []
    assert data_class.render() == (
        '@dataclass\nclass test_model(Base):\n    a: str'
    )
def test_base_model_optional():
    """A non-required field renders as Optional[...] with its default."""
    optional_field = DataModelField(
        name='a', data_type=DataType(type='str'), default='abc', required=False
    )
    model = BaseModel(
        fields=[optional_field],
        reference=Reference(name='test_model', path='test_model'),
    )
    assert model.name == 'test_model'
    assert model.fields == [optional_field]
    assert model.decorators == []
    assert model.render() == (
        "class test_model(BaseModel):\n    a: Optional[str] = 'abc'"
    )