def test_openapi_parser_parse_enum_models():
    """Generated enum models must equal the stored golden files for each Python target."""
    golden_dir = EXPECTED_OPEN_API_PATH / 'openapi_parser_parse_enum_models'

    # Default target version with a custom data-model field type.
    default_parser = OpenAPIParser(
        Path(DATA_PATH / 'enum_models.yaml').read_text(),
        data_model_field_type=DataModelFieldBase,
    )
    assert default_parser.parse() == (golden_dir / 'output_py37.py').read_text()

    # Explicit Python 3.6 target produces the alternate golden output.
    py36_parser = OpenAPIParser(
        Path(DATA_PATH / 'enum_models.yaml').read_text(),
        target_python_version=PythonVersion.PY_36,
    )
    assert py36_parser.parse() == (golden_dir / 'output_py36.py').read_text()
def test_openapi_parser_parse_enum_models():
    """Enum models rendered from enum_models.yaml match the per-version golden files."""
    golden = EXPECTED_OPEN_API_PATH / 'openapi_parser_parse_enum_models'

    first = OpenAPIParser(
        BaseModel, CustomRootType, text=Path(DATA_PATH / 'enum_models.yaml').read_text()
    )
    assert first.parse() == (golden / 'output_py37.py').read_text()

    second = OpenAPIParser(
        BaseModel,
        CustomRootType,
        text=Path(DATA_PATH / 'enum_models.yaml').read_text(),
        target_python_version=PythonVersion.PY_36,
    )
    assert second.parse() == (golden / 'output_py36.py').read_text()
def test_parse_root_type(source_obj, generated_classes):
    """parse_root_type output, dumped as templates, equals the expected class source."""
    root_parser = OpenAPIParser(BaseModel, CustomRootType)
    schema = JsonSchemaObject.parse_obj(source_obj)
    templates = root_parser.parse_root_type('Name', schema)
    assert dump_templates(list(templates)) == generated_classes
def test_parse_object(source_obj, generated_classes):
    """Parsing an object schema registers results matching the expected templates."""
    obj_parser = OpenAPIParser(BaseModel, CustomRootType)
    obj_parser.parse_object('Pets', JsonSchemaObject.parse_obj(source_obj), [])
    rendered = dump_templates(list(obj_parser.results))
    assert rendered == generated_classes
def test_get_data_type_invalid_obj():
    """An empty schema object must make get_data_type raise ValueError."""
    with pytest.raises(ValueError, match='invalid schema object'):
        checker = OpenAPIParser(BaseModel, CustomRootType)
        # The call itself raises; the assert never evaluates on the expected path.
        assert checker.get_data_type(JsonSchemaObject())
def test_get_data_type_array(schema_types, result_types):
    """get_data_type maps each schema type to the corresponding DataType."""
    array_parser = OpenAPIParser(BaseModel, CustomRootType)
    expected = [DataType(type=result) for result in result_types]
    assert array_parser.get_data_type(JsonSchemaObject(type=schema_types)) == expected
def test_openapi_parser_parse_allof():
    """allOf composition output matches the golden file."""
    allof_parser = OpenAPIParser(Path(DATA_PATH / 'allof.yaml'))
    golden = EXPECTED_OPEN_API_PATH / 'openapi_parser_parse_allof' / 'output.py'
    assert allof_parser.parse() == golden.read_text()
def test_parse_root_type(source_obj, generated_classes):
    """Root-type parsing appends results equal to the expected templates."""
    root_parser = OpenAPIParser('')
    root_parser.parse_root_type('Name', JsonSchemaObject.parse_obj(source_obj), [])
    rendered = dump_templates(list(root_parser.results))
    assert rendered == generated_classes
def test_openapi_parser_parse_duplicate_models():
    """Duplicate schema names must be disambiguated (second `Event` becomes `Event_1`)."""
    parser = OpenAPIParser(
        BaseModel,
        CustomRootType,
        text=Path(DATA_PATH / 'duplicate_models.yaml').read_text(),
    )
    assert (parser.parse() == '''from __future__ import annotations

from typing import List, Optional

from pydantic import BaseModel


class Pet(BaseModel):
    id: int
    name: str
    tag: Optional[str] = None


class Pets(BaseModel):
    __root__: List[Pet]


class Error(BaseModel):
    code: int
    message: str


class Event(BaseModel):
    name: Optional[str] = None


class Result(BaseModel):
    event: Optional[Event] = None


class Events(BaseModel):
    __root__: List[Event]


class EventRoot(BaseModel):
    __root__: Event


class EventObject(BaseModel):
    event: Optional[Event] = None


class DuplicateObject1(BaseModel):
    event: Optional[List[Event]] = None


class Event_1(BaseModel):
    event: Optional[Event] = None


class DuplicateObject2(BaseModel):
    event: Optional[Event_1] = None


class DuplicateObject3(BaseModel):
    __root__: Event
''')
def render(self):
    """Render per-integration config model files from the spec.

    Returns a mapping: file name -> {model_file_name: (contents, errors)}.
    For each spec file, builds an OpenAPI document per section, generates
    pydantic models from it, and synthesizes defaults.py, deprecations.py,
    validators.py and __init__.py alongside the model modules.
    """
    files = {}
    for file in self.spec['files']:
        # (<file name>, (<contents>, <errors>))
        model_files = {}
        model_data = []
        defaults_file_lines = []
        deprecation_data = defaultdict(dict)
        defaults_file_needs_dynamic_values = False
        defaults_file_needs_value_normalization = False
        for section in sorted(file['options'], key=lambda s: s['name']):
            errors = []
            section_name = section['name']
            if section_name == 'init_config':
                model_id = 'shared'
                model_file_name = f'{model_id}.py'
                schema_name = 'SharedConfig'
            elif section_name == 'instances':
                model_id = 'instance'
                model_file_name = f'{model_id}.py'
                schema_name = 'InstanceConfig'
            # Skip anything checks don't use directly
            else:
                continue
            model_data.append((model_id, schema_name))
            # We want to create something like:
            #
            # paths:
            #   /instance:
            #     get:
            #       responses:
            #         '200':
            #           content:
            #             application/json:
            #               schema:
            #                 $ref: '#/components/schemas/InstanceConfig'
            # components:
            #   schemas:
            #     InstanceConfig:
            #       required:
            #       - endpoint
            #       properties:
            #         endpoint:
            #           ...
            #         timeout:
            #           ...
            #         ...
            openapi_document = {
                'paths': {
                    f'/{model_id}': {
                        'get': {
                            'responses': {
                                '200': {
                                    'content': {
                                        'application/json': {
                                            'schema': {'$ref': f'#/components/schemas/{schema_name}'}
                                        }
                                    }
                                }
                            }
                        }
                    }
                },
                'components': {'schemas': {}},
            }
            # Chained assignment: `schema` is also registered in the document.
            schema = openapi_document['components']['schemas'][schema_name] = {}
            options = schema['properties'] = {}
            required_options = schema['required'] = []
            options_with_defaults = False
            validator_data = []
            for option in sorted(section['options'], key=lambda o: o['name']):
                option_name = option['name']
                normalized_option_name = normalize_option_name(option_name)
                if 'value' in option:
                    type_data = option['value']
                # Some integrations (like `mysql`) have options that are grouped under a top-level option
                elif 'options' in option:
                    nested_properties = []
                    type_data = {'type': 'object', 'properties': nested_properties}
                    for nested_option in option['options']:
                        nested_type_data = nested_option['value']
                        # Remove fields that aren't part of the OpenAPI specification
                        for extra_field in set(nested_type_data) - ALLOWED_TYPE_FIELDS:
                            nested_type_data.pop(extra_field, None)
                        nested_properties.append({'name': nested_option['name'], **nested_type_data})
                else:
                    errors.append(f'Option `{option_name}` must have a `value` or `options` attribute')
                    continue
                if option['deprecation']:
                    deprecation_data[model_id][option_name] = option['deprecation']
                options[option_name] = type_data
                if option['required']:
                    required_options.append(option_name)
                else:
                    # Optional option: emit a `<model_id>_<option>` default getter.
                    options_with_defaults = True
                    defaults_file_lines.append('')
                    defaults_file_lines.append('')
                    defaults_file_lines.append(f'def {model_id}_{normalized_option_name}(field, value):')
                    default_value = get_default_value(type_data)
                    if default_value is not NO_DEFAULT:
                        defaults_file_needs_value_normalization = True
                        defaults_file_lines.append(f'    return {default_value!r}')
                    else:
                        defaults_file_needs_dynamic_values = True
                        defaults_file_lines.append('    return get_default_field_value(field, value)')
                validators = type_data.pop('validators', [])
                if not isinstance(validators, list):
                    errors.append(f'Config spec property `{option_name}.value.validators` must be an array')
                elif validators:
                    for i, import_path in enumerate(validators, 1):
                        if not isinstance(import_path, str):
                            errors.append(
                                f'Entry #{i} of config spec property `{option_name}.value.validators` '
                                f'must be a string'
                            )
                            break
                    else:
                        validator_data.append((normalized_option_name, validators))
                # Remove fields that aren't part of the OpenAPI specification
                for extra_field in set(type_data) - ALLOWED_TYPE_FIELDS:
                    type_data.pop(extra_field, None)
                sanitize_openapi_object_properties(type_data)
            try:
                parser = OpenAPIParser(
                    yaml.safe_dump(openapi_document),
                    target_python_version=PythonVersion.PY_38,
                    enum_field_as_literal=LiteralType.All,
                    encoding='utf-8',
                    use_generic_container_types=True,
                    enable_faux_immutability=True,
                    # TODO: uncomment when the Agent upgrades Python to 3.9
                    # use_standard_collections=True,
                    strip_default_none=True,
                    # https://github.com/koxudaxi/datamodel-code-generator/pull/173
                    field_constraints=True,
                )
                model_file_contents = parser.parse()
            except Exception as e:
                errors.append(f'Error parsing the OpenAPI schema `{schema_name}`: {e}')
                model_files[model_file_name] = ('', errors)
                continue
            # Append validator/default hooks to the generated model class.
            model_file_lines = model_file_contents.splitlines()
            add_imports(model_file_lines, options_with_defaults, len(deprecation_data))
            if model_id in deprecation_data:
                model_file_lines.append('')
                model_file_lines.append('    @root_validator(pre=True)')
                model_file_lines.append('    def _handle_deprecations(cls, values):')
                model_file_lines.append(
                    f'        validation.utils.handle_deprecations('
                    f'{section_name!r}, deprecations.{model_id}(), values)'
                )
                model_file_lines.append('        return values')
            model_file_lines.append('')
            model_file_lines.append('    @root_validator(pre=True)')
            model_file_lines.append('    def _initial_validation(cls, values):')
            model_file_lines.append(
                f"        return validation.core.initialize_config("
                f"getattr(validators, 'initialize_{model_id}', identity)(values))"
            )
            model_file_lines.append('')
            model_file_lines.append("    @validator('*', pre=True, always=True)")
            model_file_lines.append('    def _ensure_defaults(cls, v, field):')
            model_file_lines.append('        if v is not None or field.required:')
            model_file_lines.append('            return v')
            model_file_lines.append('')
            model_file_lines.append(f"        return getattr(defaults, f'{model_id}_{{field.name}}')(field, v)")
            model_file_lines.append('')
            model_file_lines.append("    @validator('*')")
            model_file_lines.append('    def _run_validations(cls, v, field):')
            # TODO: remove conditional when there is a workaround:
            # https://github.com/samuelcolvin/pydantic/issues/2376
            model_file_lines.append('        if not v:')
            model_file_lines.append('            return v')
            model_file_lines.append('')
            model_file_lines.append(
                f"        return getattr(validators, f'{model_id}_{{field.name}}', identity)(v, field=field)"
            )
            for option_name, import_paths in validator_data:
                for import_path in import_paths:
                    validator_name = import_path.replace('.', '_')
                    model_file_lines.append('')
                    model_file_lines.append(f'    @validator({option_name!r})')
                    model_file_lines.append(f'    def _run_{option_name}_{validator_name}(cls, v, field):')
                    # TODO: remove conditional when there is a workaround:
                    # https://github.com/samuelcolvin/pydantic/issues/2376
                    model_file_lines.append('        if not v:')
                    model_file_lines.append('            return v')
                    model_file_lines.append('')
                    model_file_lines.append(f'        return validation.{import_path}(v, field=field)')
            model_file_lines.append('')
            model_file_lines.append('    @root_validator(pre=False)')
            model_file_lines.append('    def _final_validation(cls, values):')
            model_file_lines.append(
                f"        return validation.core.finalize_config("
                f"getattr(validators, 'finalize_{model_id}', identity)(values))"
            )
            model_file_lines.append('')
            model_file_contents = '\n'.join(model_file_lines)
            # Re-run black only when some generated line exceeds the line-length limit.
            if any(len(line) > 120 for line in model_file_lines):
                model_file_contents = self.code_formatter.apply_black(model_file_contents)
            model_files[model_file_name] = (model_file_contents, errors)
        # Logs-only integrations
        if not model_files:
            continue
        if defaults_file_lines:
            if defaults_file_needs_dynamic_values:
                defaults_file_lines.insert(
                    0, 'from datadog_checks.base.utils.models.fields import get_default_field_value'
                )
            defaults_file_lines.append('')
            defaults_file_contents = '\n'.join(defaults_file_lines)
            if defaults_file_needs_value_normalization:
                defaults_file_contents = self.code_formatter.apply_black(defaults_file_contents)
            model_files['defaults.py'] = (defaults_file_contents, [])
        if deprecation_data:
            file_needs_formatting = False
            deprecations_file_lines = []
            for model_id, deprecations in deprecation_data.items():
                deprecations_file_lines.append('')
                deprecations_file_lines.append('')
                deprecations_file_lines.append(f'def {model_id}():')
                deprecations_file_lines.append(f'    return {deprecations!r}')
                if len(deprecations_file_lines[-1]) > 120:
                    file_needs_formatting = True
            deprecations_file_lines.append('')
            deprecations_file_contents = '\n'.join(deprecations_file_lines)
            if file_needs_formatting:
                deprecations_file_contents = self.code_formatter.apply_black(deprecations_file_contents)
            model_files['deprecations.py'] = (deprecations_file_contents, [])
        # Package __init__.py: imports plus a ConfigMixin exposing typed properties.
        model_data.sort()
        package_root_lines = []
        for model_id, schema_name in model_data:
            package_root_lines.append(f'from .{model_id} import {schema_name}')
        package_root_lines.append('')
        package_root_lines.append('')
        package_root_lines.append('class ConfigMixin:')
        for model_id, schema_name in model_data:
            package_root_lines.append(f'    _config_model_{model_id}: {schema_name}')
        for model_id, schema_name in model_data:
            property_name = 'config' if model_id == 'instance' else f'{model_id}_config'
            package_root_lines.append('')
            package_root_lines.append('    @property')
            package_root_lines.append(f'    def {property_name}(self) -> {schema_name}:')
            package_root_lines.append(f'        return self._config_model_{model_id}')
        package_root_lines.append('')
        model_files['__init__.py'] = ('\n'.join(package_root_lines), [])
        # Custom
        model_files['validators.py'] = ('', [])
        files[file['name']] = {file_name: model_files[file_name] for file_name in sorted(model_files)}
    return files
def _process_section(
    self, section
) -> Tuple[List[Tuple[str, str]], Dict[str, Tuple[str, List[str]]], ModelInfo]:
    """Generate the model file for one spec section (`init_config` or `instances`).

    Returns (package_info, model_files, model_info):
    package_info - [(model_id, schema_name)] for the package __init__;
    model_files  - {model_file_name: (model_file_contents, errors)};
    model_info   - metadata gathered while building the OpenAPI document.
    Sections other than init_config/instances are skipped (empty results).
    """
    # Values to return
    # [(model_id, schema_name)]
    package_info: List[Tuple[str, str]] = []
    # { model_file_name: (model_file_contents, errors) }
    model_files: Dict[str, Tuple[str, List[str]]] = {}
    model_info = ModelInfo()
    errors: List[str] = []
    section_name = section['name']
    if section_name == 'init_config':
        model_id = 'shared'
        model_file_name = f'{model_id}.py'
        schema_name = 'SharedConfig'
    elif section_name == 'instances':
        model_id = 'instance'
        model_file_name = f'{model_id}.py'
        schema_name = 'InstanceConfig'
        if section['multiple_instances_defined']:
            section = self._merge_instances(section, errors)
    # Skip anything checks don't use directly
    else:
        return (
            package_info,
            model_files,
            model_info,
        )
    package_info.append((model_id, schema_name))
    (section_openapi_document, model_info) = build_openapi_document(section, model_id, schema_name, errors)
    try:
        section_parser = OpenAPIParser(
            yaml.safe_dump(section_openapi_document),
            target_python_version=PythonVersion.PY_38,
            enum_field_as_literal=LiteralType.All,
            encoding='utf-8',
            use_generic_container_types=True,
            enable_faux_immutability=True,
            # TODO: uncomment when the Agent upgrades Python to 3.9
            # use_standard_collections=True,
            strip_default_none=True,
            # https://github.com/koxudaxi/datamodel-code-generator/pull/173
            field_constraints=True,
        )
        parsed_section = section_parser.parse()
    except Exception as e:
        # Parse failures are reported per-file, not raised; the section still
        # appears in model_files with empty contents so callers see the error.
        errors.append(f'Error parsing the OpenAPI schema `{schema_name}`: {e}')
        model_files[model_file_name] = ('', errors)
        return (
            package_info,
            model_files,
            model_info,
        )
    model_file_contents = build_model_file(
        parsed_section,
        model_id,
        section_name,
        model_info,
        self.code_formatter,
    )
    # instance.py or shared.py
    model_files[model_file_name] = (model_file_contents, errors)
    return (
        package_info,
        model_files,
        model_info,
    )
def test_openapi_model_resolver():
    """All references in api.yaml resolve, including file-qualified and nested ones.

    Nested schemas get singularized names (Users/Users -> User, apis/Apis -> Api).
    """
    parser = OpenAPIParser(source=(DATA_PATH / 'api.yaml'))
    parser.parse()
    assert parser.model_resolver.references == {
        '#/components/schemas/Event': Reference(
            path='#/components/schemas/Event',
            original_name='Event',
            name='Event',
            loaded=True,
        ),
        '#/components/schemas/Pet': Reference(
            path='#/components/schemas/Pet',
            original_name='Pet',
            name='Pet',
            loaded=True,
        ),
        'api.yaml#/components/schemas/Error': Reference(
            path='api.yaml#/components/schemas/Error',
            original_name='Error',
            name='Error',
            loaded=True,
        ),
        'api.yaml#/components/schemas/Event': Reference(
            path='api.yaml#/components/schemas/Event',
            original_name='Event',
            name='Event',
            loaded=True,
        ),
        'api.yaml#/components/schemas/Id': Reference(
            path='api.yaml#/components/schemas/Id',
            original_name='Id',
            name='Id',
            loaded=True,
        ),
        'api.yaml#/components/schemas/Pet': Reference(
            path='api.yaml#/components/schemas/Pet',
            original_name='Pet',
            name='Pet',
            loaded=True,
        ),
        'api.yaml#/components/schemas/Pets': Reference(
            path='api.yaml#/components/schemas/Pets',
            original_name='Pets',
            name='Pets',
            loaded=True,
        ),
        'api.yaml#/components/schemas/Result': Reference(
            path='api.yaml#/components/schemas/Result',
            original_name='Result',
            name='Result',
            loaded=True,
        ),
        'api.yaml#/components/schemas/Rules': Reference(
            path='api.yaml#/components/schemas/Rules',
            original_name='Rules',
            name='Rules',
            loaded=True,
        ),
        'api.yaml#/components/schemas/Users': Reference(
            path='api.yaml#/components/schemas/Users',
            original_name='Users',
            name='Users',
            loaded=True,
        ),
        'api.yaml#/components/schemas/Users/Users': Reference(
            path='api.yaml#/components/schemas/Users/Users',
            original_name='Users',
            name='User',
            loaded=True,
        ),
        'api.yaml#/components/schemas/apis': Reference(
            path='api.yaml#/components/schemas/apis',
            original_name='apis',
            name='Apis',
            loaded=True,
        ),
        'api.yaml#/components/schemas/apis/Apis': Reference(
            path='api.yaml#/components/schemas/apis/Apis',
            original_name='Apis',
            name='Api',
            loaded=True,
        ),
    }
def test_openapi_parser_parse_alias():
    """Hyphenated schema/field names become valid identifiers with `alias=` fields.

    Modular output: parse() returns {module-path-tuple: source-text}.
    """
    parser = OpenAPIParser(
        BaseModel,
        CustomRootType,
        data_model_field_type=DataModelField,
        text=Path(DATA_PATH / 'alias.yaml').read_text(),
    )
    assert parser.parse() == {
        ('wo_o', '__init__.py'): '',
        (
            'wo_o',
            'bo_o.py',
        ): "from __future__ import annotations\n"
        "\n"
        "from typing import Optional\n"
        "\n"
        "from pydantic import BaseModel, Field\n"
        "\n"
        "from .. import Source, fo_o\n"
        "\n"
        "\n"
        "class ChocolatE(BaseModel):\n"
        "    flavour_name: Optional[str] = Field(None, alias='flavour-name')\n"
        "    sourc_e: Optional[Source] = Field(None, alias='sourc-e')\n"
        "    coco_a: Optional[fo_o.CocoA] = Field(None, alias='coco-a')\n",
        ('__init__.py',): "from __future__ import annotations\n"
        "\n"
        "from datetime import date, datetime\n"
        "from enum import Enum\n"
        "from typing import List, Optional\n"
        "\n"
        "from pydantic import BaseModel, Field, conint\n"
        "\n"
        "from . import model_s\n"
        "\n"
        "\n"
        "class Pet(Enum):\n"
        "    ca_t = 'ca-t'\n"
        "    dog_ = 'dog*'\n"
        "\n"
        "\n"
        "class Error(BaseModel):\n"
        "    code: int\n"
        "    message: str\n"
        "\n"
        "\n"
        "class HomeAddress(BaseModel):\n"
        "    address_1: Optional[str] = Field(None, alias='address-1')\n"
        "\n"
        "\n"
        "class TeamMembers(BaseModel):\n"
        "    __root__: List[str]\n"
        "\n"
        "\n"
        "class AllOfObj(BaseModel):\n"
        "    name: Optional[str] = None\n"
        "    number: Optional[str] = None\n"
        "\n"
        "\n"
        "class Id(BaseModel):\n"
        "    __root__: str\n"
        "\n"
        "\n"
        "class Result(BaseModel):\n"
        "    event: Optional[model_s.EvenT] = None\n"
        "\n"
        "\n"
        "class Source(BaseModel):\n"
        "    country_name: Optional[str] = Field(None, alias='country-name')\n"
        "\n"
        "\n"
        "class UserName(BaseModel):\n"
        "    first_name: Optional[str] = Field(None, alias='first-name')\n"
        "    home_address: Optional[HomeAddress] = Field(None, alias='home-address')\n"
        "\n"
        "\n"
        "class AllOfRef(UserName, HomeAddress):\n"
        "    pass\n"
        "\n"
        "\n"
        "class AllOfCombine(UserName):\n"
        "    birth_date: Optional[date] = Field(None, alias='birth-date')\n"
        "    size: Optional[conint(ge=1)] = None\n"
        "\n"
        "\n"
        "class AnyOfCombine(HomeAddress, UserName):\n"
        "    age: Optional[str] = None\n"
        "\n"
        "\n"
        "class Item(HomeAddress, UserName):\n"
        "    age: Optional[str] = None\n"
        "\n"
        "\n"
        "class AnyOfCombineInObject(BaseModel):\n"
        "    item: Optional[Item] = None\n"
        "\n"
        "\n"
        "class AnyOfCombineInArrayItem(HomeAddress, UserName):\n"
        "    age: Optional[str] = None\n"
        "\n"
        "\n"
        "class AnyOfCombineInArray(BaseModel):\n"
        "    __root__: List[AnyOfCombineInArrayItem]\n"
        "\n"
        "\n"
        "class AnyOfCombineInRoot(HomeAddress, UserName):\n"
        "    age: Optional[str] = None\n"
        "    birth_date: Optional[datetime] = Field(None, alias='birth-date')\n",
        ('model_s.py',): "from __future__ import annotations\n"
        "\n"
        "from enum import Enum\n"
        "from typing import Any, Dict, List, Optional, Union\n"
        "\n"
        "from pydantic import BaseModel\n"
        "\n"
        "\n"
        "class SpecieS(Enum):\n"
        "    dog = 'dog'\n"
        "    cat = 'cat'\n"
        "    snake = 'snake'\n"
        "\n"
        "\n"
        "class PeT(BaseModel):\n"
        "    id: int\n"
        "    name: str\n"
        "    tag: Optional[str] = None\n"
        "    species: Optional[SpecieS] = None\n"
        "\n"
        "\n"
        "class UseR(BaseModel):\n"
        "    id: int\n"
        "    name: str\n"
        "    tag: Optional[str] = None\n"
        "\n"
        "\n"
        "class EvenT(BaseModel):\n"
        "    name: Optional[Union[str, float, int, bool, Dict[str, Any], List[str]]] = None\n",
        (
            'fo_o',
            '__init__.py',
        ): "from __future__ import annotations\n"
        "\n"
        "from typing import Optional\n"
        "\n"
        "from pydantic import BaseModel, Field\n"
        "\n"
        "from .. import Id\n"
        "\n"
        "\n"
        "class TeA(BaseModel):\n"
        "    flavour_name: Optional[str] = Field(None, alias='flavour-name')\n"
        "    id: Optional[Id] = None\n"
        "\n"
        "\n"
        "class CocoA(BaseModel):\n"
        "    quality: Optional[int] = None\n",
        (
            'fo_o',
            'ba_r.py',
        ): "from __future__ import annotations\n"
        "\n"
        "from typing import Any, Dict, List, Optional\n"
        "\n"
        "from pydantic import BaseModel, Field\n"
        "\n"
        "\n"
        "class ThinG(BaseModel):\n"
        "    attribute_s: Optional[Dict[str, Any]] = Field(None, alias='attribute-s')\n"
        "\n"
        "\n"
        "class ThanG(BaseModel):\n"
        "    attributes: Optional[List[Dict[str, Any]]] = None\n"
        "\n"
        "\n"
        "class ClonE(ThinG):\n"
        "    pass\n",
        ('collection_s.py',): 'from __future__ import annotations\n'
        '\n'
        'from typing import List, Optional\n'
        '\n'
        'from pydantic import AnyUrl, BaseModel, Field\n'
        '\n'
        'from . import model_s\n'
        '\n'
        '\n'
        'class PetS(BaseModel):\n'
        '    __root__: List[model_s.PeT]\n'
        '\n'
        '\n'
        'class UserS(BaseModel):\n'
        '    __root__: List[model_s.UseR]\n'
        '\n'
        '\n'
        'class RuleS(BaseModel):\n'
        '    __root__: List[str]\n'
        '\n'
        '\n'
        'class Api(BaseModel):\n'
        '    apiKey: Optional[str] = Field(\n'
        '        None, description=\'To be used as a dataset parameter value\'\n'
        '    )\n'
        '    apiVersionNumber: Optional[str] = Field(\n'
        '        None, description=\'To be used as a version parameter value\'\n'
        '    )\n'
        '    apiUrl: Optional[AnyUrl] = Field(\n'
        '        None, description="The URL describing the dataset\'s fields"\n'
        '    )\n'
        '    apiDocumentationUrl: Optional[AnyUrl] = Field(\n'
        '        None, description=\'A URL to the API console for each API\'\n'
        '    )\n'
        '\n'
        '\n'
        'class ApiS(BaseModel):\n'
        '    __root__: List[Api]\n',
    }
def main(args: Optional[Sequence[str]] = None) -> Exit:
    """CLI entry point: parse arguments, run the OpenAPI parser, write output.

    Returns Exit.OK on success, Exit.ERROR when modular output is requested
    without a usable output directory. `args` defaults to sys.argv[1:].
    """
    # add cli completion support
    argcomplete.autocomplete(arg_parser)
    if args is None:
        args = sys.argv[1:]
    namespace: Namespace = arg_parser.parse_args(args)
    if namespace.version:  # pragma: no cover
        from datamodel_code_generator.version import version
        print(version)
        exit(0)
    if namespace.debug:  # pragma: no cover
        enable_debug_message()
    # Imported lazily so --version/--debug handling stays cheap.
    from datamodel_code_generator.parser.openapi import OpenAPIParser
    extra_template_data: Optional[DefaultDict[str, Dict]]
    if namespace.extra_template_data is not None:
        with namespace.extra_template_data as data:
            # defaultdict(dict) hook so templates can index missing keys safely.
            extra_template_data = json.load(data, object_hook=lambda d: defaultdict(dict, **d))
    else:
        extra_template_data = None
    parser = OpenAPIParser(
        BaseModel,
        CustomRootType,
        base_class=namespace.base_class,
        custom_template_dir=namespace.custom_template_dir,
        extra_template_data=extra_template_data,
        target_python_version=PythonVersion(namespace.target_python_version),
        text=namespace.input.read(),
        dump_resolve_reference_action=dump_resolve_reference_action,
    )
    output = Path(namespace.output) if namespace.output is not None else None
    with chdir(output):
        result = parser.parse()
    if isinstance(result, str):
        # Single-module result: one body, written to `output` (or stdout if None).
        modules = {output: result}
    else:
        # Modular result: a directory is mandatory.
        if output is None:
            print('Modular references require an output directory')
            return Exit.ERROR
        if output.suffix:
            print('Modular references require an output directory, not a file')
            return Exit.ERROR
        modules = {output.joinpath(*name): body for name, body in result.items()}
    timestamp = datetime.now(timezone.utc).replace(microsecond=0).isoformat()
    header = f'''\
# generated by datamodel-codegen:
# filename: {Path(namespace.input.name).name}
# timestamp: {timestamp}'''
    file: Optional[IO[Any]]
    for path, body in modules.items():
        if path is not None:
            if not path.parent.exists():
                path.parent.mkdir()
            file = path.open('wt')
        else:
            # path None means stdout (print's default when file=None).
            file = None
        print(header, file=file)
        if body:
            print('', file=file)
            print(body.rstrip(), file=file)
        if file is not None:
            file.close()
    return Exit.OK
def test_openapi_parser_parse_enum_models():
    """Enum schemas generate Enum classes; the 3.6 target quotes forward references."""
    parser = OpenAPIParser(BaseModel, CustomRootType, text=Path(DATA_PATH / 'enum_models.yaml').read_text())
    assert (parser.parse() == '''from __future__ import annotations

from enum import Enum
from typing import List, Optional

from pydantic import BaseModel


class Pet(BaseModel):
    id: int
    name: str
    tag: Optional[str] = None


class Pets(BaseModel):
    __root__: List[Pet]


class Error(BaseModel):
    code: int
    message: str


class Type(Enum):
    a = 'a'
    b = 'b'


class EnumObject(BaseModel):
    type: Optional[Type] = None


class EnumRoot(Enum):
    a = 'a'
    b = 'b'


class IntEnum(Enum):
    number_1 = 1
    number_2 = 2


class AliasEnum(Enum):
    a = 1
    b = 2
    c = 3
''')
    parser = OpenAPIParser(
        BaseModel,
        CustomRootType,
        text=Path(DATA_PATH / 'enum_models.yaml').read_text(),
        target_python_version=PythonVersion.PY_36,
    )
    assert (parser.parse() == '''from enum import Enum
from typing import List, Optional

from pydantic import BaseModel


class Pet(BaseModel):
    id: int
    name: str
    tag: Optional[str] = None


class Pets(BaseModel):
    __root__: List['Pet']


class Error(BaseModel):
    code: int
    message: str


class Type(Enum):
    a = 'a'
    b = 'b'


class EnumObject(BaseModel):
    type: Optional['Type'] = None


class EnumRoot(Enum):
    a = 'a'
    b = 'b'


class IntEnum(Enum):
    number_1 = 1
    number_2 = 2


class AliasEnum(Enum):
    a = 1
    b = 2
    c = 3
''')
def test_parse_array(source_obj, generated_classes):
    """Array schema parsing produces the expected templates."""
    array_parser = OpenAPIParser('')
    schema = JsonSchemaObject.parse_obj(source_obj)
    array_parser.parse_array('Pets', schema, [])
    assert dump_templates(list(array_parser.results)) == generated_classes
def test_openapi_parser_parse_allof():
    """allOf/anyOf schemas become multiple-inheritance pydantic models."""
    parser = OpenAPIParser(BaseModel, CustomRootType, text=Path(DATA_PATH / 'allof.yaml').read_text())
    assert (parser.parse() == '''from __future__ import annotations

from datetime import date, datetime
from typing import List, Optional

from pydantic import BaseModel, conint


class Pet(BaseModel):
    id: int
    name: str
    tag: Optional[str] = None


class Car(BaseModel):
    number: str


class AllOfref(Pet, Car):
    pass


class AllOfobj(BaseModel):
    name: Optional[str] = None
    number: Optional[str] = None


class AllOfCombine(Pet):
    birthdate: Optional[date] = None
    size: Optional[conint(ge=1.0)] = None


class AnyOfCombine(Pet, Car):
    age: Optional[str] = None


class item(Pet, Car):
    age: Optional[str] = None


class AnyOfCombineInObject(BaseModel):
    item: Optional[item] = None


class AnyOfCombineInArrayItem(Pet, Car):
    age: Optional[str] = None


class AnyOfCombineInArray(BaseModel):
    __root__: List[AnyOfCombineInArrayItem]


class AnyOfCombineInRoot(Pet, Car):
    age: Optional[str] = None
    birthdate: Optional[datetime] = None


class Error(BaseModel):
    code: int
    message: str
''')
def test_openapi_parser_parse_resolved_models():
    """Resolved-model output matches the golden file."""
    resolved_parser = OpenAPIParser(Path(DATA_PATH / 'resolved_models.yaml'))
    golden = EXPECTED_OPEN_API_PATH / 'openapi_parser_parse_resolved_models' / 'output.py'
    assert resolved_parser.parse() == golden.read_text()
def test_openapi_model_resolver():
    """References resolve with list-form paths; nested names are singularized."""
    parser = OpenAPIParser(
        BaseModel,
        CustomRootType,
        text=Path(DATA_PATH / 'api.yaml').read_text(),
    )
    parser.parse()
    assert parser.model_resolver.references == {
        '#/components/schemas/Error': Reference(
            path=['#/components', 'schemas', 'Error'],
            original_name='Error',
            name='Error',
            loaded=True,
        ),
        '#/components/schemas/Event': Reference(
            path=['#/components', 'schemas', 'Event'],
            original_name='Event',
            name='Event',
            loaded=True,
        ),
        '#/components/schemas/Id': Reference(
            path=['#/components', 'schemas', 'Id'],
            original_name='Id',
            name='Id',
            loaded=True,
        ),
        '#/components/schemas/Pet': Reference(
            path=['#/components', 'schemas', 'Pet'],
            original_name='Pet',
            name='Pet',
            loaded=True,
        ),
        '#/components/schemas/Pets': Reference(
            path=['#/components', 'schemas', 'Pets'],
            original_name='Pets',
            name='Pets',
            loaded=True,
        ),
        '#/components/schemas/Result': Reference(
            path=['#/components', 'schemas', 'Result'],
            original_name='Result',
            name='Result',
            loaded=True,
        ),
        '#/components/schemas/Rules': Reference(
            path=['#/components', 'schemas', 'Rules'],
            original_name='Rules',
            name='Rules',
            loaded=True,
        ),
        '#/components/schemas/Users': Reference(
            path=['#/components', 'schemas', 'Users'],
            original_name='Users',
            name='Users',
            loaded=True,
        ),
        '#/components/schemas/Users/Users': Reference(
            path=['#/components', 'schemas', 'Users', 'Users'],
            original_name='Users',
            name='User',
            loaded=True,
        ),
        '#/components/schemas/apis': Reference(
            path=['#/components', 'schemas', 'apis'],
            original_name='apis',
            name='Apis',
            loaded=True,
        ),
        '#/components/schemas/apis/Apis': Reference(
            path=['#/components', 'schemas', 'apis', 'Apis'],
            original_name='Apis',
            name='Api',
            loaded=True,
        ),
    }
def test_openapi_model_resolver():
    """Reference bookkeeping, compared as plain dicts (volatile fields excluded).

    Nested duplicates get an index suffix in the path (`.../Users/Users/0`).
    """
    parser = OpenAPIParser(source=(DATA_PATH / 'api.yaml'))
    parser.parse()
    # Drop per-run fields so the comparison is stable.
    references = {
        k: v.dict(exclude={'source', 'module_name', 'actual_module_name'}, )
        for k, v in parser.model_resolver.references.items()
    }
    assert references == {
        'api.yaml#/components/schemas/Error': {
            'loaded': True,
            'name': 'Error',
            'original_name': 'Error',
            'path': 'api.yaml#/components/schemas/Error',
        },
        'api.yaml#/components/schemas/Event': {
            'loaded': True,
            'name': 'Event',
            'original_name': 'Event',
            'path': 'api.yaml#/components/schemas/Event',
        },
        'api.yaml#/components/schemas/Id': {
            'loaded': True,
            'name': 'Id',
            'original_name': 'Id',
            'path': 'api.yaml#/components/schemas/Id',
        },
        'api.yaml#/components/schemas/Pet': {
            'loaded': True,
            'name': 'Pet',
            'original_name': 'Pet',
            'path': 'api.yaml#/components/schemas/Pet',
        },
        'api.yaml#/components/schemas/Pets': {
            'loaded': True,
            'name': 'Pets',
            'original_name': 'Pets',
            'path': 'api.yaml#/components/schemas/Pets',
        },
        'api.yaml#/components/schemas/Result': {
            'loaded': True,
            'name': 'Result',
            'original_name': 'Result',
            'path': 'api.yaml#/components/schemas/Result',
        },
        'api.yaml#/components/schemas/Rules': {
            'loaded': True,
            'name': 'Rules',
            'original_name': 'Rules',
            'path': 'api.yaml#/components/schemas/Rules',
        },
        'api.yaml#/components/schemas/Users': {
            'loaded': True,
            'name': 'Users',
            'original_name': 'Users',
            'path': 'api.yaml#/components/schemas/Users',
        },
        'api.yaml#/components/schemas/Users/Users/0': {
            'loaded': True,
            'name': 'User',
            'original_name': 'Users',
            'path': 'api.yaml#/components/schemas/Users/Users/0',
        },
        'api.yaml#/components/schemas/apis': {
            'loaded': True,
            'name': 'Apis',
            'original_name': 'apis',
            'path': 'api.yaml#/components/schemas/apis',
        },
        'api.yaml#/components/schemas/apis/apis/0': {
            'loaded': True,
            'name': 'Api',
            'original_name': 'apis',
            'path': 'api.yaml#/components/schemas/apis/apis/0',
        },
    }
def generate_code(input_name: str, input_text: str, output_dir: Path,
                  template_dir: Optional[Path]) -> None:
    """Render FastAPI route files from templates and write generated models.

    Parameters:
        input_name: name of the OpenAPI input (used in generated headers).
        input_text: raw OpenAPI document text.
        output_dir: directory receiving the rendered files (created if missing).
        template_dir: Jinja template directory; falls back to the built-in one.

    Raises:
        Exception: when the model parser returns modular (multi-file) output,
        which this version does not support.
    """
    if not output_dir.exists():
        output_dir.mkdir(parents=True)
    if not template_dir:
        template_dir = BUILTIN_TEMPLATE_DIR
    model_parser = OpenAPIModelParser(source=input_text)
    parser = OpenAPIParser(input_name, input_text, openapi_model_parser=model_parser)
    parsed_object: ParsedObject = parser.parse()
    environment: Environment = Environment(
        loader=FileSystemLoader(
            template_dir if template_dir else f"{Path(__file__).parent}/template",
            encoding="utf8",
        ),
    )
    results: Dict[Path, str] = {}
    code_formatter = CodeFormatter(PythonVersion.PY_38, Path().resolve())
    # Render every template against the parsed operations/imports/info.
    for target in template_dir.rglob("*"):
        relative_path = target.relative_to(template_dir)
        result = environment.get_template(str(relative_path)).render(
            operations=parsed_object.operations,
            imports=parsed_object.imports,
            info=parsed_object.info,
        )
        results[relative_path] = code_formatter.format_code(result)
    timestamp = datetime.now(timezone.utc).replace(microsecond=0).isoformat()
    header = f"""\
# generated by fastapi-codegen:
# filename: {Path(input_name).name}
# timestamp: {timestamp}"""
    for path, code in results.items():
        with output_dir.joinpath(path.with_suffix(".py")).open("wt") as file:
            print(header, file=file)
            print("", file=file)
            print(code.rstrip(), file=file)
    # Second pass: emit the generated pydantic models.
    with chdir(output_dir):
        results = model_parser.parse()
    if not results:
        return
    elif isinstance(results, str):
        output = output_dir / MODEL_PATH
        modules = {output: (results, input_name)}
    else:
        raise Exception('Modular references are not supported in this version')
    # BUG FIX: the placeholder was emitted as a broken f-string expression;
    # use doubled braces so `.format(filename=...)` below fills it in.
    header = f'''\
# generated by fastapi-codegen:
# filename: {{filename}}'''
    # if not disable_timestamp:
    header += f'\n# timestamp: {timestamp}'
    for path, body_and_filename in modules.items():
        body, filename = body_and_filename
        if path is None:
            file = None
        else:
            if not path.parent.exists():
                path.parent.mkdir(parents=True)
            file = path.open('wt', encoding='utf8')
        print(header.format(filename=filename), file=file)
        if body:
            print('', file=file)
            print(body.rstrip(), file=file)
        if file is not None:
            file.close()