def process_schema(model: OpenmindsSchema):
    """Fetch the openMINDS JSON Schema for *model* and generate a pydantic module.

    Downloads the schema from the URL built out of the module-level
    ``raw_url`` template, runs datamodel-code-generator on it, and writes the
    resulting module under ``{path_to_currdir}/{domain}/{schema}.py``.  The
    generated file is post-processed to strip the openMINDS vocab prefix
    from field names.

    Raises:
        requests.HTTPError: if the schema download fails.
    """
    url = raw_url.format(
        ref=openminds_ref,
        openminds_v=openminds_v,
        domain=model.domain,
        schema=model.schema,
    )
    # `assert` is stripped under `python -O` and gives no diagnostic; let
    # requests raise a descriptive HTTPError on non-2xx instead.  A timeout
    # is set because requests never times out by default.
    resp = requests.get(url, timeout=30)
    resp.raise_for_status()
    resp_text = resp.text

    # model.schema may contain sub-directories (e.g. "core/actors/person").
    target_dir = f'{path_to_currdir}/{model.domain}/{path.dirname(model.schema)}'
    os.makedirs(target_dir, exist_ok=True)
    output_filename = f'{path_to_currdir}/{model.domain}/{model.schema}.py'

    generate(
        resp_text,
        target_python_version=PythonVersion.PY_36,
        input_file_type=InputFileType.JsonSchema,
        input_filename=url,
        snake_case_field=True,
        output=Path(output_filename),
        disable_timestamp=True,
        base_class="siibra.openminds.base.SiibraBaseModel",
    )

    # Post-process: strip the openMINDS vocabulary prefix so generated
    # attribute names are plain identifiers.
    with open(output_filename, "r") as fp:
        txt = fp.read()
    with open(output_filename, "w") as fp:
        fp.write(txt.replace("https://openminds.ebrains.eu/vocab/", ""))
def main(args: Optional[Sequence[str]] = None) -> Exit:
    """Parse command-line arguments and run the code generator.

    Args:
        args: argument vector; defaults to ``sys.argv[1:]``.

    Returns:
        Exit.OK on success, Exit.ERROR on any failure.
    """
    # add cli completion support
    argcomplete.autocomplete(arg_parser)
    if args is None:
        args = sys.argv[1:]
    namespace: Namespace = arg_parser.parse_args(args)

    if namespace.version:  # pragma: no cover
        from datamodel_code_generator.version import version
        print(version)
        exit(0)

    if namespace.debug:  # pragma: no cover
        enable_debug_message()

    extra_template_data: Optional[DefaultDict[str, Dict[str, Any]]]
    if namespace.extra_template_data is not None:
        with namespace.extra_template_data as data:
            try:
                # object_hook turns every JSON object into a defaultdict(dict)
                # so templates can index missing keys safely.
                extra_template_data = json.load(
                    data, object_hook=lambda d: defaultdict(dict, **d))
            except json.JSONDecodeError as e:
                # Malformed JSON previously escaped as a raw traceback; report
                # a readable error and exit cleanly instead.
                print(f"Unable to load extra template data: {e}",
                      file=sys.stderr)
                return Exit.ERROR
    else:
        extra_template_data = None

    try:
        generate(
            input_name=namespace.input.name,
            input_text=namespace.input.read(),
            input_file_type=InputFileType(namespace.input_file_type),
            output=Path(namespace.output)
            if namespace.output is not None else None,
            target_python_version=PythonVersion(
                namespace.target_python_version),
            base_class=namespace.base_class,
            custom_template_dir=namespace.custom_template_dir,
            extra_template_data=extra_template_data,
            validation=namespace.validation,
            field_constraints=namespace.field_constraints,
        )
        return Exit.OK
    except Error as e:
        # Known, user-facing errors: message only, no traceback.
        print(str(e), file=sys.stderr)
        return Exit.ERROR
    except Exception:
        # Unexpected errors: full traceback for bug reports.
        import traceback
        print(traceback.format_exc(), file=sys.stderr)
        return Exit.ERROR
def main(args: Optional[Sequence[str]] = None) -> Exit:
    """Main function.

    Parses CLI arguments, merges them with any ``[tool.datamodel-codegen]``
    settings found in pyproject.toml, and runs the code generator.
    Returns Exit.OK on success, Exit.ERROR on any failure.
    """
    # add cli completion support
    argcomplete.autocomplete(arg_parser)
    if args is None:
        args = sys.argv[1:]
    namespace: Namespace = arg_parser.parse_args(args)

    if namespace.version:  # pragma: no cover
        from datamodel_code_generator.version import version
        print(version)
        exit(0)

    # Load [tool.datamodel-codegen] from the project's pyproject.toml (if
    # any), normalising dashed keys to underscores so they match the Config
    # model's field names.
    root = black.find_project_root((Path().resolve(), ))
    pyproject_toml_path = root / "pyproject.toml"
    if pyproject_toml_path.is_file():
        pyproject_toml: Dict[str, Any] = {
            k.replace('-', '_'): v
            for k, v in toml.load(str(pyproject_toml_path)).get(
                'tool', {}).get('datamodel-codegen', {}).items()
        }
    else:
        pyproject_toml = {}

    # pyproject.toml provides the defaults; explicit CLI arguments win.
    try:
        config = Config.parse_obj(pyproject_toml)
        config.merge_args(namespace)
    except Error as e:
        print(e.message, file=sys.stderr)
        return Exit.ERROR

    # Generated code is formatted with black, so the installed black must
    # understand the requested target Python version.
    if not is_supported_in_black(
            config.target_python_version):  # pragma: no cover
        print(
            f"Installed black doesn't support Python version {config.target_python_version.value}.\n"
            f"You have to install a newer black.\n"
            f"Installed black version: {black.__version__}",
            file=sys.stderr,
        )
        return Exit.ERROR

    if config.debug:  # pragma: no cover
        enable_debug_message()

    extra_template_data: Optional[DefaultDict[str, Dict[str, Any]]]
    if config.extra_template_data is None:
        extra_template_data = None
    else:
        # config.extra_template_data is an open file-like object (argparse
        # FileType); the `with` block ensures it is closed after reading.
        with config.extra_template_data as data:
            try:
                extra_template_data = json.load(
                    data, object_hook=lambda d: defaultdict(dict, **d))
            except json.JSONDecodeError as e:
                print(f"Unable to load extra template data: {e}",
                      file=sys.stderr)
                return Exit.ERROR

    if config.aliases is None:
        aliases = None
    else:
        with config.aliases as data:
            try:
                aliases = json.load(data)
            except json.JSONDecodeError as e:
                print(f"Unable to load alias mapping: {e}", file=sys.stderr)
                return Exit.ERROR
        # The alias file must be a flat {str: str} JSON object.
        if not isinstance(aliases, dict) or not all(
                isinstance(k, str) and isinstance(v, str)
                for k, v in aliases.items()):
            print(
                'Alias mapping must be a JSON string mapping (e.g. {"from": "to", ...})',
                file=sys.stderr,
            )
            return Exit.ERROR

    try:
        # Input precedence: explicit URL, then input file, then stdin.
        generate(
            input_=config.url or config.input or sys.stdin.read(),
            input_file_type=config.input_file_type,
            output=config.output,
            target_python_version=config.target_python_version,
            base_class=config.base_class,
            custom_template_dir=config.custom_template_dir,
            validation=config.validation,
            field_constraints=config.field_constraints,
            snake_case_field=config.snake_case_field,
            strip_default_none=config.strip_default_none,
            extra_template_data=extra_template_data,
            aliases=aliases,
            disable_timestamp=config.disable_timestamp,
            allow_population_by_field_name=config.
            allow_population_by_field_name,
            apply_default_values_for_required_fields=config.use_default,
            force_optional_for_required_fields=config.force_optional,
            class_name=config.class_name,
            use_standard_collections=config.use_standard_collections,
            use_schema_description=config.use_schema_description,
            reuse_model=config.reuse_model,
            encoding=config.encoding,
            enum_field_as_literal=config.enum_field_as_literal,
            set_default_enum_member=config.set_default_enum_member,
            strict_nullable=config.strict_nullable,
            use_generic_container_types=config.use_generic_container_types,
            enable_faux_immutability=config.enable_faux_immutability,
            disable_appending_item_suffix=config.disable_appending_item_suffix,
            strict_types=config.strict_types,
            empty_enum_field_name=config.empty_enum_field_name,
            field_extra_keys=config.field_extra_keys,
            field_include_all_keys=config.field_include_all_keys,
        )
        return Exit.OK
    except InvalidClassNameError as e:
        # Root schema had no usable title; the user must name the class.
        print(f'{e} You have to set `--class-name` option', file=sys.stderr)
        return Exit.ERROR
    except Error as e:
        # Known, user-facing errors: message only.
        print(str(e), file=sys.stderr)
        return Exit.ERROR
    except Exception:
        # Unexpected errors: full traceback for bug reports.
        import traceback
        print(traceback.format_exc(), file=sys.stderr)
        return Exit.ERROR
def main(args: Optional[Sequence[str]] = None) -> Exit:
    """Parse CLI arguments, merge pyproject.toml settings, and generate models.

    Args:
        args: argument vector; defaults to ``sys.argv[1:]``.

    Returns:
        Exit.OK on success, Exit.ERROR on any failure.
    """
    # add cli completion support
    argcomplete.autocomplete(arg_parser)
    if args is None:
        args = sys.argv[1:]
    namespace: Namespace = arg_parser.parse_args(args)

    if namespace.version:  # pragma: no cover
        from datamodel_code_generator.version import version
        print(version)
        exit(0)

    # Load [tool.datamodel-codegen] from pyproject.toml (if present),
    # normalising dashed keys to underscores to match Config field names.
    root = black.find_project_root((Path().resolve(),))
    pyproject_toml_path = root / "pyproject.toml"
    if pyproject_toml_path.is_file():
        pyproject_toml: Dict[str, Any] = {
            k.replace('-', '_'): v
            for k, v in toml.load(str(pyproject_toml_path))
            .get('tool', {})
            .get('datamodel-codegen', {})
            .items()
        }
    else:
        pyproject_toml = {}

    # pyproject.toml supplies defaults; explicit CLI arguments win.
    config = Config.parse_obj(pyproject_toml)
    config.merge_args(namespace)

    if config.input is not None:
        input_name: str = config.input.name  # type: ignore
        input_text: str = config.input.read()
    else:
        input_name = '<stdin>'
        input_text = sys.stdin.read()

    if config.debug:  # pragma: no cover
        enable_debug_message()

    extra_template_data: Optional[DefaultDict[str, Dict[str, Any]]]
    if config.extra_template_data is not None:
        with config.extra_template_data as data:
            try:
                extra_template_data = json.load(
                    data, object_hook=lambda d: defaultdict(dict, **d)
                )
            except json.JSONDecodeError as e:
                print(f"Unable to load extra template data: {e}", file=sys.stderr)
                return Exit.ERROR
    else:
        extra_template_data = None

    if config.aliases is not None:
        with config.aliases as data:
            try:
                aliases = json.load(data)
            except json.JSONDecodeError as e:
                print(f"Unable to load alias mapping: {e}", file=sys.stderr)
                return Exit.ERROR
        # Validate against the builtin `dict`: isinstance() with typing.Dict
        # is deprecated and unreliable across Python versions (PEP 585).
        if not isinstance(aliases, dict) or not all(
            isinstance(k, str) and isinstance(v, str) for k, v in aliases.items()
        ):
            print(
                'Alias mapping must be a JSON string mapping (e.g. {"from": "to", ...})',
                file=sys.stderr,
            )
            return Exit.ERROR
    else:
        aliases = None

    try:
        generate(
            input_name=input_name,
            input_text=input_text,
            input_file_type=config.input_file_type,
            output=config.output,
            target_python_version=config.target_python_version,
            base_class=config.base_class,
            custom_template_dir=config.custom_template_dir,
            validation=config.validation,
            field_constraints=config.field_constraints,
            snake_case_field=config.snake_case_field,
            strip_default_none=config.strip_default_none,
            extra_template_data=extra_template_data,
            aliases=aliases,
            disable_timestamp=config.disable_timestamp,
            allow_population_by_field_name=config.allow_population_by_field_name,
            use_default_on_required_field=config.use_default,
        )
        return Exit.OK
    except Error as e:
        # Known, user-facing errors: message only, no traceback.
        print(str(e), file=sys.stderr)
        return Exit.ERROR
    except Exception:
        # Unexpected errors: full traceback for bug reports.
        import traceback
        print(traceback.format_exc(), file=sys.stderr)
        return Exit.ERROR
"lastName": { "type": "string", "description": "The person's last name." }, "age": { "description": "Age in years which must be equal to or greater than zero.", "type": "integer", "minimum": 0 }, "friends": { "type": "array" }, "comment": { "type": "null" } } } """ with TemporaryDirectory() as temporary_directory_name: temporary_directory = Path(temporary_directory_name) output = Path(temporary_directory / 'model.py') generate( json_schema, input_file_type=InputFileType.JsonSchema, input_filename="example.json", output=output, ) model: str = output.read_text() print(model)
def main(args: Optional[Sequence[str]] = None) -> Exit:
    """Parse command-line arguments and run the code generator.

    Args:
        args: argument vector; defaults to ``sys.argv[1:]``.

    Returns:
        Exit.OK on success, Exit.ERROR on any failure.
    """
    # add cli completion support
    argcomplete.autocomplete(arg_parser)
    if args is None:
        args = sys.argv[1:]
    namespace: Namespace = arg_parser.parse_args(args)

    if namespace.version:  # pragma: no cover
        from datamodel_code_generator.version import version
        print(version)
        exit(0)

    if namespace.debug:  # pragma: no cover
        enable_debug_message()

    extra_template_data: Optional[DefaultDict[str, Dict[str, Any]]]
    if namespace.extra_template_data is not None:
        with namespace.extra_template_data as data:
            try:
                extra_template_data = json.load(
                    data, object_hook=lambda d: defaultdict(dict, **d))
            except json.JSONDecodeError as e:
                print(f"Unable to load extra template data: {e}",
                      file=sys.stderr)
                return Exit.ERROR
    else:
        extra_template_data = None

    if namespace.aliases is not None:
        with namespace.aliases as data:
            try:
                aliases = json.load(data)
            except json.JSONDecodeError as e:
                print(f"Unable to load alias mapping: {e}", file=sys.stderr)
                return Exit.ERROR
        # Validate against the builtin `dict`: isinstance() with typing.Dict
        # is deprecated and unreliable across Python versions (PEP 585).
        if not isinstance(aliases, dict) or not all(
                isinstance(k, str) and isinstance(v, str)
                for k, v in aliases.items()):
            print(
                'Alias mapping must be a JSON string mapping (e.g. {"from": "to", ...})',
                file=sys.stderr,
            )
            return Exit.ERROR
    else:
        aliases = None

    try:
        generate(
            input_name=namespace.input.name,
            input_text=namespace.input.read(),
            input_file_type=InputFileType(namespace.input_file_type),
            output=Path(namespace.output)
            if namespace.output is not None else None,
            target_python_version=PythonVersion(
                namespace.target_python_version),
            base_class=namespace.base_class,
            custom_template_dir=namespace.custom_template_dir,
            extra_template_data=extra_template_data,
            validation=namespace.validation,
            field_constraints=namespace.field_constraints,
            aliases=aliases,
        )
        return Exit.OK
    except Error as e:
        # Known, user-facing errors: message only, no traceback.
        print(str(e), file=sys.stderr)
        return Exit.ERROR
    except Exception:
        # Unexpected errors: full traceback for bug reports.
        import traceback
        print(traceback.format_exc(), file=sys.stderr)
        return Exit.ERROR
from datamodel_code_generator import InputFileType, generate

# Retrieve OpenAPI schema.  A timeout is set because requests never times
# out by default, and raise_for_status() fails fast on HTTP errors instead
# of handing a non-JSON error page to json parsing.
response = requests.get("https://start.camunda.com/openapi.json", timeout=30)
response.raise_for_status()
api = response.json()

# Apply patches: drop the `type` of these two properties so the generated
# pydantic fields accept any value.
patches = [
    {
        'op': 'remove',
        'path': '/components/schemas/VariableValueDto/properties/value/type',
    },
    {
        'op': 'remove',
        'path': '/components/schemas/VariableValueDto/properties/valueInfo/type',
    },
]
patch = jsonpatch.JsonPatch(patches)
patched_api = patch.apply(api)

# Generate pydantic models from the patched schema.
generate(
    input_=json.dumps(patched_api),
    input_file_type=InputFileType.OpenAPI,
    output=Path("src/camunda/models/generated.py"),
    allow_population_by_field_name=True,
    snake_case_field=True,
    use_generic_container_types=True,
    use_schema_description=True,
    reuse_model=True,
)
def create_data_model_file(*,
                           path: Union[Path, str],
                           url: Optional[str] = None,
                           schema: Union[Path, str, ParseResult, None] = None,
                           schema_type: Union[
                               str, InputFileType] = InputFileType.JsonSchema,
                           class_name: Optional[str] = None) -> None:
    """
    This will create a data model from data model definitions. The schemas
    can either downloaded from a url or passed as str or dict. Allowed input
    types are defined but the underlying toolbox.

    Many data models suited for FIWARE are located here:
    https://github.com/smart-data-models/data-models

    Args:
        path: path where the generated code should saved
        url: url to download the definition from
        schema: schema definition (path, raw string, or parsed URL); ignored
            when `url` is given
        schema_type (str):
            `auto`, `openapi`, `jsonschema`, `json`, `yaml`, `dict`, `csv`
        class_name: classname for the model class

    Raises:
        ValueError: if neither `url` nor `schema` is provided

    Returns:
        None

    Examples::

        {
            "type": "object",
            "properties": {
                "number": { "type": "number" },
                "street_name": { "type": "string" },
                "street_type": { "type": "string",
                                 "enum": ["Street", "Avenue", "Boulevard"] }
            }
        }
    """
    # Validate inputs before touching the filesystem so an invalid call
    # leaves no half-created directories behind.
    if url:
        schema = parse.urlparse(url)
    if not schema:
        raise ValueError("Missing argument! Either 'url' or 'schema' "
                         "must be provided")

    if isinstance(path, str):
        path = Path(path)
    path.parent.mkdir(parents=True, exist_ok=True)

    if isinstance(schema_type, str):
        schema_type = InputFileType(schema_type)

    with TemporaryDirectory() as temp:
        # Generate into a uniquely named temp file, then move it into place.
        output = Path(temp).joinpath(f'{uuid4()}.py')
        generate(input_=schema,
                 input_file_type=schema_type,
                 output=output,
                 class_name=class_name)
        # move temporary file to output directory
        shutil.move(str(output), str(path))