def parse(self, with_import: Optional[bool] = True, format_: Optional[bool] = True) -> str:
    """Parse every schema under ``components.schemas`` and render all models as one source string.

    :param with_import: prepend the collected import block to the output.
    :param format_: run the generated code through the formatter before returning.
    :return: the rendered (and optionally formatted) module body.
    """
    schemas = self.base_parser.specification['components']['schemas']
    for obj_name, raw_obj in schemas.items():  # type: str, Dict
        obj = JsonSchemaObject.parse_obj(raw_obj)
        # Pick the parser matching the schema's shape; plain root types are the fallback.
        if obj.is_object:
            handler = self.parse_object
        elif obj.is_array:
            handler = self.parse_array
        elif obj.enum:
            handler = self.parse_enum
        elif obj.allOf:
            handler = self.parse_all_of
        else:
            handler = self.parse_root_type
        handler(obj_name, obj)

    pieces: List[str] = []
    if with_import:
        # Python 3.7 output needs postponed annotation evaluation.
        if self.target_python_version == PythonVersion.PY_37:
            self.imports.append(IMPORT_ANNOTATIONS)
        pieces.append(f'{self.imports.dump()}\n\n\n')

    _, sorted_data_models, require_update_action_models = sort_data_models(
        self.results)
    pieces.append(dump_templates(list(sorted_data_models.values())))
    if self.dump_resolve_reference_action:
        pieces.append(
            f'\n\n{self.dump_resolve_reference_action(require_update_action_models)}')

    result = ''.join(pieces)
    if format_:
        result = format_code(result, self.target_python_version)
    return result
def generate_controllers_code(environment, parsed_object) -> Dict:
    """Render one controller module per top-level path segment.

    :param environment: jinja2 environment holding the controller template.
    :param parsed_object: parsed OpenAPI result exposing ``operations`` and ``imports``.
    :return: mapping of output path (group name) to formatted source code.
    """
    template_name = str(Path('controller.jinja2'))

    # Bucket the operations by the first segment of their URL path;
    # insertion order of first appearance is preserved.
    buckets = defaultdict(list)
    for operation in parsed_object.operations:
        group = operation.path.strip('/').split('/')[0]
        buckets[group].append(operation)

    # Render each bucket into its own file.
    rendered: Dict[Path, str] = {}
    for name, operations in buckets.items():
        source = environment.get_template(template_name).render(
            operations=operations,
            imports=parsed_object.imports,
            name=name,
        )
        rendered[Path(name)] = format_code(source, PythonVersion.PY_38)
    return rendered
def generate_code(
    input_name: str,
    input_text: str,
    output_dir: Path,
    template_dir: Optional[Path],
) -> None:
    """Generate a FastAPI application from an OpenAPI document.

    Renders every template file found in ``template_dir`` (or the built-in
    template directory) against the parsed specification, writes each result
    as a ``.py`` file under ``output_dir`` with a generation header, then
    emits the pydantic models module.

    :param input_name: name of the OpenAPI input (used in the header).
    :param input_text: raw OpenAPI document text.
    :param output_dir: directory that receives the generated files.
    :param template_dir: custom template directory; falls back to the built-in one.
    """
    if not output_dir.exists():
        output_dir.mkdir(parents=True)
    if not template_dir:
        template_dir = BUILTIN_TEMPLATE_DIR

    parser = OpenAPIParser(input_name, input_text, model_path=MODEL_PATH.stem)
    parsed_object: ParsedObject = parser.parse()
    environment: Environment = Environment(
        # template_dir is guaranteed set above; the old inline fallback was dead code.
        loader=FileSystemLoader(template_dir, encoding="utf8"),
    )

    results: Dict[Path, str] = {}
    for target in template_dir.rglob("*"):
        # rglob also yields sub-directories; only regular files can be templates.
        if not target.is_file():
            continue
        # BUGFIX: resolve relative to template_dir exactly as given. Mixing a
        # relative `target` (produced by rglob on a relative template_dir) with
        # `template_dir.absolute()` raised ValueError for relative paths.
        relative_path = target.relative_to(template_dir)
        result = environment.get_template(str(relative_path)).render(
            operations=parsed_object.operations,
            imports=parsed_object.imports,
            info=parsed_object.info,
        )
        results[relative_path] = format_code(
            result, PythonVersion.PY_38, Path().resolve()
        )

    # Second-resolution UTC timestamp for reproducible-looking headers.
    timestamp = datetime.now(timezone.utc).replace(microsecond=0).isoformat()
    header = f"""\
# generated by fastapi-codegen:
# filename: {Path(input_name).name}
# timestamp: {timestamp}"""
    for path, code in results.items():
        # Explicit encoding so generated unicode survives on all platforms.
        with output_dir.joinpath(path.with_suffix(".py")).open(
            "wt", encoding="utf-8"
        ) as file:
            print(header, file=file)
            print("", file=file)
            print(code.rstrip(), file=file)

    generate_models(
        input_=input_text,
        input_filename=input_name,
        input_file_type=InputFileType.OpenAPI,
        output=output_dir / MODEL_PATH,
        target_python_version=PythonVersion.PY_38,
    )
def parse(
    self, with_import: Optional[bool] = True, format_: Optional[bool] = True
) -> Union[str, Dict[Tuple[str, ...], str]]:
    """Parse the raw input and render the models, one output module per dotted name prefix.

    Returns a plain string when everything lands in the root module
    (``__init__.py`` only); otherwise a mapping of output-path tuples
    (e.g. ``('pkg', 'models.py')``) to rendered module bodies.

    :param with_import: prepend per-module and global import blocks.
    :param format_: run each module body through the code formatter.
    """
    self.parse_raw()
    if with_import:
        # PY_37 output needs `from __future__ import annotations`.
        if self.target_python_version == PythonVersion.PY_37:
            self.imports.append(IMPORT_ANNOTATIONS)
    _, sorted_data_models, require_update_action_models = sort_data_models(
        self.results)
    results: Dict[Tuple[str, ...], str] = {}
    # Module key: every dotted component of the model name except the last
    # (the class name itself), i.e. the package path the model belongs to.
    module_key = lambda x: (*x.name.split('.')[:-1], )
    # process in reverse order to correctly establish module levels
    grouped_models = groupby(
        sorted(sorted_data_models.values(), key=module_key, reverse=True),
        key=module_key,
    )
    for module, models in ((k, [*v]) for k, v in grouped_models):
        module_path = '.'.join(module)
        init = False
        if module:
            # Ensure every parent package gets at least an empty __init__.py.
            parent = (*module[:-1], '__init__.py')
            if parent not in results:
                results[parent] = ''
            # If a submodule already claimed this path (reverse iteration
            # visits children first), this level becomes a package __init__.
            if (*module, '__init__.py') in results:
                module = (*module, '__init__.py')
                init = True
            else:
                module = (*module[:-1], f'{module[-1]}.py')
        else:
            module = ('__init__.py', )
        result: List[str] = []
        imports = Imports()
        models_to_update: List[str] = []
        for model in models:
            # Per-model alias bookkeeping to avoid import-name collisions.
            used_import_names: Set[str] = set()
            alias_map: Dict[str, Optional[str]] = {}
            if model.name in require_update_action_models:
                models_to_update += [model.name]
            imports.append(model.imports)
            for field in model.fields:
                type_hint = field.type_hint
                if type_hint is None:  # pragma: no cover
                    continue
                for data_type in field.data_types:
                    # Only dotted type names refer to other modules.
                    if '.' not in data_type.type:
                        continue
                    from_, import_ = relative(module_path, data_type.type)
                    full_path = f'{from_}/{import_}'
                    if full_path in alias_map:
                        # Already seen: reuse the recorded alias (None means
                        # the plain import name is collision-free).
                        alias = alias_map[full_path] or import_
                    else:
                        alias = get_uniq_name(import_, used_import_names)
                        used_import_names.add(import_)
                        alias_map[
                            full_path] = None if alias == import_ else alias
                    name = data_type.type.rsplit('.', 1)[-1]
                    # Whole-word replacement of the dotted name inside the hint.
                    pattern = re.compile(
                        rf'\b{re.escape(data_type.type)}\b')
                    if from_ and import_:
                        type_hint = pattern.sub(rf'{alias}.{name}', type_hint)
                    else:
                        # Same-module reference: strip the package prefix.
                        type_hint = pattern.sub(name, type_hint)
                field.type_hint = type_hint
            for ref_name in model.reference_classes:
                from_, import_ = relative(module_path, ref_name)
                if init:
                    # Inside a package __init__, imports go one level deeper.
                    from_ += "."
                if from_ and import_:
                    imports.append(
                        Import(
                            from_=from_,
                            import_=import_,
                            alias=alias_map.get(f'{from_}/{import_}'),
                        ))
        if with_import:
            result += [imports.dump(), self.imports.dump(), '\n']
        code = dump_templates(models)
        result += [code]
        if self.dump_resolve_reference_action is not None:
            result += [
                '\n', self.dump_resolve_reference_action(models_to_update)
            ]
        body = '\n'.join(result)
        if format_:
            body = format_code(body, self.target_python_version)
        results[module] = body
    # retain existing behaviour
    if [*results] == [('__init__.py', )]:
        return results[('__init__.py', )]
    return results
def parse(
    self,
    with_import: Optional[bool] = True,
    format_: Optional[bool] = True,
    settings_path: Optional[Path] = None,
) -> Union[str, Dict[Tuple[str, ...], Result]]:
    """Parse the raw input and render the models, one ``Result`` per output module.

    Returns the single module body as a string when everything lands in
    the root module; otherwise a mapping of output-path tuples to
    ``Result`` objects carrying the body and originating source path.

    :param with_import: prepend per-module and global import blocks.
    :param format_: run each module body through the code formatter.
    :param settings_path: formatter settings location forwarded to ``format_code``.
    """
    self.parse_raw()
    if with_import:
        # Everything newer than 3.6 gets `from __future__ import annotations`.
        if self.target_python_version != PythonVersion.PY_36:
            self.imports.append(IMPORT_ANNOTATIONS)
    _, sorted_data_models, require_update_action_models = sort_data_models(
        self.results)
    results: Dict[Tuple[str, ...], Result] = {}
    module_key = lambda x: x.module_path
    # process in reverse order to correctly establish module levels
    grouped_models = groupby(
        sorted(sorted_data_models.values(), key=module_key, reverse=True),
        key=module_key,
    )
    for module, models in ((k, [*v]) for k, v in grouped_models
                           ):  # type: Tuple[str, ...], List[DataModel]
        module_path = '.'.join(module)
        init = False
        if module:
            # Every parent package gets at least an empty __init__.py.
            parent = (*module[:-1], '__init__.py')
            if parent not in results:
                results[parent] = Result(body='')
            # Children were emitted first (reverse order); if this path is
            # already a package, this level becomes its __init__.py.
            if (*module, '__init__.py') in results:
                module = (*module, '__init__.py')
                init = True
            else:
                module = (*module[:-1], f'{module[-1]}.py')
        else:
            module = ('__init__.py', )
        result: List[str] = []
        imports = Imports()
        models_to_update: List[str] = []
        # Fresh resolver per module so aliases are unique within one file only.
        scoped_model_resolver = ModelResolver()
        import_map: Dict[str, Tuple[str, str]] = {}
        model_names: Set[str] = {m.name for m in models}
        processed_models: Set[str] = set()
        for model in models:
            alias_map: Dict[str, Optional[str]] = {}
            if model.name in require_update_action_models:
                # Names this model references through its field data types.
                ref_names = {
                    d.reference.name
                    for f in model.fields
                    for d in f.data_type.all_data_types if d.reference
                }
                # Only schedule an update_forward_refs call when the model is
                # self-referential, or all references are local and at least
                # one target has not been emitted yet in this module.
                if model.name in ref_names or (not ref_names - model_names and
                                               ref_names - processed_models):
                    models_to_update += [model.name]
            imports.append(model.imports)
            processed_models.add(model.name)
            for field in model.fields:
                for data_type in field.data_type.all_data_types:  # type: DataType
                    # Skip bare local types (no dot and no module of origin).
                    if not data_type.type or (
                            '.' not in data_type.type
                            and data_type.module_name is None):
                        continue
                    type_ = (f"{data_type.module_name}.{data_type.type}"
                             if data_type.module_name else data_type.type)
                    from_, import_ = relative(module_path, type_)
                    full_path = f'{from_}/{import_}'
                    name = type_.rsplit('.', 1)[-1]
                    if data_type.reference:
                        reference = self.model_resolver.get(
                            data_type.reference.path)
                        # A reference that resolves into the same source file
                        # or the current module needs no import at all.
                        if reference and (
                                (isinstance(self.source, Path)
                                 and self.source.is_file()
                                 and self.source.name ==
                                 reference.path.split('#/')[0])
                                or reference.actual_module_name
                                == module_path):
                            if name in model.reference_classes:  # pragma: no cover
                                model.reference_classes.remove(name)
                            continue
                    if full_path in alias_map:
                        # Reuse the alias chosen earlier for this import path
                        # (None means the plain name was collision-free).
                        alias = alias_map[full_path] or import_
                    else:
                        alias = scoped_model_resolver.add(
                            full_path.split('/'), import_, unique=True).name
                        alias_map[
                            full_path] = None if alias == import_ else alias
                    new_name = f'{alias}.{name}' if from_ and import_ else name
                    if data_type.module_name and not type_.startswith(from_):
                        # Strip the trailing `.new_name` from type_ to recover
                        # the package portion for the relative import.
                        import_map[new_name] = (
                            f'.{type_[:len(new_name) * - 1 - 1]}',
                            new_name.split('.')[0],
                        )
                    if name in model.reference_classes:
                        # Keep reference bookkeeping in sync with the rename.
                        model.reference_classes.remove(name)
                        model.reference_classes.add(new_name)
                    data_type.type = new_name
            for ref_name in model.reference_classes:
                # References satisfied inside this module need no import.
                if ref_name in model_names:
                    continue
                if ref_name in import_map:
                    from_, import_ = import_map[ref_name]
                else:
                    from_, import_ = relative(module_path, ref_name)
                if init:
                    # Inside a package __init__, imports go one level deeper.
                    from_ += "."
                if from_ and import_:  # pragma: no cover
                    imports.append(
                        Import(
                            from_=from_,
                            import_=import_,
                            alias=alias_map.get(f'{from_}/{import_}'),
                        ))
        if with_import:
            result += [str(imports), str(self.imports), '\n']
        code = dump_templates(models)
        result += [code]
        if self.dump_resolve_reference_action is not None:
            result += [
                '\n', self.dump_resolve_reference_action(models_to_update)
            ]
        body = '\n'.join(result)
        if format_:
            body = format_code(body, self.target_python_version,
                               settings_path)
        results[module] = Result(body=body, source=models[0].path)
    # retain existing behaviour
    if [*results] == [('__init__.py', )]:
        return results[('__init__.py', )].body
    return results
def parse(
    self, with_import: Optional[bool] = True, format_: Optional[bool] = True
) -> Union[str, Dict[Tuple[str, ...], str]]:
    """Parse every schema under ``components.schemas`` and render the models.

    Returns a single string when all models live in the root module;
    otherwise a mapping of output-path tuples to rendered module bodies.

    :param with_import: prepend per-module and global import blocks.
    :param format_: run each module body through the code formatter.
    """
    for obj_name, raw_obj in self.base_parser.specification['components'][
            'schemas'].items():  # type: str, Dict
        obj = JsonSchemaObject.parse_obj(raw_obj)
        # Dispatch on the schema's shape; plain root types are the fallback.
        if obj.is_object:
            self.parse_object(obj_name, obj)
        elif obj.is_array:
            self.parse_array(obj_name, obj)
        elif obj.enum:
            self.parse_enum(obj_name, obj)
        elif obj.allOf:
            self.parse_all_of(obj_name, obj)
        else:
            self.parse_root_type(obj_name, obj)
    if with_import:
        # PY_37 output needs `from __future__ import annotations`.
        if self.target_python_version == PythonVersion.PY_37:
            self.imports.append(IMPORT_ANNOTATIONS)
    _, sorted_data_models, require_update_action_models = sort_data_models(
        self.results)
    results: Dict[Tuple[str, ...], str] = {}
    # Module key: all dotted components of the model name except the class
    # name itself, i.e. the package path the model belongs to.
    module_key = lambda x: (*x.name.split('.')[:-1], )
    grouped_models = groupby(sorted(sorted_data_models.values(),
                                    key=module_key),
                             key=module_key)
    for module, models in ((k, [*v]) for k, v in grouped_models):
        module_path = '.'.join(module)
        result: List[str] = []
        imports = Imports()
        models_to_update: List[str] = []
        for model in models:
            if model.name in require_update_action_models:
                models_to_update += [model.name]
            imports.append(model.imports)
            for ref_name in model.reference_classes:
                # Only dotted names refer to models in other modules.
                if '.' not in ref_name:
                    continue
                ref_path = ref_name.rsplit('.', 1)[0]
                # Same-module references need no import.
                if ref_path == module_path:
                    continue
                imports.append(Import(from_='.', import_=ref_path))
        if with_import:
            result += [imports.dump(), self.imports.dump(), '\n']
        code = dump_templates(models)
        result += [code]
        if self.dump_resolve_reference_action is not None:
            result += [
                '\n', self.dump_resolve_reference_action(models_to_update)
            ]
        body = '\n'.join(result)
        if format_:
            body = format_code(body, self.target_python_version)
        if module:
            module = (*module[:-1], f'{module[-1]}.py')
            # Every parent package gets at least an empty __init__.py.
            parent = (*module[:-1], '__init__.py')
            if parent not in results:
                results[parent] = ''
        else:
            module = ('__init__.py', )
        results[module] = body
    # retain existing behaviour
    if [*results] == [('__init__.py', )]:
        return results[('__init__.py', )]
    return results