def operation_parameters(self, operation):
    """Collect OpenAPI parameter objects for every non-body model of *operation*."""
    params = []
    for model in operation.models:
        # Body payloads are described via requestBody, not parameters.
        if model._in == "body":
            continue
        schema = model_schema(model, ref_prefix=REF_PREFIX)
        if len(schema["properties"]) == 1 and "definitions" in schema:
            definition = next(iter(schema["definitions"].values()))
            if definition["type"] == "object":
                # Special case: multiple path or query arguments grouped into a single schema
                # https://django-ninja.rest-framework.com/tutorial/path-params/#using-schema
                schema = definition
            else:
                # resolving $refs (seems only for enum)
                # TODO: better keep that ref in components/schemas/
                sole_name = next(iter(schema["properties"]))
                schema["properties"][sole_name] = definition
        required_names = set(schema.get("required", []))
        for prop_name, prop_schema in schema["properties"].items():
            params.append({
                "in": model._in,
                "name": prop_name,
                "required": prop_name in required_names,
                "schema": prop_schema,
            })
    return params
def request_body(self, operation: Operation) -> DictStrAny:
    # TODO: refactor
    """Build the OpenAPI requestBody object for *operation*; empty dict when there is none."""
    body_models = [m for m in operation.models if m._in in BODY_PARAMS]
    if not body_models:
        return {}
    assert len(body_models) == 1
    body = body_models[0]
    media_type = self.get_body_content_type(body)
    if body._in == "body":
        body_schema, body_required = self._create_schema_from_model(body)
    else:
        # form/file payloads keep the full model schema and are always required
        assert body._in in ("form", "file")
        body_schema = model_schema(body, ref_prefix=REF_PREFIX)
        body_required = True
    return {
        "content": {media_type: {"schema": body_schema}},
        "required": body_required,
    }
def operation_parameters(self, operation: Operation) -> List[DictStrAny]:
    """Build OpenAPI parameter objects for every non-body model of *operation*,
    flattening nested property definitions via flatten_properties."""
    params: List[DictStrAny] = []
    for model in operation.models:
        # Body payloads are handled by request_body, not as parameters.
        if model._in in BODY_PARAMS:
            continue
        raw = model_schema(model, ref_prefix=REF_PREFIX)
        required_names = set(raw.get("required", []))
        definitions = raw.get("definitions", {})
        for prop, prop_schema in raw["properties"].items():
            flattened = flatten_properties(
                prop, prop_schema, prop in required_names, definitions
            )
            for f_name, f_schema, f_required in flattened:
                params.append({
                    "in": model._in,
                    "name": f_name,
                    "schema": f_schema,
                    "required": f_required,
                })
    return params
def _create_schema_from_model(
    self,
    model: TModel,
    by_alias: bool = True,
    remove_level: bool = True,
) -> Tuple[DictStrAny, bool]:
    """Produce an OpenAPI schema for *model*, optionally unwrapping a
    single-property top level; returns (schema, required)."""
    if hasattr(model, "_flatten_map"):
        schema = self._flatten_schema(model)
    else:
        schema = model_schema(
            cast(Type[BaseModel], model), ref_prefix=REF_PREFIX, by_alias=by_alias
        )
    # Relocate nested definitions into components/schemas.
    if schema.get("definitions"):
        self.add_schema_definitions(schema.pop("definitions"))
    if not (remove_level and len(schema["properties"]) == 1):
        return schema, True
    # Single-property wrapper: return the inner schema plus its required flag.
    (prop_name, prop_details), = schema["properties"].items()
    # ref = prop_details["$ref"]
    return prop_details, prop_name in schema.get("required", {})
def request_body(self, operation):
    # TODO: refactor
    """Build the OpenAPI requestBody object for *operation*'s single body model;
    returns {} when the operation has no body model."""
    models = [m for m in operation.models if m._in == "body"]
    if not models:
        return {}
    assert len(models) == 1
    schema = model_schema(models[0], ref_prefix=REF_PREFIX)
    self.add_schema_definitions(schema["definitions"])
    # Resolved the old TODO: the wrapping list comprehension was a no-op,
    # list(dict.items()) is equivalent.
    properties = list(schema["properties"].items())
    assert len(properties) == 1
    name, details = properties[0]
    # The single property is a $ref into the definitions registered above.
    ref = details["$ref"]
    return {
        "content": {"application/json": {"schema": {"$ref": ref}}},
        "required": name in schema.get("required", {}),
    }
def _extract_parameters(cls, model: TModel) -> List[DictStrAny]:
    """Turn a parameter model into a list of OpenAPI parameter dicts,
    flattening nested definitions and copying per-property descriptions."""
    raw = model_schema(cast(Type[BaseModel], model), ref_prefix=REF_PREFIX)
    required_names = set(raw.get("required", []))
    definitions = raw.get("definitions", {})
    params: List[DictStrAny] = []
    for prop, prop_schema in raw["properties"].items():
        for f_name, f_schema, f_required in flatten_properties(
            prop, prop_schema, prop in required_names, definitions
        ):
            entry: DictStrAny = {
                "in": model._param_source,
                "name": f_name,
                "schema": f_schema,
                "required": f_required,
            }
            # Surface the schema-level description on the parameter itself.
            if "description" in f_schema:
                entry["description"] = f_schema["description"]
            params.append(entry)
    return params
def _create_schema_from_model(self, model):
    """Register *model*'s nested definitions, then return the schema of its
    single property together with whether that property is required."""
    schema = model_schema(model, ref_prefix=REF_PREFIX)
    # "definitions" is absent when the model has no nested sub-schemas; guard
    # the lookup instead of assuming the key exists (avoids a KeyError), matching
    # the other _create_schema_from_model variants.
    if schema.get("definitions"):
        self.add_schema_definitions(schema["definitions"])
    name, details = list(schema["properties"].items())[0]
    # ref = details["$ref"]
    required = name in schema.get("required", {})
    return details, required
def _handle_pydantic_model(self, type_annotation: Type) -> Schema:
    """Convert a pydantic model into an OpenAPI Schema, hoisting sub-model definitions."""
    # JsonSchema puts (sub-)model definitions under #/definitions while OpenAPI
    # requires them under #/components/schemas. ref_prefix only rewrites the
    # $refs, so the definition bodies themselves still have to be moved by hand.
    raw = model_schema(type_annotation, by_alias=True, ref_prefix="#/components/schemas/")
    if "definitions" in raw:
        extracted = raw.pop("definitions")
        if self.components.schemas is not None:
            self.components.schemas.update(extracted)
    return Schema(**raw)
def get_meta_api_schema(yaml_format: bool = False):
    """Returns current EpiGraphDB API schema, by default as json (a dict);
    as a YAML string when *yaml_format* is True."""
    res = {
        "meta_nodes": {
            meta_node: model_schema(model)  # type: ignore
            for meta_node, model in meta_node_schema.items()
        },
        "meta_rels": {
            meta_rel: model_schema(model)  # type: ignore
            for meta_rel, model in meta_rel_schema.items()
        },
        "meta_paths": {
            meta_rel: {"source": value[0], "target": value[1]}
            for meta_rel, value in meta_path_schema.items()
        },
    }
    if yaml_format:
        # The JSON round-trip coerces everything to plain dict/list/scalar
        # values before dumping to YAML. json.loads replaces the previous
        # yaml.load(..., FullLoader) call: parsing JSON text with the JSON
        # parser is more direct and yields the same structures.
        res = yaml.dump(json.loads(json.dumps(res)))
    return res
def _create_schema_from_model(self, model: Type[BaseModel], by_alias: bool = True) -> Tuple[Any, bool]:
    """Schema for *model*, unwrapped to its single property, plus a required flag."""
    schema = model_schema(model, ref_prefix=REF_PREFIX, by_alias=by_alias)
    definitions = schema.get("definitions")
    if definitions:
        self.add_schema_definitions(definitions)
    # The model wraps exactly one property; return its schema directly.
    first_name, first_details = next(iter(schema["properties"].items()))
    # ref = first_details["$ref"]
    return first_details, first_name in schema.get("required", {})
def _handle_pydantic_model(self, type_annotation: Type, by_alias: bool = True) -> Schema:
    """Convert a pydantic model to an OpenAPI Schema, relocating sub-model definitions."""
    # pydantic emits nested model definitions at #/definitions, whereas OpenAPI
    # expects them under #/components/schemas. ref_prefix rewrites the $refs
    # only, so the definition bodies are moved into self.components.schemas here.
    raw = model_schema(type_annotation, by_alias=by_alias, ref_prefix=self.ref_prefix)
    if "definitions" in raw:
        moved: Dict[str, Dict[str, object]] = raw.pop("definitions")
        if self.components.schemas is not None:
            for def_name, def_body in moved.items():
                self.components.schemas[def_name] = Schema(**def_body)
    return Schema(**raw)
def test_unparameterized_schema_generation():
    """Bare List/Dict annotations should generate the same schema as list/dict."""
    class FooList(BaseModel):
        d: List

    class BarList(BaseModel):
        d: list

    assert model_schema(FooList) == {
        'title': 'FooList',
        'type': 'object',
        'properties': {'d': {'items': {}, 'title': 'D', 'type': 'array'}},
        'required': ['d'],
    }

    foo_list_schema = model_schema(FooList)
    bar_list_schema = model_schema(BarList)
    bar_list_schema['title'] = 'FooList'  # to check for equality
    assert foo_list_schema == bar_list_schema

    class FooDict(BaseModel):
        d: Dict

    class BarDict(BaseModel):
        d: dict

    # Removed a stray `model_schema(Foo)` call here: `Foo` was never defined,
    # so it raised NameError before the dict assertions below could run.
    assert model_schema(FooDict) == {
        'title': 'FooDict',
        'type': 'object',
        'properties': {'d': {'title': 'D', 'type': 'object'}},
        'required': ['d'],
    }

    foo_dict_schema = model_schema(FooDict)
    bar_dict_schema = model_schema(BarDict)
    bar_dict_schema['title'] = 'FooDict'  # to check for equality
    assert foo_dict_schema == bar_dict_schema
def test_dataclass():
    """Both schema() and model_schema() should accept stdlib dataclasses."""
    @dataclass
    class Model:
        a: bool

    expected_model = {
        'title': 'Model',
        'type': 'object',
        'properties': {'a': {'title': 'A', 'type': 'boolean'}},
        'required': ['a'],
    }
    # schema() nests the model under "definitions"; model_schema() returns it flat.
    assert schema([Model]) == {'definitions': {'Model': expected_model}}
    assert model_schema(Model) == expected_model
def get_schema(config_cls: ModelMetaclass):
    """Build the config dict from *config_cls*'s JSON schema and its definitions."""
    schema = model_schema(config_cls)
    # dict.get with a default is the idiomatic form of the previous
    # `x['k'] if 'k' in x else {}` conditional expression.
    definitions = schema.get('definitions', {})
    return _create_dict(schema, definitions)