def _handle_schema(
    *, logical_name: str, schema: types.Schema, schemas: types.Schemas
) -> _IntermediaryObjectArtifacts:
    """
    Gather artifacts from the schema.

    Args:
        schema: The schema of the object reference.
        logical_name: The property name of the object reference.
        schemas: Used to resolve any $ref.

    Returns:
        The artifacts gathered from the schema.

    """
    # Read $ref and allOf
    ref = schema.get("$ref")
    all_of = schema.get("allOf")

    if ref is not None:
        intermediary_obj_artifacts = _handle_ref(
            logical_name=logical_name, schema=schema, schemas=schemas
        )
    elif all_of is not None:
        intermediary_obj_artifacts = _handle_all_of(
            logical_name=logical_name, all_of_schema=all_of, schemas=schemas
        )
    else:
        raise exceptions.MalformedRelationshipError(
            "Relationships are defined using either $ref or allOf."
        )

    return intermediary_obj_artifacts
def _peek_key(schema: types.Schema, schemas: types.Schemas, key: str, seen_refs: typing.Set[str]) -> typing.Any: """Implement peek_key.""" # Base case, look for type key value = schema.get(key) if value is not None: return value # Recursive case, look for $ref ref_value = schema.get("$ref") if ref_value is not None: # Check for circular $ref if ref_value in seen_refs: raise exceptions.MalformedSchemaError( "Circular reference detected.") seen_refs.add(ref_value) _, ref_schema = ref.get_ref(ref=ref_value, schemas=schemas) return _peek_key(ref_schema, schemas, key, seen_refs) # Recursive case, look for allOf all_of = schema.get("allOf") if all_of is not None: for sub_schema in all_of: value = _peek_key(sub_schema, schemas, key, seen_refs) if value is not None: return value # Base case, type or ref not found or no type in allOf return None
def _determine_type(*, spec: types.Schema) -> sqlalchemy.sql.type_api.TypeEngine: """ Determine the type for a specification. If no type is found, raises TypeMissingError. If the type is found but is not handled, raises FeatureNotImplementedError. Args: spec: The specification to determine the type for. Returns: The type for the specification. """ # Checking for type spec_type = spec.get("type") if spec_type is None: raise exceptions.TypeMissingError("Every property requires a type.") # Determining the type type_: typing.Optional[sqlalchemy.sql.type_api.TypeEngine] = None if spec_type == "integer": type_ = _handle_integer(spec=spec) elif spec_type == "number": type_ = _handle_number(spec=spec) elif spec_type == "string": type_ = _handle_string(spec=spec) elif spec_type == "boolean": type_ = sqlalchemy.Boolean if type_ is None: raise exceptions.FeatureNotImplementedError( f"{spec['type']} has not been implemented") return type_
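# Illustrative sketch (not from the source): expected behaviour of _determine_type for
# a few simple specifications, relying only on the handlers defined in this module.
assert _determine_type(spec={"type": "boolean"}) is sqlalchemy.Boolean
assert _determine_type(spec={"type": "integer"}) is sqlalchemy.Integer
assert isinstance(_determine_type(spec={"type": "string"}), sqlalchemy.String)
# A specification without a type raises TypeMissingError; an unsupported type raises
# FeatureNotImplementedError.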
def _resolve( name: str, schema: types.Schema, schemas: types.Schemas, seen_refs: typing.Set[str], skip_name: typing.Optional[str], ) -> NameSchema: """Implement resolve.""" # Checking whether schema is a reference schema ref = schema.get(types.OpenApiProperties.REF) if ref is None: return name, schema # Check that ref is string if not isinstance(ref, str): raise exceptions.MalformedSchemaError( "The value of $ref must be a string.") # Check for circular $ref if ref in seen_refs: raise exceptions.MalformedSchemaError( "Circular reference chain detected.") seen_refs.add(ref) ref_name, ref_schema = get_ref(ref=ref, schemas=schemas) # Check if schema should be skipped if ref_name == skip_name: return name, {} return _resolve(ref_name, ref_schema, schemas, seen_refs, skip_name)
def _handle_key_single(
    *, key: str, schema: types.Schema, default: _TValue, exception_message: str
) -> _TValue:
    """
    Read value and enforce that it only exists once.

    Raise MalformedRelationshipError if default is not None and the key exists in the
    schema.

    Args:
        key: The key to read the value of.
        schema: The schema to read the value from.
        default: The default value to return.
        exception_message: The message raised with the exception.

    Returns:
        The value of the key or the default value.

    """
    if key.startswith("x-"):
        if key == "x-kwargs":
            sub_value = helpers.ext_prop.get_kwargs(
                source=schema, reserved={"backref", "secondary"}
            )
        else:
            sub_value = helpers.ext_prop.get(source=schema, name=key)
    else:
        sub_value = schema.get(key)
    if sub_value is not None:
        if default is not None:
            raise exceptions.MalformedRelationshipError(exception_message)
        return sub_value
    return default
def resolve_ref(
    *, name: str, schema: types.Schema, schemas: types.Schemas
) -> NameSchema:
    """
    Resolve reference to another schema.

    Recursively resolves $ref until the $ref key is no longer found. On each step, the
    name of the schema is recorded.

    Raises SchemaNotFound if a $ref resolution fails.

    Args:
        name: The name of the schema from the last step.
        schema: The specification of the schema from the last step.
        schemas: Dictionary with all defined schemas used to resolve $ref.

    Returns:
        The first schema that no longer has the $ref key and the name of that schema.

    """
    # Checking whether schema is a reference schema
    ref = schema.get("$ref")
    if ref is None:
        return name, schema

    ref_name, ref_schema = get_ref(ref=ref, schemas=schemas)
    return resolve_ref(name=ref_name, schema=ref_schema, schemas=schemas)
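# Illustrative sketch (not from the source): resolving a chain of $ref entries. This
# assumes get_ref resolves references of the form "#/components/schemas/<name>"
# against the schemas mapping.
_schemas = {
    "Division": {"$ref": "#/components/schemas/Base"},
    "Base": {"type": "object", "properties": {"id": {"type": "integer"}}},
}
_name, _schema = resolve_ref(
    name="Division", schema=_schemas["Division"], schemas=_schemas
)
# _name == "Base" and _schema no longer contains a $ref key.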
def _handle_object_reference(*, spec: types.Schema, schemas: types.Schemas) -> types.Schema: """ Determine the foreign key schema for an object reference. Args: spec: The schema of the object reference. schemas: All defined schemas. Returns: The foreign key schema. """ tablename = helpers.get_ext_prop(source=spec, name="x-tablename") if not tablename: raise exceptions.MalformedSchemaError( "Referenced object is missing x-tablename property.") properties = spec.get("properties") if properties is None: raise exceptions.MalformedSchemaError( "Referenced object does not have any properties.") logical_name = "id" id_spec = properties.get(logical_name) if id_spec is None: raise exceptions.MalformedSchemaError( "Referenced object does not have id property.") # Preparing specification prepared_id_spec = helpers.prepare_schema(schema=id_spec, schemas=schemas) id_type = prepared_id_spec.get("type") if id_type is None: raise exceptions.MalformedSchemaError( "Referenced object id property does not have a type.") return {"type": id_type, "x-foreign-key": f"{tablename}.id"}
def _resolve( name: str, schema: types.Schema, schemas: types.Schemas, seen_refs: typing.Set[str], skip_name: typing.Optional[str], ) -> NameSchema: """Implement resolve.""" # Checking whether schema is a reference schema ref = schema.get("$ref") if ref is None: return name, schema # Check for circular $ref if ref in seen_refs: raise exceptions.MalformedSchemaError( "Circular reference chain detected.") seen_refs.add(ref) ref_name, ref_schema = get_ref(ref=ref, schemas=schemas) # Check if schema should be skipped if ref_name == skip_name: return name, {} return _resolve(ref_name, ref_schema, schemas, seen_refs, skip_name)
def calculate(*, instance: typing.Any, properties: types.Schema) -> str:
    """
    Calculate the repr for the model.

    The repr is the string that would be needed to create an equivalent instance of
    the model.

    Args:
        instance: The model instance to calculate the repr for.
        properties: The properties of the model instance.

    Returns:
        The string that would be needed to create an equivalent instance of the model.

    """
    # Calculate the name
    name = type(instance).__name__

    # Retrieve property values
    prop_repr_gen = (
        (prop, repr(getattr(instance, prop, None))) for prop in properties.keys()
    )
    prop_repr_str_gen = (f"{prop}={value}" for prop, value in prop_repr_gen)
    prop_repr_str = ", ".join(prop_repr_str_gen)

    # Calculate repr
    return f"open_alchemy.models.{name}({prop_repr_str})"
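# Illustrative sketch (not from the source): Widget is a hypothetical stand-in for a
# generated model; it shows the shape of the string calculate produces.
class Widget:
    def __init__(self) -> None:
        self.id = 1
        self.name = "wrench"


assert (
    calculate(instance=Widget(), properties={"id": {}, "name": {}})
    == "open_alchemy.models.Widget(id=1, name='wrench')"
)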
def _prepare_schema_object_common(
    *, schema: types.Schema, schemas: types.Schemas, array_context: bool
) -> types.ReadOnlySchemaObjectCommon:
    """
    Check and transform readOnly schema to consistent format.

    Args:
        schema: The readOnly schema to operate on.
        schemas: Used to resolve any $ref.
        array_context: Whether checking is being done at the array items level.
            Changes exception messages and schema validation.

    Returns:
        The schema in a consistent format.

    """
    # Check type
    try:
        type_ = helpers.peek.type_(schema=schema, schemas=schemas)
    except exceptions.TypeMissingError:
        raise exceptions.MalformedSchemaError(
            "Every readOnly property must have a type."
            if not array_context
            else "Array readOnly items must have a type."
        )
    schema = helpers.prepare_schema(schema=schema, schemas=schemas)
    if type_ != "object":
        raise exceptions.MalformedSchemaError(
            "readOnly array item type must be an object."
            if array_context
            else "readOnly property must be of type array or object."
        )

    # Handle object
    properties = schema.get("properties")
    if properties is None:
        raise exceptions.MalformedSchemaError(
            "readOnly object definition must include properties."
        )
    if not properties:
        raise exceptions.MalformedSchemaError(
            "readOnly object definition must include at least 1 property."
        )

    # Initialize schema properties to return
    properties_schema: types.Schema = {}

    # Process properties
    for property_name, property_schema in properties.items():
        property_type = helpers.peek.type_(schema=property_schema, schemas=schemas)
        if property_type in {"array", "object"}:
            raise exceptions.MalformedSchemaError(
                "readOnly object properties cannot be of type array nor object."
            )
        properties_schema[property_name] = {"type": property_type}

    return {"type": "object", "properties": properties_schema}
def gather(
    *, schema: types.Schema, schemas: types.Schemas, logical_name: str
) -> types.ObjectArtifacts:
    """
    Gather artifacts for constructing a reference to another model from within an array.

    Args:
        schema: The schema of the array reference.
        schemas: All the model schemas used to resolve any $ref within the array
            reference schema.
        logical_name: The name of the array reference within its parent schema.

    Returns:
        The artifacts required to construct the array reference.

    """
    # Resolve any allOf and $ref
    schema = helpers.schema.prepare(schema=schema, schemas=schemas)

    # Get item schema
    item_schema = schema.get("items")
    if item_schema is None:
        raise exceptions.MalformedRelationshipError(
            "An array property must include items property."
        )

    # Retrieve artifacts for the object reference within the array
    artifacts = object_ref.artifacts.gather(
        schema=item_schema, logical_name=logical_name, schemas=schemas
    )

    # Check for uselist
    if (
        artifacts.relationship.back_reference is not None
        and artifacts.relationship.back_reference.uselist is not None
    ):
        raise exceptions.MalformedRelationshipError(
            "x-uselist is not supported for one to many nor many to many relationships."
        )
    # Check for nullable
    if artifacts.nullable is not None:
        raise exceptions.MalformedRelationshipError(
            "nullable is not supported for one to many nor many to many relationships."
        )

    # Check referenced specification
    ref_schema = helpers.schema.prepare(schema=artifacts.spec, schemas=schemas)
    ref_tablename = helpers.ext_prop.get(source=ref_schema, name="x-tablename")
    if ref_tablename is None:
        raise exceptions.MalformedRelationshipError(
            "One to many relationships must reference a schema with "
            "x-tablename defined."
        )

    # Add description
    try:
        description = helpers.peek.description(schema=schema, schemas={})
    except exceptions.MalformedSchemaError as exc:
        raise exceptions.MalformedRelationshipError(str(exc))
    if description is not None:
        artifacts.description = description

    return artifacts
def _merge(schema: types.Schema, schemas: types.Schemas, skip_name: typing.Optional[str]) -> types.Schema: """Implement merge.""" all_of = schema.get("allOf") if all_of is None: return schema merged_schema: types.Schema = {} for sub_schema in all_of: # Resolving any $ref _, ref_schema = ref.resolve(name="", schema=sub_schema, schemas=schemas, skip_name=skip_name) # Merging any nested allOf merged_sub_schema = _merge(ref_schema, schemas, skip_name) # Capturing required arrays merged_required = merged_schema.get(types.OpenApiProperties.REQUIRED) sub_required = merged_sub_schema.get(types.OpenApiProperties.REQUIRED) # Capturing properties merged_properties = merged_schema.get( types.OpenApiProperties.PROPERTIES) sub_properties = merged_sub_schema.get( types.OpenApiProperties.PROPERTIES) # Capturing backrefs merged_backrefs = merged_schema.get(types.ExtensionProperties.BACKREFS) sub_backrefs = merged_sub_schema.get( types.ExtensionProperties.BACKREFS) # Combining sub into merged specification merged_schema = {**merged_schema, **merged_sub_schema} # Checking whether required was present on both specs if merged_required is not None and sub_required is not None: # Both have a required array, need to merge them together required_set = set(merged_required).union(sub_required) merged_schema[types.OpenApiProperties.REQUIRED] = list( required_set) # Checking whether properties was present on both specs if merged_properties is not None and sub_properties is not None: # Both have properties, merge properties merged_schema[types.OpenApiProperties.PROPERTIES] = { **merged_properties, **sub_properties, } # Checking whether backrefs was present on both specs if merged_backrefs is not None and sub_backrefs is not None: # Both have backrefs, merge backrefs merged_schema[types.ExtensionProperties.BACKREFS] = { **merged_backrefs, **sub_backrefs, } return merged_schema
def peek_key( schema: types.Schema, schemas: types.Schemas, key: str, seen_refs: typing.Set[str], skip_ref: typing.Optional[str], ) -> typing.Any: """Execute peek_key.""" check_schema_schemas_dict(schema, schemas) # Base case, look for type key keys = ( [key.replace("x-", prefix) for prefix in types.KeyPrefixes] if key.startswith("x-") else [key] ) value = next(filter(lambda value: value is not None, map(schema.get, keys)), None) if value is not None: return value # Recursive case, look for $ref ref_value = schema.get(types.OpenApiProperties.REF) if ref_value is not None: ref_value_str = check_ref_string(ref_value) check_circular_ref(ref_value_str, seen_refs) ref_name, ref_schema = ref_helper.get_ref(ref=ref_value_str, schemas=schemas) if skip_ref is not None and ref_name == skip_ref: return None return peek_key(ref_schema, schemas, key, seen_refs, skip_ref) # Recursive case, look for allOf all_of = schema.get("allOf") if all_of is not None: all_of_list = check_all_of_list(all_of) for sub_schema in all_of_list: sub_schema_dict = check_sub_schema_dict(sub_schema) value = peek_key(sub_schema_dict, schemas, key, seen_refs, skip_ref) if value is not None: return value # Base case, type or ref not found or no type in allOf return None
def _handle_integer( *, spec: types.Schema ) -> typing.Union[sqlalchemy.Integer, sqlalchemy.BigInteger]: """ Determine the type of integer to use for the schema. Args: spec: The schema for the integer column. Returns: Integer or BigInteger depending on the format. """ if spec.get("format", "int32") == "int32": return sqlalchemy.Integer if spec.get("format") == "int64": return sqlalchemy.BigInteger raise exceptions.FeatureNotImplementedError( f"{spec.get('format')} format for integer is not supported.")
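# Illustrative sketch (not from the source): format handling of _handle_integer.
assert _handle_integer(spec={"type": "integer"}) is sqlalchemy.Integer
assert _handle_integer(spec={"type": "integer", "format": "int32"}) is sqlalchemy.Integer
assert _handle_integer(spec={"type": "integer", "format": "int64"}) is sqlalchemy.BigInteger
# Any other format raises FeatureNotImplementedError.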
def merge_all_of(*, schema: types.Schema, schemas: types.Schemas) -> types.Schema: """ Merge schemas under allOf statement. Merges schemas under allOf statement which is expected to have a list of schemas. Any duplicate keys will be overridden. Schemas are processed in the order they are listed. Args: schema: The schema to operate on. schemas: Used to resolve any $ref. Returns: The schema with all top level allOf statements resolved. """ all_of = schema.get("allOf") if all_of is None: return schema merged_schema: types.Schema = {} for sub_schema in all_of: # Resolving any $ref _, ref_schema = resolve_ref(name="", schema=sub_schema, schemas=schemas) # Merging any nested allOf merged_sub_schema = merge_all_of(schema=ref_schema, schemas=schemas) # Capturing required arrays merged_required = merged_schema.get("required") sub_required = merged_sub_schema.get("required") # Capturing properties merged_properties = merged_schema.get("properties") sub_properties = merged_sub_schema.get("properties") # Combining sub into merged specification merged_schema = {**merged_schema, **merged_sub_schema} # Checking whether required was present on both specs if merged_required is not None and sub_required is not None: # Both have a required array, need to merge them together required_set = set(merged_required).union(sub_required) merged_schema["required"] = list(required_set) # Checking whether properties was present on both specs if merged_properties is not None and sub_properties is not None: # Both have properties, merge properties merged_schema["properties"] = { **merged_properties, **sub_properties } return merged_schema
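# Illustrative sketch (not from the source): merging a top-level allOf where one
# sub-schema is a $ref. Assumes get_ref (used via resolve_ref) resolves
# "#/components/schemas/<name>" against the schemas mapping.
_schemas = {
    "Base": {
        "type": "object",
        "properties": {"id": {"type": "integer"}},
        "required": ["id"],
    }
}
_schema = {
    "allOf": [
        {"$ref": "#/components/schemas/Base"},
        {"properties": {"name": {"type": "string"}}, "required": ["name"]},
    ]
}
_merged = merge_all_of(schema=_schema, schemas=_schemas)
# _merged["properties"] contains both "id" and "name"; _merged["required"] contains
# "id" and "name" (order not guaranteed).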
def prefer_local( get_value: PeekValue, schema: types.Schema, schemas: types.Schemas, seen_refs: typing.Set[str], ) -> typing.Any: """Execute prefer_local.""" check_schema_schemas_dict(schema, schemas) # Handle $ref ref_value = schema.get(types.OpenApiProperties.REF) if ref_value is not None: ref_value_str = check_ref_string(ref_value) check_circular_ref(ref_value_str, seen_refs) _, ref_schema = ref_helper.get_ref(ref=ref_value_str, schemas=schemas) return prefer_local(get_value, ref_schema, schemas, seen_refs) # Handle allOf all_of = schema.get("allOf") if all_of is not None: all_of_list = check_all_of_list(all_of) all_of_list_dict = map(check_sub_schema_dict, all_of_list) # Order putting any $ref last sorted_all_of = sorted( all_of_list_dict, key=lambda sub_schema: sub_schema.get(types.OpenApiProperties.REF) is not None, ) def map_to_value(sub_schema: types.Schema) -> typing.Any: """Use get_value to turn the schema into the value.""" return prefer_local(get_value, sub_schema, schemas, seen_refs) retrieved_values = map(map_to_value, sorted_all_of) not_none_retrieved_values = filter( lambda value: value is not None, retrieved_values ) retrieved_value = next(not_none_retrieved_values, None) return retrieved_value return get_value(schema=schema, schemas=schemas)
def _handle_string(*, spec: types.Schema) -> sqlalchemy.String: """ Determine the setup of the string to use for the schema. Args: spec: The schema for the string column. Returns: String. """ return sqlalchemy.String(length=spec.get("maxLength"))
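# Illustrative sketch (not from the source): maxLength maps to the String length.
_string_type = _handle_string(spec={"type": "string", "maxLength": 50})
assert _string_type.length == 50
# Without maxLength the length is None (unbounded).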
def init_model_factory(*, base: typing.Type, spec: oa_types.Schema, define_all: bool = False) -> oa_types.ModelFactory: """ Create factory that generates SQLAlchemy models based on OpenAPI specification. Args: base: The declarative base for the models. spec: The OpenAPI specification in the form of a dictionary. define_all: Whether to define all the models during initialization. Returns: A factory that returns SQLAlchemy models derived from the base based on the OpenAPI specification. """ # Retrieving the schema from the specification if "components" not in spec: raise exceptions.MalformedSpecificationError( '"components" is a required key in the specification.') components = spec.get("components", {}) if "schemas" not in components: raise exceptions.MalformedSpecificationError( '"schemas" is a required key in the components of the specification.' ) schemas = components.get("schemas", {}) # Binding the base and schemas bound_model_factories = functools.partial(_model_factory.model_factory, schemas=schemas, base=base) # Caching calls cached_model_factories = functools.lru_cache( maxsize=None)(bound_model_factories) # Making Base importable setattr(models, "Base", base) # Intercepting factory calls to make models available def _register_model(*, name: str) -> typing.Type: """Intercept calls to model factory and register model on models.""" model = cached_model_factories(name=name) setattr(models, name, model) return model if define_all: _helpers.define_all(model_factory=_register_model, schemas=schemas) return _register_model
def peek_key(*, schema: types.Schema, schemas: types.Schemas, key: str) -> typing.Any:
    """Recursively look up a key, resolving $ref and allOf."""
    # Base case, look for the key
    value = schema.get(key)
    if value is not None:
        return value

    # Recursive case, look for $ref
    ref_value = schema.get("$ref")
    if ref_value is not None:
        _, ref_schema = ref.get_ref(ref=ref_value, schemas=schemas)
        return peek_key(schema=ref_schema, schemas=schemas, key=key)

    # Recursive case, look for allOf
    all_of = schema.get("allOf")
    if all_of is not None:
        for sub_schema in all_of:
            value = peek_key(schema=sub_schema, schemas=schemas, key=key)
            if value is not None:
                return value

    # Base case, key or ref not found or no key in allOf
    return None
def _peek_type(*, schema: types.Schema, schemas: types.Schemas) -> typing.Optional[str]: """Recursive type lookup.""" # Base case, look for type key type_ = schema.get("type") if type_ is not None: return type_ # Recursive case, look for $ref ref = schema.get("$ref") if ref is not None: _, ref_schema = get_ref(ref=ref, schemas=schemas) return _peek_type(schema=ref_schema, schemas=schemas) # Recursive case, look for allOf all_of = schema.get("allOf") if all_of is not None: for sub_schema in all_of: type_ = _peek_type(schema=sub_schema, schemas=schemas) if type_ is not None: return type_ # Base case, type or ref not found or no type in allOf return None
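# Illustrative sketch (not from the source): type lookup through allOf and $ref,
# assuming get_ref resolves "#/components/schemas/<name>" against the schemas mapping.
_schemas = {"Id": {"type": "integer"}}
assert (
    _peek_type(
        schema={"allOf": [{"$ref": "#/components/schemas/Id"}]}, schemas=_schemas
    )
    == "integer"
)
assert _peek_type(schema={"description": "no type"}, schemas={}) is None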
def _handle_number(*, spec: types.Schema) -> sqlalchemy.Float: """ Determine the type of number to use for the schema. Args: spec: The schema for the number column. Returns: Float. """ if spec.get("format", "float") == "float": return sqlalchemy.Float raise exceptions.FeatureNotImplementedError( f"{spec.get('format')} format for number is not supported.")
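# Illustrative sketch (not from the source): only the float format is supported.
assert _handle_number(spec={"type": "number"}) is sqlalchemy.Float
assert _handle_number(spec={"type": "number", "format": "float"}) is sqlalchemy.Float
# "double" (or any other format) raises FeatureNotImplementedError.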
def _prepare_schema_array( *, schema: types.Schema, schemas: types.Schemas ) -> types.ReadOnlyArraySchema: """ Check and transform readOnly schema to consistent format. Args: schema: The readOnly schema to operate on. schemas: Used to resolve any $ref. Returns: The schema in a consistent format. """ schema = helpers.prepare_schema(schema=schema, schemas=schemas) items_schema = schema.get("items") if items_schema is None: raise exceptions.MalformedSchemaError("A readOnly array must define its items.") array_object_schema = _prepare_schema_object_common( schema=items_schema, schemas=schemas, array_context=True ) return {"type": "array", "readOnly": True, "items": array_object_schema}
def _calculate_nullable(*, spec: types.Schema, required: typing.Optional[bool]) -> bool:
    """
    Calculate the value of the nullable field.

    The following is the truth table for the nullable property.

    required | schema nullable | returned nullable
    ---------|-----------------|------------------
    None     | not given       | True
    None     | False           | False
    None     | True            | True
    False    | not given       | True
    False    | False           | False
    False    | True            | True
    True     | not given       | False
    True     | False           | False
    True     | True            | True

    To summarize, if nullable is in the schema, its value is used. Otherwise True is
    returned unless required is True.

    Args:
        spec: The schema for the column.
        required: Whether the property is required.

    Returns:
        The nullable value for the column.

    """
    nullable = spec.get("nullable")
    if nullable is None:
        if required:
            return False
        return True
    if nullable:
        return True
    return False
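# Illustrative sketch (not from the source): a few rows of the truth table above.
assert _calculate_nullable(spec={}, required=None) is True
assert _calculate_nullable(spec={"nullable": False}, required=None) is False
assert _calculate_nullable(spec={}, required=True) is False
assert _calculate_nullable(spec={"nullable": True}, required=True) is True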
def handle_object_reference(*, spec: types.Schema, schemas: types.Schemas, fk_column: str) -> types.Schema: """ Determine the foreign key schema for an object reference. Args: spec: The schema of the object reference. schemas: All defined schemas. fk_column: The foreign column name to use. Returns: The foreign key schema. """ tablename = helpers.get_ext_prop(source=spec, name="x-tablename") if not tablename: raise exceptions.MalformedSchemaError( "Referenced object is missing x-tablename property.") properties = spec.get("properties") if properties is None: raise exceptions.MalformedSchemaError( "Referenced object does not have any properties.") fk_logical_name = fk_column if fk_column is not None else "id" fk_spec = properties.get(fk_logical_name) if fk_spec is None: raise exceptions.MalformedSchemaError( f"Referenced object does not have {fk_logical_name} property.") # Preparing specification prepared_fk_spec = helpers.prepare_schema(schema=fk_spec, schemas=schemas) fk_type = prepared_fk_spec.get("type") if fk_type is None: raise exceptions.MalformedSchemaError( f"Referenced object {fk_logical_name} property does not have a type." ) return {"type": fk_type, "x-foreign-key": f"{tablename}.{fk_logical_name}"}
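# Illustrative sketch (not from the source): building a foreign key schema against a
# non-default column, assuming helpers.get_ext_prop and helpers.prepare_schema behave
# as used above.
_referenced = {
    "x-tablename": "user",
    "properties": {"id": {"type": "integer"}, "name": {"type": "string"}},
}
_fk_schema = handle_object_reference(spec=_referenced, schemas={}, fk_column="name")
# _fk_schema == {"type": "string", "x-foreign-key": "user.name"}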
def _many_to_many_column_artifacts(
    *, model_schema: types.Schema, schemas: types.Schemas
) -> _ManyToManyColumnArtifacts:
    """
    Retrieve column artifacts of a secondary table for a many to many relationship.

    Args:
        model_schema: The schema for one side of the many to many relationship.
        schemas: Used to resolve any $ref.

    Returns:
        The artifacts needed to construct a column of the secondary table in a many to
        many relationship.

    """
    # Resolve $ref and merge allOf
    model_schema = helpers.prepare_schema(schema=model_schema, schemas=schemas)

    # Check schema type
    model_type = model_schema.get("type")
    if model_type is None:
        raise exceptions.MalformedSchemaError("Every schema must have a type.")
    if model_type != "object":
        raise exceptions.MalformedSchemaError(
            "A schema that is part of a many to many relationship must be of type "
            "object."
        )

    # Retrieve table name
    tablename = helpers.get_ext_prop(source=model_schema, name="x-tablename")
    if tablename is None:
        raise exceptions.MalformedSchemaError(
            "A schema that is part of a many to many relationship must set the "
            "x-tablename property."
        )

    # Find primary key
    properties = model_schema.get("properties")
    if properties is None:
        raise exceptions.MalformedSchemaError(
            "A schema that is part of a many to many relationship must have properties."
        )
    if not properties:
        raise exceptions.MalformedSchemaError(
            "A schema that is part of a many to many relationship must have at least 1 "
            "property."
        )
    type_ = None
    format_ = None
    for property_name, property_schema in properties.items():
        if helpers.peek.primary_key(schema=property_schema, schemas=schemas):
            if type_ is not None:
                raise exceptions.MalformedSchemaError(
                    "A schema that is part of a many to many relationship must have "
                    "exactly 1 primary key."
                )
            try:
                type_ = helpers.peek.type_(schema=property_schema, schemas=schemas)
            except exceptions.TypeMissingError:
                raise exceptions.MalformedSchemaError(
                    "A schema that is part of a many to many relationship must define "
                    "a type for the primary key."
                )
            format_ = helpers.peek.format_(schema=property_schema, schemas=schemas)
            max_length = helpers.peek.max_length(
                schema=property_schema, schemas=schemas
            )
            column_name = property_name
    if type_ is None:
        raise exceptions.MalformedSchemaError(
            "A schema that is part of a many to many relationship must have "
            "exactly 1 primary key."
        )
    if type_ in {"object", "array"}:
        raise exceptions.MalformedSchemaError(
            "A schema that is part of a many to many relationship cannot define its "
            "primary key to be of type object nor array."
        )

    return _ManyToManyColumnArtifacts(
        type_, format_, tablename, column_name, max_length
    )
def gather_artifacts( *, model_schema: types.Schema, logical_name: str, schemas: types.Schemas, fk_column: str, required: typing.Optional[bool] = None, nullable: typing.Optional[bool] = None, ) -> typing.Tuple[str, types.ColumnArtifacts]: """ Gather artifacts for a foreign key to implement an object reference. Assume any object schema level allOf and $ref have already been resolved. Raise MalformedSchemaError if x-tablename or properties are missing. Also raise if the foreign key column is not found in the model schema or it does not have a type. Args: model_schema: The schema of the referenced model. logical_name: The logical name of the property with the object reference. schemas: All model schemas used to resolve any $ref. fk_column: The name of the foreign key column. required: Whether the foreign key is constructed for a property that is required. nullable: Whether the foreign key is constructed for a property that is nullable. Returns: The logical name of the foreign key and the artifacts required to construct it. """ tablename = helpers.ext_prop.get(source=model_schema, name="x-tablename") if not tablename: raise exceptions.MalformedSchemaError( "Referenced object is missing x-tablename property.") properties = model_schema.get("properties") if properties is None: raise exceptions.MalformedSchemaError( "Referenced object does not have any properties.") fk_schema = properties.get(fk_column) if fk_schema is None: raise exceptions.MalformedSchemaError( f"Referenced object does not have {fk_column} property.") # Gather artifacts try: fk_type = helpers.peek.type_(schema=fk_schema, schemas=schemas) except exceptions.TypeMissingError: raise exceptions.MalformedSchemaError( f"Referenced object {fk_column} property does not have a type.") fk_format = helpers.peek.format_(schema=fk_schema, schemas=schemas) fk_max_length = helpers.peek.max_length(schema=fk_schema, schemas=schemas) fk_default = helpers.peek.default(schema=fk_schema, schemas=schemas) nullable = helpers.calculate_nullable( nullable=nullable, generated=False, required=required, defaulted=fk_default is not None, ) # Construct return values return_logical_name = f"{logical_name}_{fk_column}" artifacts = types.ColumnArtifacts( open_api=types.OpenAPiColumnArtifacts( type=fk_type, format=fk_format, nullable=nullable, max_length=fk_max_length, default=fk_default, ), extension=types.ExtensionColumnArtifacts( foreign_key=f"{tablename}.{fk_column}"), ) return return_logical_name, artifacts
def calculate(*, schema: oa_types.Schema, name: str) -> types.ModelArtifacts: """ Calculate the model artifacts from the schema. Args: schema: The schema of the model name: The name of the model. Returns: The artifacts for the model. """ # Resolve inheritance chain schema = helpers.inheritance.retrieve_model_parents_schema(schema=schema) required = set(schema.get("required", [])) description = helpers.peek.description(schema=schema, schemas={}) # Convert schemas to artifacts prop_schemas = schema["properties"].values() prop_required_list = [ key in required for key in schema["properties"].keys() ] columns_artifacts = list( map(gather_column_artifacts, prop_schemas, prop_required_list)) # Calculate artifacts for columns columns = list( map(_calculate_column_artifacts, schema["properties"].keys(), columns_artifacts)) # Calculate artifacts for the typed dictionary write_only_idx = [ artifact.open_api.write_only for artifact in columns_artifacts ] typed_dict_props = list( map( _calculate_typed_dict_artifacts, (value for idx, value in enumerate(schema["properties"].keys()) if not write_only_idx[idx]), (value for idx, value in enumerate(columns_artifacts) if not write_only_idx[idx]), )) typed_dict_required_props = [ typed_dict_prop for prop_required, typed_dict_prop in zip( prop_required_list, typed_dict_props) if prop_required ] typed_dict_not_required_props = [ typed_dict_prop for prop_required, typed_dict_prop in zip( prop_required_list, typed_dict_props) if not prop_required ] # Calculate artifacts for the arguments args = list( map(_calculate_arg_artifacts, schema["properties"].keys(), columns_artifacts)) required_args = [ arg for prop_required, arg in zip(prop_required_list, args) if prop_required ] not_required_args = [ arg for prop_required, arg in zip(prop_required_list, args) if not prop_required ] # Calculate artifacts for back references backrefs = helpers.ext_prop.get(source=schema, name="x-backrefs") if backrefs is not None: backref_column_artifacts = map(gather_column_artifacts, backrefs.values(), itertools.repeat(None)) backref_columns_iter = map(_calculate_column_artifacts, backrefs.keys(), backref_column_artifacts) columns.extend(backref_columns_iter) # Calculate model parent class parent_cls: str if sys.version_info[1] < 8: parent_cls = "typing_extensions.Protocol" else: # version compatibility parent_cls = "typing.Protocol" # Calculate whether property lists are empty, their names and parent class typed_dict_required_empty = not typed_dict_required_props typed_dict_not_required_empty = not typed_dict_not_required_props typed_dict_required_name = None typed_dict_not_required_name: typing.Optional[str] = f"{name}Dict" typed_dict_required_parent_class = None typed_dict_not_required_parent_class: typing.Optional[str] if sys.version_info[1] < 8: typed_dict_not_required_parent_class = "typing_extensions.TypedDict" else: # version compatibility typed_dict_not_required_parent_class = "typing.TypedDict" if not typed_dict_required_empty and not typed_dict_not_required_empty: typed_dict_required_parent_class = typed_dict_not_required_parent_class typed_dict_required_name = f"_{name}DictBase" typed_dict_not_required_parent_class = typed_dict_required_name if not typed_dict_required_empty and typed_dict_not_required_empty: typed_dict_required_name = typed_dict_not_required_name typed_dict_not_required_name = None typed_dict_required_parent_class = typed_dict_not_required_parent_class typed_dict_not_required_parent_class = None return types.ModelArtifacts( 
sqlalchemy=types.SQLAlchemyModelArtifacts( name=name, columns=columns, empty=not columns, arg=types.ArgArtifacts(required=required_args, not_required=not_required_args), parent_cls=parent_cls, description=description, ), typed_dict=types.TypedDictArtifacts( required=types.TypedDictClassArtifacts( props=typed_dict_required_props, empty=typed_dict_required_empty, name=typed_dict_required_name, parent_class=typed_dict_required_parent_class, ), not_required=types.TypedDictClassArtifacts( props=typed_dict_not_required_props, empty=typed_dict_not_required_empty, name=typed_dict_not_required_name, parent_class=typed_dict_not_required_parent_class, ), ), )
def init_model_factory(
    *,
    base: typing.Type,
    spec: oa_types.Schema,
    models_filename: typing.Optional[str] = None,
    spec_path: typing.Optional[str] = None,
) -> oa_types.ModelFactory:
    """
    Create factory that generates SQLAlchemy models based on OpenAPI specification.

    Args:
        base: The declarative base for the models.
        spec: The OpenAPI specification in the form of a dictionary.
        models_filename: The name of the file to write the models typing information
            to.
        spec_path: The path to the OpenAPI specification. Mainly used to support
            remote references.

    Returns:
        A factory that returns SQLAlchemy models derived from the base based on the
        OpenAPI specification.

    """
    # Record the spec path
    if spec_path is not None:
        _helpers.ref.set_context(path=spec_path)

    # Retrieving the schema from the specification
    if "components" not in spec:
        raise exceptions.MalformedSpecificationError(
            '"components" is a required key in the specification.'
        )
    components = spec.get("components", {})
    if "schemas" not in components:
        raise exceptions.MalformedSpecificationError(
            '"schemas" is a required key in the components of the specification.'
        )
    schemas = components.get("schemas", {})

    # Pre-processing schemas
    _schemas_module.process(schemas=schemas)

    # Getting artifacts
    schemas_artifacts = _schemas_module.artifacts.get_from_schemas(
        schemas=schemas, stay_within_model=True
    )

    # Binding the base and schemas
    bound_model_factories = functools.partial(
        _model_factory.model_factory,
        schemas=schemas,
        artifacts=schemas_artifacts,
        get_base=_get_base,
    )
    # Caching calls
    cached_model_factories = functools.lru_cache(maxsize=None)(bound_model_factories)

    # Making Base importable
    setattr(models, "Base", base)

    # Intercept factory calls to make models available
    def _register_model(*, name: str) -> typing.Type:
        """Intercept calls to model factory and register model on models."""
        model = cached_model_factories(name=name)
        setattr(models, name, model)
        return model

    if models_filename is not None:
        schemas_artifacts = _schemas_module.artifacts.get_from_schemas(
            schemas=schemas, stay_within_model=False
        )
        models_file_contents = _models_file.generate(artifacts=schemas_artifacts)
        with open(models_filename, "w") as out_file:
            out_file.write(models_file_contents)

    _helpers.define_all(model_factory=_register_model, schemas=schemas)

    return _register_model
def calculate(*, schema: oa_types.Schema, name: str) -> types.ModelArtifacts: """ Calculate the model artifacts from the schema. Args: schema: The schema of the model name: The name of the model. Returns: The artifacts for the model. """ required = set(schema.get("required", [])) description = helpers.peek.description(schema=schema, schemas={}) # Initialize lists columns: typing.List[types.ColumnArtifacts] = [] td_required_props: typing.List[types.ColumnArtifacts] = [] td_not_required_props: typing.List[types.ColumnArtifacts] = [] required_args: typing.List[types.ColumnArgArtifacts] = [] not_required_args: typing.List[types.ColumnArgArtifacts] = [] # Calculate artifacts for properties for property_name, property_schema in schema["properties"].items(): # Gather artifacts property_required = property_name in required column_artifacts = gather_column_artifacts(schema=property_schema, required=property_required) # Calculate the type column_type = _type.model(artifacts=column_artifacts) td_prop_type = _type.typed_dict(artifacts=column_artifacts) arg_init_type = _type.arg_init(artifacts=column_artifacts) arg_from_dict_type = _type.arg_from_dict(artifacts=column_artifacts) # Add artifacts to the lists columns.append( types.ColumnArtifacts( type=column_type, name=property_name, description=column_artifacts.description, )) prop_artifacts = types.ColumnArtifacts(type=td_prop_type, name=property_name) arg_artifacts = types.ColumnArgArtifacts( init_type=arg_init_type, from_dict_type=arg_from_dict_type, name=property_name, default=_map_default(artifacts=column_artifacts), ) if property_required: td_required_props.append(prop_artifacts) required_args.append(arg_artifacts) else: td_not_required_props.append(prop_artifacts) not_required_args.append(arg_artifacts) # Calculate artifacts for back references backrefs = helpers.ext_prop.get(source=schema, name="x-backrefs") if backrefs is not None: for backref_name, backref_schema in backrefs.items(): # Gather artifacts column_artifacts = gather_column_artifacts(schema=backref_schema, required=None) # Calculate the type column_type = _type.model(artifacts=column_artifacts) # Add artifacts to the lists columns.append( types.ColumnArtifacts(type=column_type, name=backref_name)) # Calculate model parent class parent_cls: str if sys.version_info[1] < 8: parent_cls = "typing_extensions.Protocol" else: # version compatibility parent_cls = "typing.Protocol" # Calculate whether property lists are empty, their names and parent class td_required_empty = not td_required_props td_not_required_empty = not td_not_required_props td_required_name = None td_not_required_name: typing.Optional[str] = f"{name}Dict" td_required_parent_class = None td_not_required_parent_class: typing.Optional[str] if sys.version_info[1] < 8: td_not_required_parent_class = "typing_extensions.TypedDict" else: # version compatibility td_not_required_parent_class = "typing.TypedDict" if not td_required_empty and not td_not_required_empty: td_required_parent_class = td_not_required_parent_class td_required_name = f"_{name}DictBase" td_not_required_parent_class = td_required_name if not td_required_empty and td_not_required_empty: td_required_name = td_not_required_name td_not_required_name = None td_required_parent_class = td_not_required_parent_class td_not_required_parent_class = None return types.ModelArtifacts( sqlalchemy=types.SQLAlchemyModelArtifacts( name=name, columns=columns, empty=not columns, arg=types.ArgArtifacts(required=required_args, not_required=not_required_args), 
parent_cls=parent_cls, description=description, ), typed_dict=types.TypedDictArtifacts( required=types.TypedDictClassArtifacts( props=td_required_props, empty=td_required_empty, name=td_required_name, parent_class=td_required_parent_class, ), not_required=types.TypedDictClassArtifacts( props=td_not_required_props, empty=td_not_required_empty, name=td_not_required_name, parent_class=td_not_required_parent_class, ), ), )
def _gather_column_artifacts(
    *, model_schema: types.Schema, schemas: types.Schemas
) -> _ColumnArtifacts:
    """
    Retrieve column artifacts of a secondary table for a many to many relationship.

    The model primary key is used as the base of the foreign key column in the
    secondary table for the model. It is assumed that the model has a single primary
    key column.

    Raise MalformedSchemaError if the model schema does not have a type, has a type
    but is not an object, does not have the x-tablename property, does not have any
    properties, does not have exactly 1 primary key, the primary key column doesn't
    define a type or the type is an object or array.

    Args:
        model_schema: The schema for one side of the many to many relationship.
        schemas: Used to resolve any $ref.

    Returns:
        The artifacts needed to construct a column of the secondary table in a many to
        many relationship.

    """
    # Resolve $ref and merge allOf
    model_schema = helpers.schema.prepare(schema=model_schema, schemas=schemas)

    # Check schema type
    model_type = model_schema.get("type")
    if model_type is None:
        raise exceptions.MalformedSchemaError("Every schema must have a type.")
    if model_type != "object":
        raise exceptions.MalformedSchemaError(
            "A schema that is part of a many to many relationship must be of type "
            "object."
        )

    # Retrieve table name
    tablename = helpers.ext_prop.get(source=model_schema, name="x-tablename")
    if tablename is None:
        raise exceptions.MalformedSchemaError(
            "A schema that is part of a many to many relationship must set the "
            "x-tablename property."
        )

    # Find primary key
    properties = model_schema.get("properties")
    if properties is None:
        raise exceptions.MalformedSchemaError(
            "A schema that is part of a many to many relationship must have properties."
        )
    if not properties:
        raise exceptions.MalformedSchemaError(
            "A schema that is part of a many to many relationship must have at least 1 "
            "property."
        )
    type_ = None
    format_ = None
    for property_name, property_schema in properties.items():
        if helpers.peek.primary_key(schema=property_schema, schemas=schemas):
            # Check whether this is the first primary key that has been encountered
            if type_ is not None:
                raise exceptions.MalformedSchemaError(
                    "A schema that is part of a many to many relationship must have "
                    "exactly 1 primary key."
                )

            # Gather artifacts for constructing the foreign key column
            try:
                type_ = helpers.peek.type_(schema=property_schema, schemas=schemas)
            except exceptions.TypeMissingError:
                raise exceptions.MalformedSchemaError(
                    "A schema that is part of a many to many relationship must define "
                    "a type for the primary key."
                )
            format_ = helpers.peek.format_(schema=property_schema, schemas=schemas)
            max_length = helpers.peek.max_length(
                schema=property_schema, schemas=schemas
            )
            column_name = property_name

    # Check whether at least 1 primary key column has been found
    if type_ is None:
        raise exceptions.MalformedSchemaError(
            "A schema that is part of a many to many relationship must have "
            "exactly 1 primary key."
        )
    # Check that the type is for a column
    if type_ in {"object", "array"}:
        raise exceptions.MalformedSchemaError(
            "A schema that is part of a many to many relationship cannot define its "
            "primary key to be of type object nor array."
        )

    return _ColumnArtifacts(
        type=type_,
        format=format_,
        tablename=tablename,
        column_name=column_name,
        max_length=max_length,
    )