"""Generate the column-example models and write them out for editor support."""
import open_alchemy

# Build the SQLAlchemy models from the spec and also emit a models file
# (column_models_auto.py) so IDEs get autocomplete/type information.
open_alchemy.init_yaml(
    "column-example-spec.yml", models_filename="column_models_auto.py"
)
"""Generate the simple-example models from the OpenAPI specification."""
import open_alchemy

# Construct the SQLAlchemy models; no models file is written for this example.
open_alchemy.init_yaml("simple-example-spec.yml")
def test_models_file(tmp_path):
    """
    GIVEN specification stored in a YAML file
    WHEN init_yaml is called with the file and a models file path
    THEN the models are written to the models file.
    """
    # pylint: disable=import-error,import-outside-toplevel
    # Generate spec file
    directory = tmp_path / "specs"
    directory.mkdir()
    spec_file = directory / "spec.yaml"
    spec_file.write_text(yaml.dump(BASIC_SPEC))
    # Create models file
    models_file = directory / "models.py"
    # Creating model factory; str() because init_yaml expects a path string
    open_alchemy.init_yaml(
        str(spec_file), define_all=True, models_filename=str(models_file)
    )

    # Check models file contents
    models_file_contents = models_file.read_text()
    docstring = '"""Autogenerated SQLAlchemy models based on OpenAlchemy models."""'
    # On Python < 3.8 TypedDict/Protocol come from typing_extensions, so the
    # generated file gains an extra import and different base class names.
    additional_import = ""
    if sys.version_info[1] < 8:
        additional_import = """
import typing_extensions"""
    expected_td_base = "typing.TypedDict"
    if sys.version_info[1] < 8:
        expected_td_base = "typing_extensions.TypedDict"
    expected_model_base = "typing.Protocol"
    if sys.version_info[1] < 8:
        expected_model_base = "typing_extensions.Protocol"
    # NOTE(review): the exact blank-line/indent layout of this literal was
    # reconstructed from a whitespace-mangled source; it must match the
    # OpenAlchemy generator's output byte-for-byte — verify against a fresh run.
    expected_contents = f'''{docstring}
# pylint: disable=no-member,super-init-not-called,unused-argument

import typing

import sqlalchemy{additional_import}
from sqlalchemy import orm

from open_alchemy import models


class TableDict({expected_td_base}, total=False):
    """TypedDict for properties that are not required."""

    column: typing.Optional[int]


class TTable({expected_model_base}):
    """
    SQLAlchemy model protocol.

    Attrs:
        column: The column of the Table.

    """

    # SQLAlchemy properties
    __table__: sqlalchemy.Table
    __tablename__: str
    query: orm.Query

    # Model properties
    column: typing.Optional[int]

    def __init__(self, column: typing.Optional[int] = None) -> None:
        """
        Construct.

        Args:
            column: The column of the Table.

        """
        ...

    @classmethod
    def from_dict(cls, column: typing.Optional[int] = None) -> "TTable":
        """
        Construct from a dictionary (eg. a POST payload).

        Args:
            column: The column of the Table.

        Returns:
            Model instance based on the dictionary.

        """
        ...

    @classmethod
    def from_str(cls, value: str) -> "TTable":
        """
        Construct from a JSON string (eg. a POST payload).

        Returns:
            Model instance based on the JSON string.

        """
        ...

    def to_dict(self) -> TableDict:
        """
        Convert to a dictionary (eg. to send back for a GET request).

        Returns:
            Dictionary based on the model instance.

        """
        ...

    def to_str(self) -> str:
        """
        Convert to a JSON string (eg. to send back for a GET request).

        Returns:
            JSON string based on the model instance.

        """
        ...


Table: TTable = models.Table  # type: ignore
'''
    assert models_file_contents == expected_contents
"""Generate the many-to-one relationship example models."""
import open_alchemy

# Construct the SQLAlchemy models from the relationship specification.
open_alchemy.init_yaml("relationship-many-to-one-example-spec.yml")
"""Generate the allOf model example models."""
import open_alchemy

# Construct the SQLAlchemy models from the allOf specification.
open_alchemy.init_yaml("all-of-model-example-spec.yml")
def test_import_many_to_many_association(engine, sessionmaker, tmp_path):
    """
    GIVEN many to many specification stored in a YAML file
    WHEN init_yaml is called with the file
    THEN the association is importable from open_alchemy.models.
    """
    # pylint: disable=import-error,import-outside-toplevel
    # Schema pieces: RefTable carries x-secondary so the association table
    # named "association" is generated for the many-to-many link.
    ref_table_schema = {
        "properties": {"column": {"type": "integer", "x-primary-key": True}},
        "x-tablename": "ref_table",
        "type": "object",
        "x-secondary": "association",
    }
    table_schema = {
        "properties": {
            "column": {"type": "integer", "x-primary-key": True},
            "ref_tables": {
                "type": "array",
                "items": {"$ref": "#/components/schemas/RefTable"},
            },
        },
        "x-tablename": "table",
        "type": "object",
    }
    spec = {
        "components": {
            "schemas": {"RefTable": ref_table_schema, "Table": table_schema}
        }
    }

    # Write the specification to disk so init_yaml can load it
    spec_dir = tmp_path / "specs"
    spec_dir.mkdir()
    spec_path = spec_dir / "spec.yaml"
    spec_path.write_text(yaml.dump(spec))

    # Create the model factory and the tables
    open_alchemy.init_yaml(str(spec_path), define_all=True)
    from open_alchemy.models import Base

    Base.metadata.create_all(engine)

    # Persist a Table row linked to a RefTable row
    from open_alchemy.models import RefTable
    from open_alchemy.models import Table
    from open_alchemy.models import association

    ref = RefTable(column=11)
    row = Table(column=12, ref_tables=[ref])
    session = sessionmaker()
    session.add(row)
    session.flush()

    # The association table recorded the (table pk, ref_table pk) pair
    queried = session.query(association).first()
    assert queried == (12, 11)
"""Generate the kwargs example models and write them to a models file."""
import open_alchemy

# Construct the models and emit kwargs_models_auto.py for editor support.
open_alchemy.init_yaml(
    "kwargs-example-spec.yml", models_filename="kwargs_models_auto.py"
)
"""Setup for the database."""
import os

from flask_sqlalchemy import SQLAlchemy

from open_alchemy import init_yaml

# Construct models
# Flask-SQLAlchemy handle; db.Model is the declarative base OpenAlchemy builds on.
db = SQLAlchemy()
# The OpenAPI specification lives next to this module as api.yaml.
SPEC_DIR = os.path.dirname(__file__)
SPEC_FILE = os.path.join(SPEC_DIR, "api.yaml")
# Generate the SQLAlchemy models from the spec onto the Flask-SQLAlchemy base
# so they participate in db's session/metadata.
init_yaml(SPEC_FILE, base=db.Model)
"""Alembic environment head: build the OpenAlchemy models so autogenerate works."""
from logging.config import fileConfig
from pathlib import Path

from alembic import context
import open_alchemy
from sqlalchemy import engine_from_config
from sqlalchemy import pool

# The OpenAPI specification that defines the models. The original used
# Path(".").parent / "openapi/openapi.yml"; Path(".").parent is a no-op
# (it equals Path(".")), so this is the same relative path spelled directly.
specfile = Path("openapi") / "openapi.yml"
# Pass a plain string: every other init_yaml call site in this project passes
# str paths, and this avoids relying on the library accepting path-like objects.
open_alchemy.init_yaml(spec_filename=str(specfile.absolute()))

# this is the Alembic Config object, which provides
# access to the values within the .ini file in use.
config = context.config

# Interpret the config file for Python logging.
# This line sets up loggers basically.
fileConfig(config.config_file_name)

# add your model's MetaData object here
# for 'autogenerate' support
# from myapp import mymodel
# target_metadata = mymodel.Base.metadata
target_metadata = open_alchemy.models.Base.metadata

# other values from the config, defined by the needs of env.py,
# can be acquired:
# my_important_option = config.get_main_option("my_important_option")
# ... etc.
"""Generate the joined-table-inheritance example models."""
import open_alchemy

# Construct the models and emit joined_models_auto.py for editor support.
open_alchemy.init_yaml(
    "joined-example-spec.yml", models_filename="joined_models_auto.py"
)
"""Generate the composite-index example models."""
import open_alchemy

# Construct the SQLAlchemy models from the composite-index specification.
open_alchemy.init_yaml("composite-index-example-spec.yml")
"""Define the WSGI app."""
from pathlib import Path

from open_alchemy import init_yaml
# NOTE(review): declarative_base appears unused in this module — confirm
# before removing (kept to avoid breaking unseen callers of this module).
from sqlalchemy.ext.declarative import declarative_base

from pyconsql import connexion_utils
from pyconsql import database

# Generate models.
settings = connexion_utils.get_settings()
spec_file = settings["SPECIFICATION_FILE"]
models_file = settings["MODELS_FILENAME"]
init_yaml(spec_file, base=database.Base, models_filename=models_file)

# Start the API.
app = connexion_utils.create_connexion_app()
# WSGI servers look for the `application` callable.
application = app.app
# NOTE(review): this chunk is the tail of a module whose top (imports of
# flask, Thread, SQLAlchemy, Migrate, Config, create_database, open_alchemy,
# os, and the enclosing error-handler function) is not visible here.
    # Tail of an error handler defined above this chunk: log the failing URL.
    sender_app.logger.info(f"ERROR for {flask.request.url}")


def start_updater(app: Flask) -> Thread:
    """ Starts a new daemon Thread to perform updates in the background. """
    # Imported lazily to avoid a circular import at module load time —
    # TODO confirm against mcserver.app.services.
    from mcserver.app.services import CorpusService
    t = Thread(target=CorpusService.init_updater, args=(app, ))
    # Daemon thread: it must not keep the process alive on shutdown.
    t.daemon = True
    t.start()
    return t


def shutdown_session(exception=None):
    """ Shuts down the session when the application exits. (maybe also after every request ???) """
    db.session.remove()


# Module-level wiring: create the database handle and migration helper.
db: SQLAlchemy = create_database()
migrate: Migrate = Migrate(directory=Config.MIGRATIONS_DIRECTORY)
# Only generate the OpenAlchemy models once — skip if the corpus table
# attribute already exists on open_alchemy.models (e.g. on re-import).
if not hasattr(open_alchemy.models, Config.DATABASE_TABLE_CORPUS):
    # do this _BEFORE_ you add any APIs to your application
    init_yaml(Config.API_SPEC_MODELS_YAML_FILE_PATH, base=db.Model,
              models_filename=os.path.join(Config.MC_SERVER_DIRECTORY, "models_auto.py"))

# import the models so we can access them from other parts of the app using imports from "app.models";
# this has to be at the bottom of the file
from mcserver.app import models
from mcserver.app import api