def postgres_db():
    """Create a Postgres engine and initialise the schema.

    The engine is configured with REPEATABLE READ isolation so that
    the optimistic-concurrency ``version`` column is respected by
    concurrent transactions.
    """
    db_engine = create_engine(
        config.get_postgres_uri(),
        isolation_level="REPEATABLE READ",
    )
    wait_for_engine_to_come_up(db_engine)
    metadata.create_all(db_engine)
    return db_engine
def run_migrations_offline():
    """Run migrations in 'offline' mode.

    Configures the Alembic context with just a URL rather than an
    Engine, so no DBAPI needs to be available; calls to
    ``context.execute()`` emit the generated SQL to the script output
    instead of executing it.
    """
    db_url = app_config.get_postgres_uri()
    context.configure(
        url=db_url,
        target_metadata=target_metadata,
        literal_binds=True,
        dialect_opts={"paramstyle": "named"},
    )
    with context.begin_transaction():
        context.run_migrations()
def run_migrations_online():
    """Run migrations in 'online' mode.

    Creates an Engine from the application's Postgres URI and
    associates a live connection with the Alembic context before
    running the migrations.
    """
    engine = create_engine(app_config.get_postgres_uri())
    with engine.connect() as conn:
        context.configure(
            connection=conn,
            target_metadata=target_metadata,
            # Detect column-type changes when autogenerating revisions.
            compare_type=True,
        )
        with context.begin_transaction():
            context.run_migrations()
def postgres_db():
    """Wait for Postgres, create all mapped tables, and return the engine."""
    db = create_engine(config.get_postgres_uri())
    wait_for_postgres_to_come_up(db)
    metadata.create_all(db)
    return db
def postgres_db():
    """Postgres engine fixture using SERIALIZABLE transaction isolation."""
    db = create_engine(
        config.get_postgres_uri(),
        isolation_level='SERIALIZABLE',
    )
    wait_for_postgres_to_come_up(db)
    metadata.create_all(db)
    return db
# NOTE(review): the three defs below are methods of the enclosing abstract
# unit-of-work class, whose header lies outside this chunk.
def collect_new_events(self):
    """Drain and yield pending domain events from every aggregate seen."""
    for aggregate in self.products.seen:
        while aggregate.events:
            yield aggregate.events.pop(0)

@abc.abstractmethod
def _commit(self):
    raise NotImplementedError

@abc.abstractmethod
def rollback(self):
    raise NotImplementedError


# Session factory bound to a REPEATABLE READ engine so version-number
# concurrency checks are honoured.
DEFAULT_SESSION_FACTORY = sessionmaker(
    bind=create_engine(
        config.get_postgres_uri(),
        isolation_level="REPEATABLE READ",
    )
)


class SqlAlchemyUnitOfWork(AbstractUnitOfWork):
    """Unit of Work backed by a SQLAlchemy session (one per `with` block)."""

    def __init__(self, session_factory=DEFAULT_SESSION_FACTORY):
        self.session_factory = session_factory

    def __enter__(self):
        new_session = self.session_factory()  # type: Session
        self.session = new_session
        self.products = repository.SqlAlchemyRepository(new_session)
        return super().__enter__()

    def __exit__(self, *args):
        super().__exit__(*args)
        self.session.close()
while product.events: yield product.events.pop(0) @abc.abstractmethod def _commit(self): raise NotImplementedError @abc.abstractmethod def rollback(self): raise NotImplementedError <<<<<<< HEAD DEFAULT_SESSION_FACTORY = sessionmaker(bind=create_engine( config.get_postgres_uri(), isolation_level="REPEATABLE READ", )) class SqlAlchemyUnitOfWork(AbstractUnitOfWork): ======= DEFAULT_SESSION_FACTORY = sessionmaker( bind=create_engine( config.get_postgres_uri(), isolation_level="REPEATABLE READ", ) ) class SqlAlchemyUnitOfWork(AbstractUnitOfWork):
return self def __exit__(self, *args): self.rollback() @abc.abstractmethod def commit(self): raise NotImplementedError @abc.abstractmethod def rollback(self): raise NotImplementedError DEFAULT_SESSION_FACTORY = sessionmaker( bind=create_engine(config.get_postgres_uri(), )) class SqlAlchemyUnitOfWork(AbstractUnitOfWork): def __init__(self, session_factory=DEFAULT_SESSION_FACTORY): self.session_factory = session_factory self.session: Optional[Session] = None def __enter__(self): self.session = self.session_factory() self.batches = repository.SqlAlchemyRepository(self.session) return super().__enter__() def __exit__(self, *args): super().__exit__(*args) self.session.close()
# about consistency issues because of concurrent transactions # Rui Conti, Apr 2020 import abc from typing import Callable, Generator from sqlalchemy import create_engine from sqlalchemy import orm from allocation import config from allocation.adapters import repository DEFAULT_SESSION_FACTORY = orm.sessionmaker( bind=create_engine( config.get_postgres_uri(), isolation_level="SERIALIZABLE", ), autoflush=False, ) class AbstractUnitOfWork(abc.ABC): products: repository.AbstractProductRepository def __enter__(self): # type: ignore return self def __exit__(self, *args): # type: ignore self.rollback() def collect_events(self) -> None:
from flask import Flask, jsonify, request
from sqlalchemy import create_engine
from sqlalchemy.orm import sessionmaker
from datetime import datetime

from allocation import config
from allocation.domain import model
from allocation.adapters import orm, repository
from allocation.service_layer import services
from allocation.service_layer import unit_of_work

# Wire the domain model to the database tables before serving any requests.
orm.start_mappers()
# Module-level session factory; the endpoints below use the UoW instead.
get_session = sessionmaker(bind=create_engine(config.get_postgres_uri()))
app = Flask(__name__)


@app.route("/allocate", methods=["POST"])
def allocation_endpoint():
    """POST /allocate — allocate an order line to a batch.

    Expects JSON with "orderid", "sku" and "qty". Returns 201 with the
    allocated batch reference, or 400 with an error message when the
    service layer raises OutOfStock or InvalidSku.
    """
    uow = unit_of_work.SqlAlchemyUnitOfWork()
    try:
        batchref = services.allocate(request.json["orderid"], request.json["sku"], request.json["qty"], uow)
    except (model.OutOfStock, services.InvalidSku) as e:
        return jsonify({"message": str(e)}), 400
    return jsonify({"batchref": batchref}), 201


# NOTE(review): chunk is truncated here — the body of add_batch is not
# visible in this view.
@app.route("/add_batch", methods=["POST"])
def add_batch():
def postgres_db():
    # Engine with SERIALIZABLE isolation for the test database.
    # NOTE(review): chunk is truncated — the remainder of this fixture
    # (waiting for the DB / creating tables / returning the engine) is
    # not visible in this view.
    engine = create_engine(config.get_postgres_uri(), isolation_level="SERIALIZABLE")
# NOTE(review): the three defs below are methods of the enclosing abstract
# unit-of-work class, whose header lies outside this chunk.
def collect_new_events(self):
    """Yield (and remove) each pending domain event from every seen aggregate."""
    for prod in self.products.seen:
        while prod.events:
            yield prod.events.pop(0)

@abc.abstractmethod
def _commit(self):
    raise NotImplementedError

@abc.abstractmethod
def rollback(self):
    raise NotImplementedError


# Session factory for the default Postgres engine; REPEATABLE READ keeps
# version-based optimistic concurrency checks effective.
DEFAULT_SESSION_FACTORY = sessionmaker(
    bind=create_engine(config.get_postgres_uri(), isolation_level='REPEATABLE READ')
)


class SqlAlchemyUnitOfWork(AbstractUnitOfWork):
    """SQLAlchemy-backed Unit of Work: opens one session per `with` block."""

    def __init__(self, session_factory=DEFAULT_SESSION_FACTORY):
        self.session_factory = session_factory

    def __enter__(self):
        opened = self.session_factory()  # type: Session
        self.session = opened
        self.products = repository.SqlAlchemyRepository(opened)
        return super().__enter__()

    def __exit__(self, *args):
        super().__exit__(*args)
        self.session.close()