async def async_main() -> None:
    """Main program function.

    Creates the schema, runs legacy-style sync callables via
    ``AsyncSession.run_sync()``, inserts sample rows, then reads one row
    back and updates it.
    """
    engine = create_async_engine(
        "postgresql+asyncpg://scott:tiger@localhost/test",
        echo=True,
    )

    # drop and re-create tables, each in its own transaction
    async with engine.begin() as conn:
        await conn.run_sync(Base.metadata.drop_all)

    async with engine.begin() as conn:
        await conn.run_sync(Base.metadata.create_all)

    # expire_on_commit=False keeps ORM objects usable after commit
    async_session = async_sessionmaker(engine, expire_on_commit=False)

    # sessionmaker.begin() yields a session inside a transaction that
    # commits on successful exit
    async with async_session.begin() as session:
        await session.run_sync(work_with_a_session_one)
        await session.run_sync(work_with_a_session_two, param="foo")

        session.add_all(
            [
                A(bs=[B(), B()], data="a1"),
                A(bs=[B()], data="a2"),
                A(bs=[B(), B()], data="a3"),
            ]
        )

    async with async_session() as session:
        result = await session.execute(select(A).order_by(A.id))

        r: ScalarResult[A] = result.scalars()

        # BUG FIX: three A rows were inserted above, so ScalarResult.one()
        # would raise MultipleResultsFound; first() takes the lowest id.
        a1 = r.first()
        assert a1 is not None  # narrowing: at least one row was inserted

        a1.data = "new data"

        await session.commit()
async def async_main():
    """Main program function.

    Inserts 100 sample rows, then runs 30 SELECT statements concurrently
    with asyncio.gather(), each statement executed out-of-band in its own
    session and the ORM results merged back into the primary session.
    """
    engine = create_async_engine(
        "postgresql+asyncpg://scott:tiger@localhost/test",
        echo=True,
    )

    async with engine.begin() as conn:
        await conn.run_sync(Base.metadata.drop_all)
        await conn.run_sync(Base.metadata.create_all)

    async_session = async_sessionmaker(engine, expire_on_commit=False)

    # open the primary session and a transaction that commits on exit
    async with async_session() as session, session.begin():
        session.add_all([A(data="a_%d" % i) for i in range(100)])

        # 30 SELECTs, each filtering on a randomly chosen data value
        statements = [
            select(A).where(A.data == "a_%d" % random.choice(range(100)))
            for i in range(30)
        ]

        # run all statements concurrently; each run_out_of_band() call
        # uses its own session, so the shared `session` is only touched
        # when results are merged back in
        results = await asyncio.gather(
            *(
                run_out_of_band(async_session, session, statement)
                for statement in statements
            )
        )
        print(f"results: {[r.all() for r in results]}")
def async_scoped_session_factory(
    engine: AsyncEngine,
) -> async_scoped_session[MyAsyncSession]:
    """Build an async_scoped_session whose sessions are MyAsyncSession.

    The scope function always returns the same key, so a single session
    is shared per scope.
    """
    factory = async_sessionmaker(engine, class_=MyAsyncSession)
    return async_scoped_session(factory, scopefunc=lambda: None)
async def async_main():
    """Main program function.

    Demonstrates schema setup, eager relationship loading, buffered and
    streaming ORM result iteration, and a final attribute update/commit.
    """
    engine = create_async_engine(
        "postgresql+asyncpg://scott:tiger@localhost/test",
        echo=True,
    )

    # drop and re-create tables, each in its own transaction
    async with engine.begin() as conn:
        await conn.run_sync(Base.metadata.drop_all)

    async with engine.begin() as conn:
        await conn.run_sync(Base.metadata.create_all)

    # expire_on_commit=False will prevent attributes from being expired
    # after commit.
    async_session = async_sessionmaker(engine, expire_on_commit=False)

    async with async_session() as session:
        # begin() opens a transaction that commits when the block exits
        async with session.begin():
            session.add_all(
                [
                    A(bs=[B(), B()], data="a1"),
                    A(bs=[B()], data="a2"),
                    A(bs=[B(), B()], data="a3"),
                ]
            )

        # for relationship loading, eager loading should be applied.
        stmt = select(A).options(selectinload(A.bs))

        # AsyncSession.execute() is used for 2.0 style ORM execution
        # (same as the synchronous API).
        result = await session.execute(stmt)

        # result is a buffered Result object.
        for a1 in result.scalars():
            print(a1)
            print(f"created at: {a1.create_date}")
            for b1 in a1.bs:
                print(b1)

        # for streaming ORM results, AsyncSession.stream() may be used.
        result = await session.stream(stmt)

        # result is a streaming AsyncResult object.
        async for a1 in result.scalars():
            print(a1)
            for b1 in a1.bs:
                print(b1)

        result = await session.execute(select(A).order_by(A.id))

        a1 = result.scalars().first()

        a1.data = "new data"

        await session.commit()
async def test_async_sessionmaker_block_two(self, async_engine):
    """async_sessionmaker.begin() commits on exit and ends the transaction."""
    User = self.classes.User
    session_factory = async_sessionmaker(async_engine)

    async with session_factory.begin() as session:
        pending = User(name="u1")
        # begin() hands back a session already inside a transaction
        assert session.in_transaction()
        session.add(pending)

    # leaving the block committed and closed the transaction
    assert not session.in_transaction()

    async with session_factory() as session:
        rows = await session.execute(
            select(User).where(User.name == "u1")
        )
        persisted = rows.scalar_one()
        eq_(persisted.name, "u1")
async def run_out_of_band(
    async_sessionmaker, session, statement, merge_results=True
):
    """Run an ORM statement in a distinct session, merging the result
    back into the given session.

    :param async_sessionmaker: factory used to open the out-of-band session
    :param session: the primary session into which results are merged
    :param statement: the ORM SELECT statement to execute
    :param merge_results: when True, return the merged (frozen) result;
        when False, just close the out-of-band result and return None
    """
    async with async_sessionmaker() as oob_session:
        # use AUTOCOMMIT for each connection to reduce transaction
        # overhead / contention
        await oob_session.connection(
            execution_options={"isolation_level": "AUTOCOMMIT"}
        )

        # pre 1.4.24 equivalent of the connection() call above
        # await oob_session.run_sync(
        #     lambda sync_session: sync_session.connection(
        #         execution_options={"isolation_level": "AUTOCOMMIT"}
        #     )
        # )

        result = await oob_session.execute(statement)

        if merge_results:
            # merge_results means the ORM objects from the result
            # will be merged back into the original session.
            # load=False means we can use the objects directly without
            # re-selecting them. however this merge operation is still
            # more expensive CPU-wise than a regular ORM load because the
            # objects are copied into new instances
            # NOTE: merge_frozen_result returns a callable producing the
            # merged result, hence the trailing () on the awaited value.
            return (
                await session.run_sync(
                    merge_frozen_result,
                    statement,
                    result.freeze(),
                    load=False,
                )
            )()
        else:
            await result.close()
def async_session_factory(
    engine: AsyncEngine,
) -> async_sessionmaker[MyAsyncSession]:
    """Return a sessionmaker bound to *engine* that produces MyAsyncSession."""
    maker = async_sessionmaker(engine, class_=MyAsyncSession)
    return maker
def test_init_asyncio_sessionmaker(self, async_engine):
    """sync_session_class passed to async_sessionmaker is honored."""
    factory = async_sessionmaker(async_engine, sync_session_class=_MySession)
    async_sess = factory()
    # both the proxied instance and the configured class must reflect
    # the custom sync session class
    is_true(isinstance(async_sess.sync_session, _MySession))
    is_(async_sess.sync_session_class, _MySession)