def test_conn_reusable(self):
    """A connection invalidated by a DB shutdown stays open, requires an
    explicit rollback, and then transparently reconnects on next use."""
    conn = self.db.connect()

    conn.execute(select(1))

    # exactly one DBAPI connect so far
    eq_(self.dbapi.connect.mock_calls, [self.mock_connect])

    self.dbapi.shutdown()

    # the next execute fails against the dead DBAPI connection
    with expect_raises(tsa.exc.DBAPIError):
        conn.execute(select(1))

    # the Connection itself is not closed, only invalidated
    assert not conn.closed
    assert conn.invalidated

    # the underlying DBAPI connection was closed once
    eq_([c.close.mock_calls for c in self.dbapi.connections], [[call()]])

    # trans was autobegin. they have to call rollback
    with expect_raises(tsa.exc.PendingRollbackError):
        conn.execute(select(1))

    # ok
    conn.rollback()

    # now we are good
    # test reconnects
    conn.execute(select(1))
    assert not conn.invalidated

    # a second DBAPI connection is in play; only the first was closed
    eq_(
        [c.close.mock_calls for c in self.dbapi.connections],
        [[call()], []],
    )
def _assert_not_order(self, cls, create, dc_arguments):
    """Assert that all four ordering comparisons raise TypeError,
    i.e. the dataclass was generated without ``order=True``."""
    comparisons = (
        lambda lhs, rhs: lhs < rhs,
        lambda lhs, rhs: lhs > rhs,
        lambda lhs, rhs: lhs <= rhs,
        lambda lhs, rhs: lhs >= rhs,
    )
    for compare in comparisons:
        with expect_raises(TypeError):
            compare(create("g", 10), create("b", 7))
def thread_go(q):
    """Worker: drain *q* inside a greenlet-spawned coroutine, expecting
    the queue to be empty, then flag completion via the enclosing
    ``run`` list (defined in the outer scope, not visible here)."""

    def go():
        # blocks up to 0.1s; expected to time out with queue.Empty
        q.get(timeout=0.1)

    with expect_raises(queue.Empty):
        asyncio.run(greenlet_spawn(go))
    # NOTE(review): `run` comes from the enclosing function — presumably
    # a one-element completion flag; confirm against the outer scope.
    run[0] = True
def test_class_getitem_as_declarative(self): T = TypeVar("T", bound="CommonBase") # noqa class CommonBase(Generic[T]): @classmethod def boring(cls: Type[T]) -> Type[T]: return cls @classmethod def more_boring(cls: Type[T]) -> int: return 27 @as_declarative() class Base(CommonBase[T]): foo = 1 class Tab(Base["Tab"]): __tablename__ = "foo" a = Column(Integer, primary_key=True) eq_(Tab.foo, 1) is_(Tab.__table__, inspect(Tab).local_table) eq_(Tab.boring(), Tab) eq_(Tab.more_boring(), 27) with expect_raises(AttributeError): Tab.non_existent
def test_get_view_definition(self, connection):
    """View definitions are reflected directly and, for synonyms, only
    when ``oracle_resolve_synonyms`` is enabled."""
    insp = inspect(connection)
    eq_(
        insp.get_view_definition("tbl_plain_v"),
        "SELECT id, data FROM tbl WHERE id > 100",
    )
    eq_(
        insp.get_view_definition("tbl_v"),
        "SELECT id, data FROM tbl WHERE id > 42",
    )

    # a synonym is not found without synonym resolution
    with expect_raises(exc.NoSuchTableError):
        eq_(insp.get_view_definition("view_syn"), None)

    # with resolution enabled the synonym target's definition comes back
    eq_(
        insp.get_view_definition("view_syn", oracle_resolve_synonyms=True),
        "SELECT id, data FROM tbl WHERE id > 100",
    )
    eq_(
        insp.get_view_definition(
            "syn_schema_view", oracle_resolve_synonyms=True
        ),
        "SELECT 1 AS value FROM dual",
    )
    # synonym resolution combined with an explicit schema
    eq_(
        insp.get_view_definition(
            "ts_v_s",
            oracle_resolve_synonyms=True,
            schema=testing.config.test_schema,
        ),
        "SELECT id, data FROM tbl WHERE id > 100",
    )
def test_reconnect(self):
    """Two consecutive shutdown/recover cycles on one Connection.

    After the backend is shut down mid-use, the connection is
    invalidated (but not closed), requires an explicit rollback of the
    autobegun transaction, and then transparently reconnects.

    Fix: the original asserted ``conn.invalidated`` twice in a row;
    the redundant duplicate assertion is removed.
    """
    with self.engine.connect() as conn:
        eq_(conn.execute(select(1)).scalar(), 1)
        assert not conn.closed

        self.engine.test_shutdown()

        _assert_invalidated(conn.execute, select(1))

        assert not conn.closed
        assert conn.invalidated

        # the autobegun transaction is now dead; executing without a
        # rollback must fail
        with expect_raises(tsa.exc.PendingRollbackError):
            conn.execute(select(1))

        conn.rollback()

        # reconnects transparently after the rollback
        eq_(conn.execute(select(1)).scalar(), 1)
        assert not conn.invalidated

        # one more time
        self.engine.test_shutdown()
        _assert_invalidated(conn.execute, select(1))

        assert conn.invalidated
        conn.rollback()
        eq_(conn.execute(select(1)).scalar(), 1)
        assert not conn.invalidated
def test_dc_base_unsupported_argument(self, registry: _RegistryType):
    """Unsupported dataclass arguments (``slots``) raise TypeError both
    on the declarative base and on a subclass; ``order`` is accepted."""
    reg = registry

    # slots=True is rejected at base-class creation time
    with expect_raises(TypeError):

        class Base(MappedAsDataclass, DeclarativeBase, slots=True):
            registry = reg

    # order=True is a supported dataclass argument
    class Base2(MappedAsDataclass, DeclarativeBase, order=True):
        registry = reg

    # slots is rejected on mapped subclasses as well, even slots=False
    with expect_raises(TypeError):

        class A(Base2, slots=False):
            __tablename__ = "a"

            id: Mapped[int] = mapped_column(primary_key=True, init=False)
def test_no_begin_on_invalid(self):
    """Committing after the connection was invalidated mid-transaction
    raises PendingRollbackError rather than silently beginning anew."""
    with self.engine.connect() as conn:
        conn.begin()
        conn.invalidate()

        with expect_raises(exc.PendingRollbackError):
            conn.commit()
def _assert_not_unsafe_hash(self, cls, create, dc_arguments):
    """Assert hashing behavior for a dataclass built without
    ``unsafe_hash``: with ``eq=True`` the class is unhashable
    (``__hash__`` set to None); with ``eq=False`` hashing still works
    via the default identity hash."""
    obj = create("d1", 5)

    if not dc_arguments["eq"]:
        hash(obj)
    else:
        with expect_raises(TypeError):
            hash(obj)
async def test_no_async_listeners(self, async_session):
    """event.listen() on an AsyncSession raises NotImplementedError with
    a message directing users to the sync_* attributes."""
    with testing.expect_raises(
        NotImplementedError,
        "NotImplementedError: asynchronous events are not implemented "
        "at this time. Apply synchronous listeners to the "
        "AsyncEngine.sync_engine or "
        "AsyncConnection.sync_connection attributes.",
    ):
        event.listen(async_session, "before_flush", mock.Mock())
async def test_pool_exhausted_no_timeout(self, async_engine):
    """With pool_size=1, max_overflow=0 and pool_timeout=0, a second
    connect attempt fails immediately with TimeoutError."""
    engine = create_async_engine(
        testing.db.url,
        pool_size=1,
        max_overflow=0,
        pool_timeout=0,
    )

    # the single pooled connection is held open here
    async with engine.connect():
        with expect_raises(exc.TimeoutError):
            await engine.connect()
def test_rollback_on_invalid_savepoint(self):
    """Rolling back a savepoint after invalidation passes silently, but
    the connection still requires a full rollback before reuse."""
    with self.engine.connect() as conn:
        conn.begin()
        trans2 = conn.begin_nested()
        conn.invalidate()

        # this passes silently, as it will often be involved
        # in error catching schemes
        trans2.rollback()

        # still invalid though
        with expect_raises(exc.PendingRollbackError):
            conn.begin_nested()
def test_indexed_key_token(self):
    """A path ending in a PathToken is indexable by integer position but
    raises IndexError for a non-integer (mapper) key."""
    umapper = inspect(self.classes.User)
    amapper = inspect(self.classes.Address)
    path = PathRegistry.coerce(
        (
            umapper,
            umapper.attrs.addresses,
            amapper,
            PathToken.intern(":*"),
        )
    )
    is_true(path.is_token)
    # integer indexing returns the path elements
    eq_(path[1], umapper.attrs.addresses)
    eq_(path[3], ":*")

    # a mapper key is not valid on a token path
    with expect_raises(IndexError):
        path[amapper]
def _assert_not_init(self, cls, create, dc_arguments):
    """Assert behavior of a dataclass generated with ``init=False``:
    positional construction fails, while the SQLAlchemy default
    keyword constructor still applies."""
    # positional args rejected: no generated __init__
    with expect_raises(TypeError):
        cls("Some data", 5)

    # we run real "dataclasses" on the class.  so with init=False, it
    # doesn't touch what was there, and the SQLA default constructor
    # gets put on.
    a1 = cls(data="some data")
    eq_(a1.data, "some data")
    eq_(a1.x, None)

    a1 = cls()
    eq_(a1.data, None)

    # no constructor, it sets None for x...ok
    eq_(a1.x, None)
def test_pre_ping_db_is_restarted(self):
    """pool_pre_ping lets new checkouts succeed after a DB restart,
    while a DBAPI connection that went stale across the restart still
    errors when used directly."""
    engine = engines.reconnecting_engine(options={"pool_pre_ping": True})

    conn = engine.connect()
    eq_(conn.execute(select(1)).scalar(), 1)
    # hold a reference to the raw DBAPI connection before the restart
    stale_connection = conn.connection.connection
    conn.close()

    engine.test_shutdown()
    engine.test_restart()

    # pre-ping detects the dead connection and replaces it
    conn = engine.connect()
    eq_(conn.execute(select(1)).scalar(), 1)
    conn.close()

    # the stale raw connection, used outside the pool, still fails
    with expect_raises(engine.dialect.dbapi.Error, check_context=False):
        curs = stale_connection.cursor()
        curs.execute("select 1")
def test_temporary_table(self, connection, table_name, exists):
    """Reflection of a (temporary) table: reflects columns and data when
    the table exists, raises NoSuchTableError when it does not.

    :param connection: active DB connection fixture
    :param table_name: name of the table to create/reflect
    :param exists: whether the table is created before reflection

    Fixes: removed Python-2-era ``u""`` string prefixes (no-op on
    Python 3) and used the local ``metadata`` alias consistently
    instead of mixing it with ``self.metadata``.
    """
    metadata = self.metadata

    if exists:
        tt = Table(
            table_name,
            metadata,
            Column("id", Integer, primary_key=True),
            Column("txt", mssql.NVARCHAR(50)),
            Column("dt2", mssql.DATETIME2),
        )
        tt.create(connection)
        connection.execute(
            tt.insert(),
            [
                {
                    "id": 1,
                    "txt": "foo",
                    "dt2": datetime.datetime(2020, 1, 1, 1, 1, 1),
                },
                {
                    "id": 2,
                    "txt": "bar",
                    "dt2": datetime.datetime(2020, 2, 2, 2, 2, 2),
                },
            ],
        )

    if not exists:
        # reflecting a missing table raises, rather than returning empty
        with expect_raises(exc.NoSuchTableError):
            Table(
                table_name,
                metadata,
                autoload_with=connection,
            )
    else:
        tmp_t = Table(table_name, metadata, autoload_with=connection)
        result = connection.execute(
            tmp_t.select().where(tmp_t.c.id == 2)
        ).fetchall()
        eq_(
            result,
            [(2, "bar", datetime.datetime(2020, 2, 2, 2, 2, 2))],
        )
async def test_new_style_active_history(
    self, async_session, one_to_one_fixture, _legacy_inactive_history_style
):
    """Replacing a one-to-one target: the legacy inactive-history style
    triggers lazy IO (and raises under asyncio), the new style does not."""
    A, B = await one_to_one_fixture(_legacy_inactive_history_style)

    a1 = A()
    b1 = B()
    a1.b = b1
    async_session.add(a1)
    await async_session.commit()

    b2 = B()
    if _legacy_inactive_history_style:
        # aiomysql dialect having problems here, emitting weird
        # pytest warnings and we might need to just skip for aiomysql
        # here, which is also raising StatementError w/ MissingGreenlet
        # inside of it
        with testing.expect_raises(
            (exc.StatementError, exc.MissingGreenlet)
        ):
            a1.b = b2
    else:
        a1.b = b2
        await async_session.flush()
        await async_session.refresh(b1)
        # the previous target b1 was de-associated (a_id IS NULL)
        eq_(
            (
                await async_session.execute(
                    select(func.count())
                    .where(B.id == b1.id)
                    .where(B.a_id == None)
                )
            ).scalar(),
            1,
        )
async def test_propagate_cancelled(self):
    """test #6652

    CancelledError raised inside an awaited coroutine propagates
    through greenlet_spawn back to the caller, and cleanup code in the
    sync function still runs.
    """
    cleanup = []

    async def async_meth_raise():
        raise asyncio.CancelledError()

    def sync_meth():
        try:
            await_only(async_meth_raise())
        # NOTE(review): bare except appears deliberate — CancelledError
        # derives from BaseException (3.8+), so `except Exception`
        # would not catch it here; confirm.
        except:
            cleanup.append(True)
            raise

    async def run_w_cancel():
        await greenlet_spawn(sync_meth)

    with expect_raises(asyncio.CancelledError, check_context=False):
        await run_w_cancel()

    # the except block ran before the cancellation propagated
    assert cleanup
def test_ensure_not_hashable(self):
    """Foo instances are unhashable: using one as a dict key must
    raise TypeError."""
    mapping = {}
    unhashable = Foo()

    with testing.expect_raises(TypeError):
        mapping[unhashable] = True
def test_insert_floats(
    self,
    metadata,
    fe_engine,
    include_setinputsizes,
    use_fastexecutemany,
    apply_setinputsizes_flag,
):
    """Exercise fast_executemany + do_setinputsizes interaction for
    Numeric columns; the combination of setinputsizes applied with no
    Numeric override and fastexecutemany enabled is expected to fail."""
    expect_failure = (
        apply_setinputsizes_flag
        and not include_setinputsizes
        and use_fastexecutemany
    )
    engine = fe_engine(use_fastexecutemany, apply_setinputsizes_flag)

    observations = Table(
        "Observations",
        metadata,
        Column("id", Integer, nullable=False, primary_key=True),
        Column("obs1", Numeric(19, 15), nullable=True),
        Column("obs2", Numeric(19, 15), nullable=True),
        schema="test_schema",
    )
    with engine.begin() as conn:
        metadata.create_all(conn)

    records = [
        {
            "id": 1,
            "obs1": Decimal("60.1722066045792"),
            "obs2": Decimal("24.929289808227466"),
        },
        {
            "id": 2,
            "obs1": Decimal("60.16325715615476"),
            "obs2": Decimal("24.93886459535008"),
        },
        {
            "id": 3,
            "obs1": Decimal("60.16445165123469"),
            "obs2": Decimal("24.949856300109516"),
        },
    ]

    if include_setinputsizes:
        canary = mock.Mock()

        @event.listens_for(engine, "do_setinputsizes")
        def do_setinputsizes(
            inputsizes, cursor, statement, parameters, context
        ):
            # record the incoming types, then force SQL_DECIMAL for
            # Numeric columns
            canary(list(inputsizes.values()))
            for key in inputsizes:
                if isinstance(key.type, Numeric):
                    inputsizes[key] = (
                        engine.dialect.dbapi.SQL_DECIMAL,
                        19,
                        15,
                    )

    with engine.begin() as conn:
        if expect_failure:
            with expect_raises(DBAPIError):
                conn.execute(observations.insert(), records)
        else:
            conn.execute(observations.insert(), records)
            # round-trip: rows come back equal to what was inserted
            eq_(
                conn.execute(
                    select(observations).order_by(observations.c.id)
                )
                .mappings()
                .all(),
                records,
            )

    if include_setinputsizes:
        if apply_setinputsizes_flag:
            eq_(
                canary.mock_calls,
                [
                    # float for int? this seems wrong
                    mock.call([float, float, float]),
                    mock.call([]),
                ],
            )
        else:
            # flag off: the hook must never have been invoked
            eq_(canary.mock_calls, [])
def prime():
    """Prime the queue consumer: expect a timed get on the enclosing
    ``q`` (defined in the outer scope, not visible here) to time out
    with queue.Empty."""
    with expect_raises(queue.Empty):
        q.get(timeout=0.1)