def test_deepcopy(self):
    """A deep copy of a URL compares equal but is a distinct object tree."""
    original = url.make_url(
        "dialect://*****:*****@host/db?arg1%3D=param1&arg2=param+2"
    )
    duplicate = copy.deepcopy(original)

    eq_(original, duplicate)
    is_not(original, duplicate)
    # query is an immutabledict of immutable k/v; deepcopy still yields
    # a distinct mapping object
    is_not(original.query, duplicate.query)
def test_no_instance_level_collections(self):
    """Class-level listeners keep instances on the shared class registry;
    an instance-level listener gives only that instance its own collection.
    """

    @event.listens_for(self.Target, "event_one")
    def class_level_listener(x, y):
        pass

    obj_one = self.Target()
    obj_two = self.Target()
    obj_one.dispatch.event_one(5, 6)
    obj_two.dispatch.event_one(5, 6)

    # both instances still resolve to the shared class-level collection
    is_(
        self.Target.dispatch._empty_listener_reg[self.Target]["event_one"],
        obj_one.dispatch.event_one,
    )

    @event.listens_for(obj_one, "event_one")
    def instance_level_listener(x, y):
        pass

    # obj_one now carries its own listener collection ...
    is_not(
        self.Target.dispatch._empty_listener_reg[self.Target]["event_one"],
        obj_one.dispatch.event_one,
    )
    # ... while obj_two remains on the shared class-level collection
    is_(
        self.Target.dispatch._empty_listener_reg[self.Target]["event_one"],
        obj_two.dispatch.event_one,
    )
def test_regenerate_connection(self, connection):
    """Retrieving the async proxy twice for one sync connection returns
    the same proxy object, with its engine populated."""
    first_proxy = AsyncConnection._retrieve_proxy_for_target(connection)
    second_proxy = AsyncConnection._retrieve_proxy_for_target(connection)

    is_(first_proxy, second_proxy)
    is_not(first_proxy, None)
    is_(first_proxy.engine, second_proxy.engine)
    is_not(first_proxy.engine, None)
def test_mc_duplication_plain(self, decl_base):
    # A mapped_column() declared directly on a shared mixin must be
    # copied for each mapped subclass rather than shared between them.
    class MixinOne:
        name: Mapped[str] = mapped_column()

    class A(MixinOne, decl_base):
        __tablename__ = "a"
        id: Mapped[int] = mapped_column(primary_key=True)

    class B(MixinOne, decl_base):
        __tablename__ = "b"
        id: Mapped[int] = mapped_column(primary_key=True)

    # each table gets its own distinct Column object for "name"
    is_not(A.__table__.c.name, B.__table__.c.name)
async def test_dispose(self, async_engine):
    """dispose() drains checked-in connections and installs a new pool."""
    conn_a = await async_engine.connect()
    conn_b = await async_engine.connect()
    await conn_a.close()
    await conn_b.close()

    old_pool = async_engine.pool
    # both connections were returned to the pool on close
    eq_(old_pool.checkedin(), 2)

    await async_engine.dispose()

    # the replacement pool starts out empty and is a different object
    eq_(async_engine.pool.checkedin(), 0)
    is_not(old_pool, async_engine.pool)
def test_mc_duplication_declared_attr(self, decl_base):
    # A mapped_column() produced by a @declared_attr on a shared mixin
    # must be evaluated/copied per mapped subclass, not shared.
    class MixinOne:
        @declared_attr
        def name(cls) -> Mapped[str]:
            return mapped_column()

    class A(MixinOne, decl_base):
        __tablename__ = "a"
        id: Mapped[int] = mapped_column(primary_key=True)

    class B(MixinOne, decl_base):
        __tablename__ = "b"
        id: Mapped[int] = mapped_column(primary_key=True)

    # each table gets its own distinct Column object for "name"
    is_not(A.__table__.c.name, B.__table__.c.name)
async def test_dispose(self, async_engine):
    """dispose() drains the pool and installs a replacement pool.

    Checked-in counts are only asserted for queue pools; other pool
    implementations do not track them the same way.
    """
    conn_a = await async_engine.connect()
    conn_b = await async_engine.connect()
    await conn_a.close()
    await conn_b.close()

    old_pool = async_engine.pool
    # hoist the pool-type check; it cannot change across dispose()
    uses_queue_pool = isinstance(old_pool, AsyncAdaptedQueuePool)

    if uses_queue_pool:
        eq_(old_pool.checkedin(), 2)

    await async_engine.dispose()

    if uses_queue_pool:
        eq_(async_engine.pool.checkedin(), 0)
    is_not(old_pool, async_engine.pool)
def test_chained_add_operator(self):
    """`baked + fn` yields a new baked query whose cache key is the
    original key extended by the added callable."""
    User = self.classes.User
    session = Session()

    def base_query():
        return session.query(User)

    def add_name_criteria(q):
        return q.filter(User.name == bindparam("name"))

    baked = self.bakery(base_query)
    extended = baked + add_name_criteria

    # the + operator returns a new object rather than mutating in place
    is_not(extended, baked)
    self._assert_cache_key(baked._cache_key, [base_query])
    self._assert_cache_key(
        extended._cache_key, [base_query, add_name_criteria]
    )
async def test_invalidate(self, async_engine):
    """invalidate() closes the DBAPI connection and a subsequent raw
    fetch produces a fresh, valid connection fairy."""
    conn = await async_engine.connect()
    is_(conn.invalidated, False)

    fairy = await conn.get_raw_connection()
    is_(fairy.is_valid, True)
    raw_dbapi = fairy.connection

    await conn.invalidate()
    # the underlying driver connection was really closed
    assert raw_dbapi._connection.is_closed()

    replacement = await conn.get_raw_connection()
    is_not(replacement.connection, raw_dbapi)
    is_not(replacement, fairy)
    is_(replacement.is_valid, True)
    is_(fairy.is_valid, False)
def test_chained_add(self):
    """with_criteria() yields a new baked query whose cache key is the
    original key extended by the added callable."""
    User = self.classes.User
    session = fixture_session()

    def base_query():
        return session.query(User)

    def add_name_criteria(q):
        return q.filter(User.name == bindparam("name"))

    baked = self.bakery(base_query)
    extended = baked.with_criteria(add_name_criteria)

    # with_criteria returns a new object rather than mutating in place
    is_not(extended, baked)
    self._assert_cache_key(baked._cache_key, [base_query])
    self._assert_cache_key(
        extended._cache_key, [base_query, add_name_criteria]
    )
def test_unregister(self, registry):
    """registry.dispose() unregisters custom instrumentation, removing
    the manager attribute the custom manager installed on the class."""

    class MyClassState(instrumentation.InstrumentationManager):
        # custom strategy: park the manager on an ``xyz`` class attribute
        def manage(self, class_, manager):
            setattr(class_, "xyz", manager)

        def unregister(self, class_, manager):
            delattr(class_, "xyz")

        def manager_getter(self, class_):
            def get(cls):
                return cls.xyz

            return get

    class MyClass:
        __sa_instrumentation_manager__ = MyClassState

    # before mapping: no manager, and the strict accessor raises
    assert attributes.opt_manager_of_class(MyClass) is None
    with expect_raises_message(
        sa.orm.exc.UnmappedClassError,
        r"Can't locate an instrumentation manager for class .*MyClass",
    ):
        attributes.manager_of_class(MyClass)

    table = Table(
        "my_table",
        registry.metadata,
        Column("id", Integer, primary_key=True),
    )
    registry.map_imperatively(MyClass, table)

    # after mapping: the manager exists and lives where manage() put it
    mgr = attributes.opt_manager_of_class(MyClass)
    is_not(mgr, None)
    is_(mgr, MyClass.xyz)

    registry.configure()
    registry.dispose()

    # after dispose: manager gone, attribute removed by unregister()
    is_(attributes.opt_manager_of_class(MyClass), None)
    assert not hasattr(MyClass, "xyz")
async def test_dispose_no_close(self, async_engine):
    """dispose(close=False) abandons the old pool (without closing its
    connections) and installs a replacement pool."""
    conn_a = await async_engine.connect()
    conn_b = await async_engine.connect()
    await conn_a.close()
    await conn_b.close()

    old_pool = async_engine.pool
    # hoist the pool-type check; it cannot change across dispose()
    uses_queue_pool = isinstance(old_pool, AsyncAdaptedQueuePool)

    if uses_queue_pool:
        eq_(old_pool.checkedin(), 2)

    await async_engine.dispose(close=False)

    # TODO: test that DBAPI connection was not closed

    if uses_queue_pool:
        eq_(async_engine.pool.checkedin(), 0)
    is_not(old_pool, async_engine.pool)
async def test_invalidate(self, async_engine):
    """invalidate() marks the fairy invalid and a subsequent raw fetch
    produces a fresh, valid connection fairy."""
    conn = await async_engine.connect()
    is_(conn.invalidated, False)

    fairy = await conn.get_raw_connection()
    is_(fairy.is_valid, True)
    raw_dbapi = fairy.dbapi_connection

    await conn.invalidate()

    # only asyncpg exposes a way to observe the driver-level close
    if testing.against("postgresql+asyncpg"):
        assert raw_dbapi._connection.is_closed()

    replacement = await conn.get_raw_connection()
    is_not(replacement.dbapi_connection, raw_dbapi)
    is_not(replacement, fairy)
    is_(replacement.is_valid, True)
    is_(fairy.is_valid, False)

    await conn.close()
def test_to_metadata(self):
    """Table.to_metadata() copies a Computed default and re-links the
    copy to the copied column, leaving the original wiring intact."""
    computed = Computed("x + 2")
    meta = MetaData()
    table = Table(
        "t", meta, Column("x", Integer), Column("y", Integer, computed)
    )

    # the original Computed is attached to the original column
    is_(computed.column, table.c.y)
    is_(table.c.y.server_onupdate, computed)
    is_(table.c.y.server_default, computed)

    meta_copy = MetaData()
    table_copy = table.to_metadata(meta_copy)
    computed_copy = table_copy.c.y.server_default

    # the copy is a distinct Computed object
    is_not(computed, computed_copy)

    # original wiring is untouched by the copy operation
    is_(computed.column, table.c.y)
    is_(table.c.y.server_onupdate, computed)
    is_(table.c.y.server_default, computed)

    # the copied Computed is attached to the copied column
    is_(computed_copy.column, table_copy.c.y)
    is_(table_copy.c.y.server_onupdate, computed_copy)
    is_(table_copy.c.y.server_default, computed_copy)
def test_unregister(self, registry):
    """registry.dispose() unregisters custom instrumentation, removing
    the manager attribute the custom manager installed on the class."""

    class MyClassState(instrumentation.InstrumentationManager):
        # custom strategy: park the manager on an ``xyz`` class attribute
        def manage(self, class_, manager):
            setattr(class_, "xyz", manager)

        def unregister(self, class_, manager):
            delattr(class_, "xyz")

        def manager_getter(self, class_):
            def get(cls):
                return cls.xyz

            return get

    class MyClass(object):
        __sa_instrumentation_manager__ = MyClassState

    # before mapping there is no manager at all
    assert attributes.manager_of_class(MyClass) is None

    table = Table(
        "my_table",
        registry.metadata,
        Column("id", Integer, primary_key=True),
    )
    registry.map_imperatively(MyClass, table)

    # after mapping: the manager exists and lives where manage() put it
    mgr = attributes.manager_of_class(MyClass)
    is_not(mgr, None)
    is_(mgr, MyClass.xyz)

    registry.configure()
    registry.dispose()

    # after dispose: manager gone, attribute removed by unregister()
    is_(attributes.manager_of_class(MyClass), None)
    assert not hasattr(MyClass, "xyz")
def test_autoincrement(self, metadata, connection):
    # Exercises autoincrement detection across several primary-key
    # layouts: int PK with/without autoincrement, string PKs, and
    # composite keys; then verifies reflection and round-trip inserts.
    Table(
        "ai_1",
        metadata,
        Column("int_y", Integer, primary_key=True, autoincrement=True),
        Column("int_n", Integer, DefaultClause("0"), primary_key=True),
    )
    Table(
        "ai_2",
        metadata,
        Column("int_y", Integer, primary_key=True, autoincrement=True),
        Column("int_n", Integer, DefaultClause("0"), primary_key=True),
    )
    Table(
        "ai_3",
        metadata,
        Column("int_n", Integer, DefaultClause("0"), primary_key=True),
        Column("int_y", Integer, primary_key=True, autoincrement=True),
    )
    Table(
        "ai_4",
        metadata,
        Column("int_n", Integer, DefaultClause("0"), primary_key=True),
        Column("int_n2", Integer, DefaultClause("0"), primary_key=True),
    )
    Table(
        "ai_5",
        metadata,
        Column("int_y", Integer, primary_key=True, autoincrement=True),
        Column("int_n", Integer, DefaultClause("0"), primary_key=True),
    )
    Table(
        "ai_6",
        metadata,
        Column("o1", String(1), DefaultClause("x"), primary_key=True),
        Column("int_y", Integer, primary_key=True, autoincrement=True),
    )
    Table(
        "ai_7",
        metadata,
        Column("o1", String(1), DefaultClause("x"), primary_key=True),
        Column("o2", String(1), DefaultClause("x"), primary_key=True),
        Column("int_y", Integer, autoincrement=True, primary_key=True),
    )
    Table(
        "ai_8",
        metadata,
        Column("o1", String(1), DefaultClause("x"), primary_key=True),
        Column("o2", String(1), DefaultClause("x"), primary_key=True),
    )
    metadata.create_all(connection)

    table_names = [
        "ai_1",
        "ai_2",
        "ai_3",
        "ai_4",
        "ai_5",
        "ai_6",
        "ai_7",
        "ai_8",
    ]
    mr = MetaData()
    for name in table_names:
        # reflect into ``mr`` (the reflected Table itself is discarded —
        # the next line rebinds ``tbl``); assertions run against the
        # originally-declared table in ``metadata``
        tbl = Table(name, mr, autoload_with=connection)
        tbl = metadata.tables[name]

        # test that the flag itself reflects appropriately
        for col in tbl.c:
            if "int_y" in col.name:
                is_(col.autoincrement, True)
                is_(tbl._autoincrement_column, col)
            else:
                eq_(col.autoincrement, "auto")
                is_not(tbl._autoincrement_column, col)

        eng = [
            engines.testing_engine(options={"implicit_returning": False}),
            engines.testing_engine(options={"implicit_returning": True}),
        ]
        for counter, engine in enumerate(eng):
            # NOTE(review): ``engine`` is not referenced in the loop
            # body; all statements run on ``connection`` — presumably
            # the engines only vary a global option. Verify intent.
            connection.execute(tbl.insert())
            if "int_y" in tbl.c:
                # autoincrement column received the next sequence value
                eq_(
                    connection.execute(select(tbl.c.int_y)).scalar(),
                    counter + 1,
                )
                assert (
                    list(connection.execute(tbl.select()).first()).count(
                        counter + 1
                    )
                    == 1
                )
            else:
                # no autoincrement column: generated value must not appear
                assert 1 not in list(
                    connection.execute(tbl.select()).first()
                )
            connection.execute(tbl.delete())
def test_all_import(self):
    """Every name listed in each dialect package's __all__ resolves to
    a real (non-None) attribute on that package."""
    for package in self._all_dialect_packages():
        for exported_name in package.__all__:
            is_not(None, getattr(package, exported_name))
def test_copy(self):
    """A shallow copy of a URL compares equal but is a distinct object."""
    original = url.make_url(
        "dialect://*****:*****@host/db?arg1%3D=param1&arg2=param+2"
    )
    duplicate = copy.copy(original)

    eq_(original, duplicate)
    is_not(original, duplicate)