def test_reset_on_return(self):
    """The pool_reset_on_return engine argument maps onto the
    corresponding pool-level reset constant."""
    dbapi = MockDBAPI(
        foober=12, lala=18, hoho={"this": "dict"}, fooz="somevalue"
    )
    cases = [
        ("rollback", pool.reset_rollback),
        ("commit", pool.reset_commit),
        (None, pool.reset_none),
        (True, pool.reset_rollback),
        (False, pool.reset_none),
    ]
    for value, expected in cases:
        eng = create_engine(
            "postgresql://",
            pool_reset_on_return=value,
            module=dbapi,
            _initialize=False,
        )
        # the engine translates the flag into the pool constant
        assert eng.pool._reset_on_return is expected

    # any unrecognized value is rejected up front
    assert_raises(
        exc.ArgumentError,
        create_engine,
        "postgresql://",
        pool_reset_on_return="hi",
        module=dbapi,
        _initialize=False,
    )
def test_clauseelement(self):
    """Executable clause elements honor an explicit bind= argument and
    refuse to execute when unbound."""
    metadata = MetaData()
    table = Table("test_table", metadata, Column("foo", Integer))
    metadata.create_all(bind=testing.db)
    try:
        for elem in [
            table.select,
            lambda **kwargs: sa.func.current_timestamp(**kwargs).select(),
            lambda **kwargs: text("select * from test_table", **kwargs),
        ]:
            for bind in (testing.db, testing.db.connect()):
                try:
                    clause = elem(bind=bind)
                    assert clause.bind is bind
                    clause.execute().close()
                finally:
                    if isinstance(bind, engine.Connection):
                        bind.close()
            # without a bind, execution is an error
            clause = elem()
            assert clause.bind is None
            assert_raises(exc.UnboundExecutionError, clause.execute)
    finally:
        if isinstance(bind, engine.Connection):
            bind.close()
        metadata.drop_all(bind=testing.db)
def test_collection(self):
    """@validates on a collection vets each append before it happens."""
    users, addresses, Address = (
        self.tables.users,
        self.tables.addresses,
        self.classes.Address,
    )
    canary = Mock()

    class User(fixtures.ComparableEntity):
        @validates("addresses")
        def validate_address(self, key, ad):
            canary(key, ad)
            assert "@" in ad.email_address
            return ad

    mapper(User, users, properties={"addresses": relationship(Address)})
    mapper(Address, addresses)

    sess = Session()
    u1 = User(name="edward")
    # an address lacking "@" is rejected by the validator
    bad = Address(email_address="noemail")
    assert_raises(AssertionError, u1.addresses.append, bad)
    good = Address(id=15, email_address="*****@*****.**")
    u1.addresses.append(good)
    # the validator saw both attempts, failed and successful
    eq_(canary.mock_calls, [call("addresses", bad), call("addresses", good)])

    sess.add(u1)
    sess.commit()
    eq_(
        sess.query(User).filter_by(name="edward").one(),
        User(
            name="edward",
            addresses=[Address(email_address="*****@*****.**")],
        ),
    )
def test_exec_once_unless_exception(self):
    """exec_once_unless_exception retries after failures, memoizes the
    first successful call, then ignores further calls."""
    m1 = Mock()
    m1.side_effect = ValueError
    event.listen(self.Target, "event_one", m1)
    t1 = self.Target()

    def invoke(*args):
        t1.dispatch.event_one.for_modify(
            t1.dispatch).exec_once_unless_exception(*args)

    # failing invocations are not memoized -- each one re-runs
    assert_raises(ValueError, invoke, 5, 6)
    assert_raises(ValueError, invoke, 7, 8)

    m1.side_effect = None
    # the first success sticks; the following call is a no-op
    invoke(9, 10)
    invoke(11, 12)

    eq_(m1.mock_calls, [call(5, 6), call(7, 8), call(9, 10)])
def test_scalar(self):
    """@validates on a scalar attribute can transform or veto assignments."""
    users = self.tables.users
    canary = Mock()

    class User(fixtures.ComparableEntity):
        @validates("name")
        def validate_name(self, key, name):
            canary(key, name)
            ne_(name, "fred")
            return name + " modified"

    mapper(User, users)
    sess = Session()
    u1 = User(name="ed")
    # the validator rewrote the accepted value ...
    eq_(u1.name, "ed modified")
    # ... and rejects "fred", leaving the attribute untouched
    assert_raises(AssertionError, setattr, u1, "name", "fred")
    eq_(u1.name, "ed modified")
    eq_(canary.mock_calls, [call("name", "ed"), call("name", "fred")])

    sess.add(u1)
    sess.commit()
    eq_(
        sess.query(User).filter_by(name="ed modified").one(),
        User(name="ed"),
    )
def test_foreignkey_missing_insert(self):
    """An INSERT supplying no value for a FK primary key must fail with a
    not-null violation, not fall back to autoincrement."""
    Table("t1", self.metadata, Column("id", Integer, primary_key=True))
    t2 = Table(
        "t2",
        self.metadata,
        Column("id", Integer, ForeignKey("t1.id"), primary_key=True),
    )
    self.metadata.create_all()

    # We want "null value in column id violates not-null constraint"
    # (IntegrityError on psycopg2, ProgrammingError on pg8000), NOT
    # 'relationship "t2_id_seq" does not exist' -- the latter would mean
    # autoincrement behavior kicked in, which the foreign key suppresses.
    for eng in [
        engines.testing_engine(options={"implicit_returning": False}),
        engines.testing_engine(options={"implicit_returning": True}),
    ]:
        with expect_warnings(
                ".*has no Python-side or server-side default.*"):
            assert_raises(
                (exc.IntegrityError, exc.ProgrammingError),
                eng.execute,
                t2.insert(),
            )
def test_rekey(self):
    """A reflected column re-keyed via key= is addressable only through
    the new key, not the original column name."""
    class User(Base, fixtures.ComparableEntity):
        __tablename__ = "users"
        __autoload__ = True
        nom = Column("name", String(50), key="nom")
        addresses = relationship("Address", backref="user")

    class Address(Base, fixtures.ComparableEntity):
        __tablename__ = "addresses"
        __autoload__ = True

    u1 = User(
        nom="u1", addresses=[Address(email="one"), Address(email="two")]
    )
    sess = create_session()
    sess.add(u1)
    sess.flush()
    sess.expunge_all()

    eq_(
        sess.query(User).all(),
        [
            User(
                nom="u1",
                addresses=[Address(email="one"), Address(email="two")],
            )
        ],
    )
    a1 = sess.query(Address).filter(Address.email == "two").one()
    eq_(a1, Address(email="two"))
    eq_(a1.user, User(nom="u1"))

    # the original column name is no longer a valid constructor kwarg
    assert_raises(TypeError, User, name="u3")
def test_too_long_index_name(self):
    """Auto-generated index names over the dialect limit are truncated
    deterministically; explicit over-long names are an error."""
    dialect = testing.db.dialect.__class__()

    for max_ident, max_index in [(22, None), (256, 22)]:
        dialect.max_identifier_length = max_ident
        dialect.max_index_name_length = max_index

        # generated names get truncated plus a hash suffix
        for tname, cname, exp in [
            ("sometable", "this_name_is_too_long", "ix_sometable_t_09aa"),
            ("sometable", "this_name_alsois_long", "ix_sometable_t_3cf1"),
        ]:
            t1 = Table(
                tname, MetaData(), Column(cname, Integer, index=True))
            ix1 = list(t1.indexes)[0]
            self.assert_compile(
                schema.CreateIndex(ix1),
                "CREATE INDEX %s ON %s (%s)" % (exp, tname, cname),
                dialect=dialect,
            )

    # a user-supplied name that exceeds the limit cannot be truncated
    dialect.max_identifier_length = 22
    dialect.max_index_name_length = None
    t1 = Table("t", MetaData(), Column("c", Integer))
    assert_raises(
        exc.IdentifierError,
        schema.CreateIndex(
            Index(
                "this_other_name_is_too_long_for_what_were_doing",
                t1.c.c)).compile,
        dialect=dialect,
    )
def test_noninherited_warning(self):
    """Concrete subclasses do not inherit properties configured only on
    the parent mapper."""
    A, B, b_table, a_table, Dest, dest_table = (
        self.classes.A,
        self.classes.B,
        self.tables.b_table,
        self.tables.a_table,
        self.classes.Dest,
        self.tables.dest_table,
    )

    # a relationship configured on A only is unusable on concrete B
    mapper(A, a_table, properties={"some_dest": relationship(Dest)})
    mapper(B, b_table, inherits=A, concrete=True)
    mapper(Dest, dest_table)
    b = B()
    dest = Dest()
    assert_raises(AttributeError, setattr, b, "some_dest", dest)

    # likewise for a plain column property
    clear_mappers()
    mapper(A, a_table, properties={"a_id": a_table.c.id})
    mapper(B, b_table, inherits=A, concrete=True)
    mapper(Dest, dest_table)
    b = B()
    assert_raises(AttributeError, setattr, b, "a_id", 3)

    clear_mappers()
    mapper(A, a_table, properties={"a_id": a_table.c.id})
    mapper(B, b_table, inherits=A, concrete=True)
    mapper(Dest, dest_table)
def test_ddl_execute(self):
    """DDL executes via engine, connection, or its own bind, and refuses
    to execute while unbound."""
    engine = create_engine("sqlite:///")
    cx = engine.connect()
    table = self.users
    ddl = DDL("SELECT 1")

    # every (executor, args) combination should yield the same row
    for spec in (
        (engine.execute, ddl),
        (engine.execute, ddl, table),
        (cx.execute, ddl),
        (cx.execute, ddl, table),
        (ddl.execute, engine),
        (ddl.execute, engine, table),
        (ddl.execute, cx),
        (ddl.execute, cx, table),
    ):
        fn, arg = spec[0], spec[1:]
        result = fn(*arg)
        eq_(list(result), [(1, )])

    # unbound DDL with no explicit executor cannot run
    for fn, kw in ((ddl.execute, {}), (ddl.execute, dict(target=table))):
        assert_raises(tsa.exc.UnboundExecutionError, fn, **kw)

    # once bound, no explicit executor is needed
    for bind in engine, cx:
        ddl.bind = bind
        for fn, kw in (
            (ddl.execute, {}),
            (ddl.execute, dict(target=table)),
        ):
            result = fn(**kw)
            eq_(list(result), [(1, )])
def test_invalidate_trans(self):
    """An invalidated connection refuses further work until the open
    transaction is explicitly rolled back."""
    conn = self.db.connect()
    trans = conn.begin()
    self.dbapi.shutdown()

    # the failed statement invalidates the connection
    assert_raises(tsa.exc.DBAPIError, conn.execute, select([1]))
    eq_([c.close.mock_calls for c in self.dbapi.connections], [[call()]])
    assert not conn.closed
    assert conn.invalidated
    assert trans.is_active

    # both execute and commit are blocked while the dead transaction
    # is still open
    assert_raises_message(
        tsa.exc.StatementError,
        "Can't reconnect until invalid transaction is rolled back",
        conn.execute,
        select([1]),
    )
    assert trans.is_active
    assert_raises_message(
        tsa.exc.InvalidRequestError,
        "Can't reconnect until invalid transaction is rolled back",
        trans.commit,
    )
    assert trans.is_active

    # rollback clears the way for a fresh DBAPI connection
    trans.rollback()
    assert not trans.is_active
    conn.execute(select([1]))
    assert not conn.invalidated
    eq_(
        [c.close.mock_calls for c in self.dbapi.connections],
        [[call()], []],
    )
def test_not_instantiatable(self):
    """Calling an aliased class does not construct an instance."""
    class Point(object):
        pass

    self._fixture(Point)
    alias = aliased(Point)
    assert_raises(TypeError, alias)
def test_cx_oracle_service_name_bad(self):
    """A service_name query arg that conflicts with the URL's database
    portion is rejected at engine creation time."""
    url_string = (
        "oracle+cx_oracle://scott:tiger@host/hr1?service_name=hr2"
    )
    assert_raises(
        exc.InvalidRequestError,
        create_engine,
        url_string,
        _initialize=False,
    )
def test_reconnect(self):
    """An 'is_disconnect' condition invalidates the connection, disposes
    the previous connection pool, and recreates it."""
    # one working connection ...
    conn = self.db.connect()
    conn.execute(select([1]))

    # ... plus a second pooled connection we expect to be disposed too
    conn2 = self.db.connect()
    conn2.close()
    assert len(self.dbapi.connections) == 2

    # simulate the database going away
    self.dbapi.shutdown()
    assert_raises(tsa.exc.DBAPIError, conn.execute, select([1]))

    # the failed connection is invalidated, not closed
    assert not conn.closed
    assert conn.invalidated

    # close() on an invalidated connection must not break
    conn.close()
    eq_(
        [c.close.mock_calls for c in self.dbapi.connections],
        [[call()], []],
    )

    # reconnecting disposes the remaining old connection, opens a new one
    conn = self.db.connect()
    eq_(
        [c.close.mock_calls for c in self.dbapi.connections],
        [[call()], [call()], []],
    )
    conn.execute(select([1]))
    conn.close()
    eq_(
        [c.close.mock_calls for c in self.dbapi.connections],
        [[call()], [call()], []],
    )
def test_detached_raise(self):
    """Dynamic collection access on a detached instance raises."""
    User, Address = self._user_address_fixture()
    sess = create_session()
    user = sess.query(User).get(8)
    sess.expunge(user)
    assert_raises(
        orm_exc.DetachedInstanceError,
        user.addresses.filter_by,
        email_address="e",
    )
def test_pre_ping_db_stays_shutdown(self):
    """With pool_pre_ping, connecting while the database stays down
    still raises rather than handing out a dead connection."""
    engine = engines.reconnecting_engine(options={"pool_pre_ping": True})

    conn = engine.connect()
    eq_(conn.execute(select([1])).scalar(), 1)
    conn.close()

    # take the database down and leave it down; pre-ping can't help
    engine.test_shutdown(stop=True)
    assert_raises(exc.DBAPIError, engine.connect)
def test_explode_in_initializer(self):
    """A dialect initialize() that fails surfaces as DBAPIError on
    connect, not some secondary error."""
    engine = engines.testing_engine()

    def broken_initialize(connection):
        connection.execute("select fake_stuff from _fake_table")

    engine.dialect.initialize = broken_initialize

    # the failure propagates as a DBAPIError, not an AttributeError
    assert_raises(exc.DBAPIError, engine.connect)
def test_get_attribute_error(self):
    """index_property raises AttributeError when the index is missing."""
    Base = declarative_base()

    class A(Base):
        __tablename__ = "a"
        id = Column("id", Integer, primary_key=True)
        array = Column("_array", ARRAY(Integer))
        first = index_property("array", 1)

    a = A(array=[])
    # the array is empty, so index 1 does not exist
    assert_raises(AttributeError, lambda: a.first)
def test_inheriting(self):
    """Concrete base and subclass each carry their own relationship to
    Dest, kept in sync via back_populates."""
    A, B, b_table, a_table, Dest, dest_table = (
        self.classes.A,
        self.classes.B,
        self.tables.b_table,
        self.tables.a_table,
        self.classes.Dest,
        self.tables.dest_table,
    )
    mapper(
        A,
        a_table,
        properties={
            "some_dest": relationship(Dest, back_populates="many_a")
        },
    )
    mapper(
        B,
        b_table,
        inherits=A,
        concrete=True,
        properties={
            "some_dest": relationship(Dest, back_populates="many_b")
        },
    )
    mapper(
        Dest,
        dest_table,
        properties={
            "many_a": relationship(A, back_populates="some_dest"),
            "many_b": relationship(B, back_populates="some_dest"),
        },
    )

    sess = sessionmaker()()
    dest1 = Dest(name="c1")
    dest2 = Dest(name="c2")
    a1 = A(some_dest=dest1, aname="a1")
    a2 = A(some_dest=dest2, aname="a2")
    b1 = B(some_dest=dest1, bname="b1")
    b2 = B(some_dest=dest1, bname="b2")

    # concrete inheritance: neither class sees the other's attributes
    assert_raises(AttributeError, setattr, b1, "aname", "foo")
    assert_raises(AttributeError, getattr, A, "bname")

    assert dest2.many_a == [a2]
    assert dest1.many_a == [a1]
    assert dest1.many_b == [b1, b2]

    sess.add_all([dest1, dest2])
    sess.commit()

    # the backrefs survive a round trip through the database
    assert sess.query(Dest).filter(Dest.many_a.contains(a2)).one() is dest2
    assert dest2.many_a == [a2]
    assert dest1.many_a == [a1]
    assert dest1.many_b == [b1, b2]
    assert sess.query(B).filter(B.bname == "b1").one() is b1
def test_too_long_name_disallowed(self):
    """Every DDL entry point rejects an over-long table name."""
    m = MetaData()
    t = Table(
        "this_name_is_too_long_for_what_were_doing_in_this_test",
        m,
        Column("foo", Integer),
    )
    eng = self._engine_fixture()
    for meth in (t.create, t.drop, m.create_all, m.drop_all):
        assert_raises(exceptions.IdentifierError, meth, eng)
def test_no_null(self):
    """Inserting NULL into every SET column fails at the database level."""
    set_table = self._set_fixture_one()
    set_table.create()
    assert_raises(
        exc.DBAPIError,
        set_table.insert().execute,
        e1=None,
        e2=None,
        e3=None,
        e4=None,
    )
def test_oursql_error_one(self):
    """Inserting an invalid SET value raises a StatementError."""
    set_table = self._set_fixture_one()
    set_table.create()
    assert_raises(
        exc.StatementError,
        set_table.insert().execute,
        e1="c",
        e2="c",
        e3="c",
        e4="c",
    )
def test_insert_literal_binds_sequence_notimplemented(self):
    """literal_binds compilation cannot render a Sequence default."""
    table = Table(
        "x", MetaData(), Column("y", Integer, Sequence("y_seq")))
    dialect = default.DefaultDialect()
    dialect.supports_sequences = True

    stmt = table.insert().values(myid=3, name="jack")
    assert_raises(
        NotImplementedError,
        stmt.compile,
        compile_kwargs=dict(literal_binds=True),
        dialect=dialect,
    )
def _assert_errorhandler(self, outconverter, has_errorhandler):
    """Feed bytes that are invalid as utf-8 through *outconverter* and
    check whether decode errors are ignored (errorhandler present) or
    raised (no errorhandler)."""
    data = ue("\uee2c\u9a66")
    # utf-16 bytes are not valid utf-8, so decoding as utf-8 errors out
    bad_bytes = data.encode("utf-16")
    if has_errorhandler:
        eq_(
            outconverter(bad_bytes),
            data.encode("utf-16").decode("utf-8", "ignore"),
        )
    else:
        assert_raises(UnicodeDecodeError, outconverter, bad_bytes)
def test_value_is_none_attributeerror(self):
    """index_property get/delete on a NULL column raise AttributeError."""
    Base = declarative_base()

    class A(Base):
        __tablename__ = "a"
        id = Column("id", Integer, primary_key=True)
        array = Column("_array", ARRAY(Integer))
        first = index_property("array", 1)

    a = A()
    # the underlying "array" attribute is None -- nothing to index into
    assert_raises(AttributeError, getattr, a, "first")
    assert_raises(AttributeError, delattr, a, "first")
def test_pop(self):
    """Mutable set tracking: pop() persists; pop() on empty raises."""
    sess = Session()
    f1 = Foo(data=set([1]))
    sess.add(f1)
    sess.commit()

    eq_(f1.data.pop(), 1)
    sess.commit()

    # the set is now empty, so another pop fails
    assert_raises(KeyError, f1.data.pop)
    eq_(f1.data, set())
def test_pop(self):
    """Mutable dict tracking: pop() persists; a missing key raises."""
    sess = Session()
    f1 = Foo(data={"a": "b", "c": "d"})
    sess.add(f1)
    sess.commit()

    eq_(f1.data.pop("a"), "b")
    sess.commit()

    # popping a key that isn't present fails as on a plain dict
    assert_raises(KeyError, f1.data.pop, "g")
    eq_(f1.data, {"c": "d"})
def test_standalone_orphans(self):
    """Flushing a Boss with no owning parent hits a database error."""
    # the person column may have been remapped by the fixture
    person_attribute_name = (
        "person_name" if self.redefine_colprop else "name"
    )

    session = Session()
    daboss = Boss(
        status="BBB",
        manager_name="boss",
        golf_swing="fore",
        **{person_attribute_name: "daboss"}
    )
    session.add(daboss)
    assert_raises(sa_exc.DBAPIError, session.flush)
def test_pop(self):
    """Mutable list tracking: pops persist; out-of-range pop raises."""
    sess = Session()
    f1 = Foo(data=[1, 2, 3])
    sess.add(f1)
    sess.commit()

    eq_(f1.data.pop(), 3)
    eq_(f1.data.pop(0), 1)
    sess.commit()

    # only one element remains, so index 5 is out of range
    assert_raises(IndexError, f1.data.pop, 5)
    eq_(f1.data, [2])
def test_ensure_is_disconnect_gets_connection(self):
    """The is_disconnect hook receives the live connection on failure."""
    def is_disconnect(e, conn, cursor):
        # the connection object must still be attached
        assert conn.connection is not None
        # the error usually occurs on connection.cursor(); with MySQLdb
        # we instead get a non-working cursor.
        # assert cursor is None

    self.engine.dialect.is_disconnect = is_disconnect
    conn = self.engine.connect()
    self.engine.test_shutdown()
    with expect_warnings(
            "An exception has occurred during handling .*",
            py2konly=True):
        assert_raises(tsa.exc.DBAPIError, conn.execute, select([1]))