def test_savepoint_lost_still_runs(self):
    """A savepoint whose underlying transaction is killed externally
    still lets the Session roll back and recover into a new, active,
    non-nested transaction."""
    User = self.classes.User
    sess = self.session(bind=self.bind)
    savepoint = sess.begin_nested()
    sess.connection()

    pending = User(name='ed')
    sess.add(pending)

    # kill off the transaction behind the Session's back
    inner = savepoint._connections[self.bind][1]
    inner._do_commit()

    is_(sess.transaction, savepoint)
    assert_raises(sa_exc.DBAPIError, sess.rollback)

    # the pending object was discarded and the dead savepoint replaced
    assert pending not in sess.new
    is_(savepoint._state, _session.CLOSED)
    is_not_(sess.transaction, savepoint)
    is_(sess.transaction._state, _session.ACTIVE)
    is_(sess.transaction.nested, False)
    is_(sess.transaction._parent, None)
def test_conn_reusable(self):
    """A connection invalidated by a dbapi shutdown reconnects
    transparently on the next execute."""
    conn = self.db.connect()
    conn.execute(select([1]))
    eq_(self.dbapi.connect.mock_calls, [self.mock_connect])

    # simulate the database going away mid-use
    self.dbapi.shutdown()
    assert_raises(tsa.exc.DBAPIError, conn.execute, select([1]))

    # the Connection object survives but is marked invalidated,
    # and the underlying dbapi connection was closed
    assert not conn.closed
    assert conn.invalidated
    eq_([c.close.mock_calls for c in self.dbapi.connections], [[call()]])

    # test reconnects
    conn.execute(select([1]))
    assert not conn.invalidated
    eq_(
        [c.close.mock_calls for c in self.dbapi.connections],
        [[call()], []],
    )
def test_deleted_flag(self):
    """A deleted-then-flushed object can't be re-added until it is
    made transient again."""
    users, User = self.tables.users, self.classes.User
    mapper(User, users)
    session = sessionmaker()()

    obj = User(name='u1')
    session.add(obj)
    session.commit()

    # flushed delete: object leaves the session and can't be re-added
    session.delete(obj)
    session.flush()
    assert obj not in session
    assert_raises(sa.exc.InvalidRequestError, session.add, obj)

    # rollback restores it
    session.rollback()
    assert obj in session

    # committed delete: same restriction applies
    session.delete(obj)
    session.commit()
    assert obj not in session
    assert_raises(sa.exc.InvalidRequestError, session.add, obj)

    # make_transient clears the deleted state, allowing re-insert
    make_transient(obj)
    session.add(obj)
    session.commit()
    eq_(session.query(User).count(), 1)
def test_row_case_sensitive(self):
    """Row keys are matched case-sensitively when the dialect/result
    is in case-sensitive mode."""
    row = testing.db.execute(
        select([
            literal_column("1").label("case_insensitive"),
            literal_column("2").label("CaseSensitive"),
        ])
    ).first()

    eq_(list(row.keys()), ["case_insensitive", "CaseSensitive"])

    # keymap holds the labels exactly as rendered, nothing folded
    in_("case_insensitive", row._keymap)
    in_("CaseSensitive", row._keymap)
    not_in_("casesensitive", row._keymap)

    eq_(row["case_insensitive"], 1)
    eq_(row["CaseSensitive"], 2)

    # wrong-case lookups fail outright
    assert_raises(KeyError, lambda: row["Case_insensitive"])
    assert_raises(KeyError, lambda: row["casesensitive"])
def test_noninherited_warning(self):
    """Attributes defined only on a concrete-inheritance parent are not
    available on the child and raise AttributeError when set."""
    A, B, b_table, a_table, Dest, dest_table = (
        self.classes.A,
        self.classes.B,
        self.tables.b_table,
        self.tables.a_table,
        self.classes.Dest,
        self.tables.dest_table,
    )

    # relationship on A does not propagate to concrete subclass B
    mapper(A, a_table, properties={"some_dest": relationship(Dest)})
    mapper(B, b_table, inherits=A, concrete=True)
    mapper(Dest, dest_table)
    assert_raises(AttributeError, setattr, B(), "some_dest", Dest())
    clear_mappers()

    # column property on A is likewise absent from B
    mapper(A, a_table, properties={"a_id": a_table.c.id})
    mapper(B, b_table, inherits=A, concrete=True)
    mapper(Dest, dest_table)
    assert_raises(AttributeError, setattr, B(), "a_id", 3)
    clear_mappers()

    mapper(A, a_table, properties={"a_id": a_table.c.id})
    mapper(B, b_table, inherits=A, concrete=True)
    mapper(Dest, dest_table)
def test_not_instantiatable(self):
    """An aliased() construct cannot itself be called/instantiated."""
    class Point(object):
        pass

    table = self._fixture(Point)
    point_alias = aliased(Point)
    assert_raises(TypeError, point_alias)
def test_foreignkey_missing_insert(self):
    """An FK primary key with no default raises on insert rather than
    falling back to autoincrement/sequence behavior."""
    Table("t1", self.metadata, Column("id", Integer, primary_key=True))
    t2 = Table(
        "t2",
        self.metadata,
        Column("id", Integer, ForeignKey("t1.id"), primary_key=True),
    )
    self.metadata.create_all()

    # want to ensure that "null value in column "id" violates not-
    # null constraint" is raised (IntegrityError on psycoopg2, but
    # ProgrammingError on pg8000), and not "ProgrammingError:
    # (ProgrammingError) relationship "t2_id_seq" does not exist".
    # the latter corresponds to autoincrement behavior, which is not
    # the case here due to the foreign key.
    for implicit_returning in (False, True):
        eng = engines.testing_engine(
            options={"implicit_returning": implicit_returning}
        )
        with expect_warnings(
            ".*has no Python-side or server-side default.*"
        ):
            assert_raises(
                (exc.IntegrityError, exc.ProgrammingError),
                eng.execute,
                t2.insert(),
            )
def test_too_long_idx_name(self):
    """Index names longer than the dialect limits are truncated with a
    hash suffix; an explicit over-long name raises IdentifierError."""
    dialect = testing.db.dialect.__class__()

    for max_ident, max_index in [(22, None), (256, 22)]:
        dialect.max_identifier_length = max_ident
        dialect.max_index_name_length = max_index

        cases = [
            ('sometable', 'this_name_is_too_long', 'ix_sometable_t_09aa'),
            ('sometable', 'this_name_alsois_long', 'ix_sometable_t_3cf1'),
        ]
        for tname, cname, exp in cases:
            t1 = Table(
                tname, MetaData(),
                Column(cname, Integer, index=True),
            )
            ix1 = list(t1.indexes)[0]
            self.assert_compile(
                schema.CreateIndex(ix1),
                "CREATE INDEX %s ON %s (%s)" % (exp, tname, cname),
                dialect=dialect,
            )

    # an explicitly named index over the limit is an error, not truncated
    dialect.max_identifier_length = 22
    dialect.max_index_name_length = None
    t1 = Table('t', MetaData(), Column('c', Integer))
    assert_raises(
        exc.IdentifierError,
        schema.CreateIndex(
            Index(
                "this_other_name_is_too_long_for_what_were_doing",
                t1.c.c)).compile,
        dialect=dialect,
    )
def test_clauseelement(self):
    # Executable clause elements (select(), text(), function selects)
    # honor an explicit ``bind`` argument and raise
    # UnboundExecutionError when executed with no bind at all.
    metadata = MetaData()
    table = Table('test_table', metadata, Column('foo', Integer))
    metadata.create_all(bind=testing.db)
    try:
        for elem in [
            table.select,
            lambda **kwargs: sa.func.current_timestamp(**kwargs).select(),
            # func.current_timestamp().select,
            lambda **kwargs: text("select * from test_table", **kwargs)
        ]:
            for bind in (
                testing.db,
                testing.db.connect()
            ):
                try:
                    e = elem(bind=bind)
                    assert e.bind is bind
                    e.execute().close()
                finally:
                    if isinstance(bind, engine.Connection):
                        bind.close()

            # with no bind given, execution must fail cleanly
            e = elem()
            assert e.bind is None
            assert_raises(
                exc.UnboundExecutionError,
                e.execute
            )
    finally:
        # NOTE(review): ``bind`` deliberately leaks from the loop above;
        # this re-closes the last Connection (a no-op if already closed)
        # before dropping the table.
        if isinstance(bind, engine.Connection):
            bind.close()
        metadata.drop_all(bind=testing.db)
def test_w_mapper_versioning(self):
    """The history-tracking ``version`` column doubles as the mapper's
    version_id_col, so concurrent stale updates raise StaleDataError."""
    class SomeClass(Versioned, self.Base, ComparableEntity):
        __tablename__ = "sometable"

        id = Column(Integer, primary_key=True)
        name = Column(String(50))

    SomeClass.__mapper__.version_id_col = SomeClass.__table__.c.version

    self.create_tables()
    sess = self.session
    sc = SomeClass(name="sc1")
    sess.add(sc)
    sess.commit()

    # load the same row in a second session
    other_sess = Session(sess.bind)
    stale = other_sess.query(SomeClass).first()
    stale.name = "sc1modified"

    # first session wins and bumps the version
    sc.name = "sc1modified_again"
    sess.commit()
    eq_(sc.version, 2)

    # second session now holds a stale version id
    assert_raises(orm_exc.StaleDataError, other_sess.flush)
def test_notcallable(self):
    """aliased() of a mapped class is not callable."""
    class Point(object):
        pass

    table = self.point_map(Point)
    point_alias = aliased(Point)
    assert_raises(TypeError, point_alias)
def test_scalar(self):
    """A @validates method can transform the value and raise to reject
    it; rejection leaves the attribute unchanged."""
    users = self.tables.users
    canary = Mock()

    class User(fixtures.ComparableEntity):
        @validates('name')
        def validate_name(self, key, name):
            canary(key, name)
            ne_(name, 'fred')
            return name + ' modified'

    mapper(User, users)
    sess = Session()

    user = User(name='ed')
    eq_(user.name, 'ed modified')

    # validator raises -> assignment rejected, old value retained
    assert_raises(AssertionError, setattr, user, "name", "fred")
    eq_(user.name, 'ed modified')
    eq_(canary.mock_calls, [call('name', 'ed'), call('name', 'fred')])

    sess.add(user)
    sess.commit()
    eq_(
        sess.query(User).filter_by(name='ed modified').one(),
        User(name='ed'),
    )
def test_versioncheck_for_update(self):
    """query.with_lockmode performs a 'version check' on an already
    loaded instance"""
    Foo = self.classes.Foo

    s1 = self._fixture()
    f1s1 = Foo(value='f1 value')
    s1.add(f1s1)
    s1.commit()

    # second session grabs a FOR UPDATE lock on the row
    s2 = create_session(autocommit=False)
    f1s2 = s2.query(Foo).get(f1s1.id)
    s2.refresh(f1s2, lockmode='update')
    f1s2.value = 'f1 new value'

    # NOWAIT refresh in the first session hits the held lock
    assert_raises(
        exc.DBAPIError,
        s1.refresh, f1s1, lockmode='update_nowait',
    )
    s1.rollback()

    # once s2 commits, the refresh succeeds and sees the new version
    s2.commit()
    s1.refresh(f1s1, lockmode='update_nowait')
    assert f1s1.version_id == f1s2.version_id
def test_rollback_recover(self):
    """After a failed flush, rollback restores both the pending-delete
    and the conflicting pending-add state."""
    users, User = self.tables.users, self.classes.User
    mapper(User, users)
    session = sessionmaker()()

    u1, u2, u3 = User(name='u1'), User(name='u2'), User(name='u3')
    session.add_all([u1, u2, u3])
    session.commit()

    # replace u2 with a new row of the same name; flush succeeds
    session.delete(u2)
    replacement = User(name='u2')
    session.add(replacement)
    session.flush()

    # duplicating u3's name without deleting it fails the flush
    duplicate = User(name='u3')
    session.add(duplicate)
    assert_raises(orm_exc.FlushError, session.flush)

    assert duplicate not in session
    assert u2 not in session.deleted

    session.rollback()
def test_collection(self):
    """@validates on a collection attribute fires per appended item and
    can reject an append by raising."""
    users, addresses, Address = (
        self.tables.users,
        self.tables.addresses,
        self.classes.Address,
    )
    canary = Mock()

    class User(fixtures.ComparableEntity):
        @validates('addresses')
        def validate_address(self, key, ad):
            canary(key, ad)
            assert '@' in ad.email_address
            return ad

    mapper(User, users, properties={'addresses': relationship(Address)})
    mapper(Address, addresses)
    sess = Session()

    user = User(name='edward')

    # an address without '@' is rejected by the validator
    bad = Address(email_address='noemail')
    assert_raises(AssertionError, user.addresses.append, bad)

    good = Address(id=15, email_address='*****@*****.**')
    user.addresses.append(good)
    eq_(canary.mock_calls, [call('addresses', bad), call('addresses', good)])

    sess.add(user)
    sess.commit()
    eq_(
        sess.query(User).filter_by(name='edward').one(),
        User(
            name='edward',
            addresses=[Address(email_address='*****@*****.**')]),
    )
def test_pk_violation_with_savepoint(self):
    """Rolling back a failed savepoint discards its pending objects
    while preserving outer-transaction changes."""
    User, Address = self.classes.User, self.classes.Address
    s = self.session()

    a1 = Address(email_address='foo')
    u1 = User(id=1, name='ed', addresses=[a1])
    s.add(u1)
    s.commit()

    # conflicting duplicate PK, added inside a savepoint
    a2 = Address(email_address='bar')
    u2 = User(id=1, name='jack', addresses=[a2])
    u1.name = 'edward'
    a1.email_address = 'foober'
    s.begin_nested()
    s.add(u2)

    assert_raises(orm_exc.FlushError, s.commit)
    # session is now unusable until rolled back
    assert_raises(sa_exc.InvalidRequestError, s.commit)
    s.rollback()

    # savepoint objects gone; outer-transaction objects intact
    assert u2 not in s
    assert a2 not in s
    assert u1 in s
    assert a1 in s

    s.commit()
    eq_(
        s.query(User).all(),
        [
            User(
                id=1,
                name='edward',
                addresses=[Address(email_address='foober')])],
    )
def test_accounting_commit_fails_delete(self):
    """A delete undone by a failed flush can be re-issued and flushed
    successfully afterwards."""
    User = self.classes.User
    sess = create_session(autocommit=True)

    fail = False

    def fail_fn(*arg, **kw):
        # toggled on to sabotage the flush at postexec time
        if fail:
            raise Exception("commit fails")

    event.listen(sess, "after_flush_postexec", fail_fn)

    user = User(name='ed')
    sess.add(user)
    sess.flush()

    sess.delete(user)
    fail = True
    assert_raises(Exception, sess.flush)
    fail = False

    # failed flush restored the object; the delete was rolled back
    assert user in sess
    assert user not in sess.deleted

    # deleting again now works
    sess.delete(user)
    sess.flush()
    assert user not in sess
    eq_(sess.query(User.name).order_by(User.name).all(), [])
def test_string_dates_passed_raise(self):
    """Passing a string where a Date bindparam is expected raises
    StatementError rather than silently coercing."""
    stmt = select([1]).where(bindparam("date", type_=Date))
    assert_raises(
        exc.StatementError,
        testing.db.execute,
        stmt,
        date=str(datetime.date(2007, 10, 30)),
    )
def test_invalidate_conn_interrupt_nodisconnect_workaround(self):
    # test [ticket:3803] workaround for no disconnect on
    # keyboard interrupt
    @event.listens_for(self.db, "handle_error")
    def cancel_disconnect(ctx):
        # force the error NOT to be treated as a disconnect
        ctx.is_disconnect = False

    pool = self.db.pool
    conn = self.db.connect()
    self.dbapi.shutdown("interrupt_dont_break")

    def go():
        with conn.begin():
            conn.execute(select([1]))

    assert_raises(MockExitIsh, go)

    # connection and pool both survive untouched
    assert not conn.invalidated
    eq_(pool._invalidate_time, 0)  # pool not invalidated

    conn.execute(select([1]))
    assert not conn.invalidated
def test_conn_reusable(self):
    """After a dbapi-level shutdown the pool recycles its connections
    and the Connection reconnects on next use."""
    conn = db.connect()
    conn.execute(select([1]))
    assert len(dbapi.connections) == 1

    dbapi.shutdown()
    assert_raises(tsa.exc.DBAPIError, conn.execute, select([1]))

    assert not conn.closed
    assert conn.invalidated

    # ensure all connections closed (pool was recycled)
    gc_collect()
    assert len(dbapi.connections) == 0

    # test reconnects
    conn.execute(select([1]))
    assert not conn.invalidated
    assert len(dbapi.connections) == 1
def test_symmetric_difference(self):
    """IdentitySet.symmetric_difference behaves like set's, and rejects
    non-IdentitySet arguments."""
    _, _, twin1, twin2, _, _ = self._create_sets()
    empty = util.IdentitySet([])

    # basic set math
    left = util.IdentitySet([1, 2, 3])
    right = util.IdentitySet([2, 3, 4])
    eq_(left.symmetric_difference(right), util.IdentitySet([1, 4]))
    eq_(right.symmetric_difference(left), util.IdentitySet([1, 4]))

    # empty sets
    eq_(empty.symmetric_difference(empty), empty)

    # the same sets
    eq_(twin1.symmetric_difference(twin2), empty)
    eq_(twin2.symmetric_difference(twin1), empty)

    # totally different sets
    only1 = util.IdentitySet([1])
    only2 = util.IdentitySet([2])
    eq_(only1.symmetric_difference(only2), util.IdentitySet([1, 2]))
    eq_(only2.symmetric_difference(only1), util.IdentitySet([1, 2]))

    # not an IdentitySet
    assert_raises(
        TypeError, only1.symmetric_difference, object())
def test_dunder_xor(self):
    """The ^ operator on IdentitySet matches symmetric_difference and
    raises TypeError for non-IdentitySet operands."""
    _, _, twin1, twin2, _, _ = self._create_sets()
    empty = util.IdentitySet([])

    # basic set math
    left = util.IdentitySet([1, 2, 3])
    right = util.IdentitySet([2, 3, 4])
    eq_(left ^ right, util.IdentitySet([1, 4]))
    eq_(right ^ left, util.IdentitySet([1, 4]))

    # empty sets
    eq_(empty ^ empty, empty)

    # the same sets
    eq_(twin1 ^ twin2, empty)
    eq_(twin2 ^ twin1, empty)

    # totally different sets
    only1 = util.IdentitySet([1])
    only2 = util.IdentitySet([2])
    eq_(only1 ^ only2, util.IdentitySet([1, 2]))
    eq_(only2 ^ only1, util.IdentitySet([1, 2]))

    # not an IdentitySet
    def should_raise():
        return only1 ^ object()

    assert_raises(TypeError, should_raise)
def test_dunder_or(self):
    """The | operator on IdentitySet computes union and raises
    TypeError for non-IdentitySet operands."""
    super_, sub_, twin1, twin2, _, _ = self._create_sets()
    empty = util.IdentitySet([])

    # basic set math: subset | superset is the superset
    eq_(sub_ | super_, super_)
    eq_(super_ | sub_, super_)

    # the same sets
    eq_(twin1 | twin2, twin1)
    eq_(twin2 | twin1, twin1)

    # empty sets
    eq_(empty | empty, empty)

    # totally different sets
    only1 = util.IdentitySet([1])
    only2 = util.IdentitySet([2])
    eq_(only1 | only2, util.IdentitySet([1, 2]))

    # not an IdentitySet
    def should_raise():
        return only1 | object()

    assert_raises(TypeError, should_raise)
def test_dunder_sub(self):
    """The - operator on IdentitySet computes difference and raises
    TypeError for non-IdentitySet operands."""
    _, _, twin1, twin2, _, _ = self._create_sets()
    empty = util.IdentitySet([])

    # basic set math
    left = util.IdentitySet([1, 2, 3])
    right = util.IdentitySet([2, 3, 4])
    eq_(left - right, util.IdentitySet([1]))
    eq_(right - left, util.IdentitySet([4]))

    # empty sets
    eq_(empty - empty, empty)

    # the same sets
    eq_(twin1 - twin2, empty)
    eq_(twin2 - twin1, empty)

    # totally different sets
    only1 = util.IdentitySet([1])
    only2 = util.IdentitySet([2])
    eq_(only1 - only2, util.IdentitySet([1]))
    eq_(only2 - only1, util.IdentitySet([2]))

    # not an IdentitySet
    def should_raise():
        only1 - object()

    assert_raises(TypeError, should_raise)
def test_reset_on_return(self):
    """pool_reset_on_return accepts the documented aliases and rejects
    unknown strings."""
    dbapi = MockDBAPI(
        foober=12, lala=18, hoho={"this": "dict"}, fooz="somevalue"
    )

    cases = [
        ("rollback", pool.reset_rollback),
        ("commit", pool.reset_commit),
        (None, pool.reset_none),
        (True, pool.reset_rollback),
        (False, pool.reset_none),
    ]
    for value, expected in cases:
        engine_ = create_engine(
            "postgresql://",
            pool_reset_on_return=value,
            module=dbapi,
            _initialize=False,
        )
        assert engine_.pool._reset_on_return is expected

    # unknown value is rejected at engine creation time
    assert_raises(
        exc.ArgumentError,
        create_engine,
        "postgresql://",
        pool_reset_on_return="hi",
        module=dbapi,
        _initialize=False,
    )
def test_pk_violation(self):
    """A PK-violating flush fails, rollback restores pre-flush state,
    and the session is usable again afterwards."""
    User, Address = self.classes.User, self.classes.Address
    s = self.session()

    a1 = Address(email_address="foo")
    u1 = User(id=1, name="ed", addresses=[a1])
    s.add(u1)
    s.commit()

    # duplicate primary key plus some dirty state on u1/a1
    a2 = Address(email_address="bar")
    u2 = User(id=1, name="jack", addresses=[a2])
    u1.name = "edward"
    a1.email_address = "foober"
    s.add(u2)

    assert_raises(orm_exc.FlushError, s.commit)
    # session must be rolled back before further commits
    assert_raises(sa_exc.InvalidRequestError, s.commit)
    s.rollback()

    # the conflicting objects are gone; the originals reverted
    assert u2 not in s
    assert a2 not in s
    assert u1 in s
    assert a1 in s
    assert u1.name == "ed"
    assert a1.email_address == "foo"

    # redo the legitimate changes and commit
    u1.name = "edward"
    a1.email_address = "foober"
    s.commit()
    eq_(
        s.query(User).all(),
        [User(
            id=1,
            name="edward",
            addresses=[Address(email_address="foober")])],
    )
def test_no_selects(self):
    """Mapping a class directly against a select() construct is
    rejected."""
    Subset, common = self.classes.Subset, self.tables.common
    subset_select = select([common.c.id, common.c.data])
    assert_raises(
        sa.exc.InvalidRequestError,
        mapper,
        Subset,
        subset_select,
    )
def test_can_use_session_in_outer_rollback_hook(self):
    """Inside after_soft_rollback, an active session can be queried and
    reflects the pre-failure committed state."""
    User, users = self.classes.User, self.tables.users
    mapper(User, users)
    sess = Session()

    assertions = []

    @event.listens_for(sess, "after_soft_rollback")
    def do_something(session, previous_transaction):
        # only the outermost rollback leaves the session active
        if session.is_active:
            assertions.append('name' not in u.__dict__)
            assertions.append(u.name == 'u1')

    u = User(name='u1', id=1)
    sess.add(u)
    sess.commit()

    # a duplicate PK makes the next commit's flush fail
    u2 = User(name='u1', id=1)
    sess.add(u2)
    assert_raises(sa.orm.exc.FlushError, sess.commit)
    sess.rollback()

    eq_(assertions, [True, True])
def test_rollback_hook(self):
    """A successful commit then a failed commit + rollback fire the
    session events in exactly this order."""
    User, users = self.classes.User, self.tables.users
    sess, canary = self._listener_fixture()
    mapper(User, users)

    u = User(name='u1', id=1)
    sess.add(u)
    sess.commit()

    # duplicate PK forces the second commit's flush to fail
    u2 = User(name='u1', id=1)
    sess.add(u2)
    assert_raises(sa.orm.exc.FlushError, sess.commit)
    sess.rollback()

    eq_(canary, [
        'before_attach', 'after_attach', 'before_commit', 'before_flush',
        'after_transaction_create', 'after_begin', 'after_flush',
        'after_flush_postexec', 'after_transaction_end', 'after_commit',
        'after_transaction_end', 'after_transaction_create',
        'before_attach', 'after_attach', 'before_commit', 'before_flush',
        'after_transaction_create', 'after_begin', 'after_rollback',
        'after_transaction_end', 'after_soft_rollback',
        'after_transaction_end', 'after_transaction_create',
        'after_soft_rollback'])
def test_explode_in_initializer(self):
    """A dialect initialize() that raises surfaces as DBAPIError, and a
    disconnect-classified failure recreates the pool."""
    engine = engines.testing_engine()

    def broken_initialize(connection):
        connection.execute("select fake_stuff from _fake_table")

    engine.dialect.initialize = broken_initialize

    # raises a DBAPIError, not an AttributeError
    assert_raises(exc.DBAPIError, engine.connect)

    # dispose connections so we get a new one on
    # next go
    engine.dispose()
    original_pool = engine.pool

    def is_disconnect(e, conn, cursor):
        return True

    engine.dialect.is_disconnect = is_disconnect

    # invalidate() also doesn't screw up
    assert_raises(exc.DBAPIError, engine.connect)

    # pool was recreated
    assert engine.pool is not original_pool
def test_bad_args(self):
    """create_engine rejects malformed URLs and unknown keyword
    arguments for each dialect."""
    # malformed URL
    assert_raises(
        exc.ArgumentError, create_engine, 'foobar://',
        module=mock_dbapi)

    # bad arg
    assert_raises(
        TypeError, create_engine, 'postgresql://',
        use_ansi=True, module=mock_dbapi)

    # bad arg
    assert_raises(
        TypeError,
        create_engine,
        'oracle://',
        lala=5,
        use_ansi=True,
        module=mock_dbapi,
    )
    assert_raises(
        TypeError, create_engine, 'postgresql://',
        lala=5, module=mock_dbapi)
    assert_raises(
        TypeError, create_engine, 'sqlite://',
        lala=5, module=mock_sqlite_dbapi)
    assert_raises(
        TypeError, create_engine, 'mysql+mysqldb://',
        use_unicode=True, module=mock_dbapi)
def test_roundtrip(self):
    # Full polymorphic round trip, parameterized by the enclosing
    # fixture's closure variables: ``with_polymorphic`` strategy,
    # ``include_base``, ``lazy_relationship`` and ``redefine_colprop``.

    # build the selectable used for with_polymorphic loading
    if with_polymorphic == 'unions':
        if include_base:
            person_join = polymorphic_union(
                {
                    'engineer': people.join(engineers),
                    'manager': people.join(managers),
                    'person': people.select(people.c.type == 'person'),
                }, None, 'pjoin')
        else:
            person_join = polymorphic_union(
                {
                    'engineer': people.join(engineers),
                    'manager': people.join(managers),
                }, None, 'pjoin')
        manager_join = people.join(managers).outerjoin(boss)
        person_with_polymorphic = ['*', person_join]
        manager_with_polymorphic = ['*', manager_join]
    elif with_polymorphic == 'joins':
        person_join = people.outerjoin(engineers).outerjoin(managers).\
            outerjoin(boss)
        manager_join = people.join(managers).outerjoin(boss)
        person_with_polymorphic = ['*', person_join]
        manager_with_polymorphic = ['*', manager_join]
    elif with_polymorphic == 'auto':
        person_with_polymorphic = '*'
        manager_with_polymorphic = '*'
    else:
        person_with_polymorphic = None
        manager_with_polymorphic = None

    # map the hierarchy; optionally rename the 'name' column property
    if redefine_colprop:
        person_mapper = mapper(
            Person, people,
            with_polymorphic=person_with_polymorphic,
            polymorphic_on=people.c.type,
            polymorphic_identity='person',
            properties={'person_name': people.c.name})
    else:
        person_mapper = mapper(
            Person, people,
            with_polymorphic=person_with_polymorphic,
            polymorphic_on=people.c.type,
            polymorphic_identity='person')

    mapper(Engineer, engineers, inherits=person_mapper,
           polymorphic_identity='engineer')
    mapper(Manager, managers, inherits=person_mapper,
           with_polymorphic=manager_with_polymorphic,
           polymorphic_identity='manager')
    mapper(Boss, boss, inherits=Manager, polymorphic_identity='boss')
    mapper(Company, companies, properties={
        'employees': relationship(
            Person,
            lazy=lazy_relationship,
            cascade="all, delete-orphan",
            backref="company",
            order_by=people.c.person_id)
    })

    if redefine_colprop:
        person_attribute_name = 'person_name'
    else:
        person_attribute_name = 'name'

    employees = [
        Manager(status='AAB', manager_name='manager1',
                **{person_attribute_name: 'pointy haired boss'}),
        Engineer(status='BBA',
                 engineer_name='engineer1',
                 primary_language='java',
                 **{person_attribute_name: 'dilbert'}),
    ]
    if include_base:
        employees.append(Person(**{person_attribute_name: 'joesmith'}))
    employees += [
        Engineer(status='CGG',
                 engineer_name='engineer2',
                 primary_language='python',
                 **{person_attribute_name: 'wally'}),
        Manager(status='ABA', manager_name='manager2',
                **{person_attribute_name: 'jsmith'})
    ]

    pointy = employees[0]
    jsmith = employees[-1]
    dilbert = employees[1]

    session = create_session()
    c = Company(name='company1')
    c.employees = employees
    session.add(c)
    session.flush()
    session.expunge_all()

    # identity-map get and criterion query both find dilbert
    eq_(session.query(Person).get(dilbert.person_id), dilbert)
    session.expunge_all()
    eq_(
        session.query(Person).filter(
            Person.person_id == dilbert.person_id).one(),
        dilbert)
    session.expunge_all()

    def go():
        cc = session.query(Company).get(c.company_id)
        eq_(cc.employees, employees)

    # expected statement counts vary with the loading strategy
    if not lazy_relationship:
        if with_polymorphic != 'none':
            self.assert_sql_count(testing.db, go, 1)
        else:
            self.assert_sql_count(testing.db, go, 5)
    else:
        if with_polymorphic != 'none':
            self.assert_sql_count(testing.db, go, 2)
        else:
            self.assert_sql_count(testing.db, go, 6)

    # test selecting from the query, using the base
    # mapped table (people) as the selection criterion.
    # in the case of the polymorphic Person query,
    # the "people" selectable should be adapted to be "person_join"
    eq_(
        session.query(Person).filter(
            getattr(Person, person_attribute_name) == 'dilbert').first(),
        dilbert)
    assert session.query(Person).filter(
        getattr(Person, person_attribute_name) ==
        'dilbert').first().person_id
    eq_(
        session.query(Engineer).filter(
            getattr(Person, person_attribute_name) == 'dilbert').first(),
        dilbert)

    # test selecting from the query, joining against
    # an alias of the base "people" table.  test that
    # the "palias" alias does *not* get sucked up
    # into the "person_join" conversion.
    palias = people.alias("palias")
    dilbert = session.query(Person).get(dilbert.person_id)
    is_(
        dilbert,
        session.query(Person).filter((palias.c.name == 'dilbert') & (
            palias.c.person_id == Person.person_id)).first())
    is_(
        dilbert,
        session.query(Engineer).filter((palias.c.name == 'dilbert') & (
            palias.c.person_id == Person.person_id)).first())
    is_(
        dilbert,
        session.query(Person).filter(
            (Engineer.engineer_name == "engineer1") &
            (engineers.c.person_id == people.c.person_id)).first())
    is_(
        dilbert,
        session.query(Engineer).filter(
            Engineer.engineer_name == "engineer1")[0])

    session.flush()
    session.expunge_all()

    def go():
        session.query(Person).filter(
            getattr(Person, person_attribute_name) == 'dilbert').first()
    self.assert_sql_count(testing.db, go, 1)
    session.expunge_all()
    dilbert = session.query(Person).filter(
        getattr(Person, person_attribute_name) == 'dilbert').first()

    def go():
        # assert that only primary table is queried for
        # already-present-in-session
        d = session.query(Person).filter(
            getattr(Person, person_attribute_name) == 'dilbert').first()
    self.assert_sql_count(testing.db, go, 1)

    # test standalone orphans
    daboss = Boss(
        status='BBB', manager_name='boss', golf_swing='fore',
        **{person_attribute_name: 'daboss'})
    session.add(daboss)
    # orphan with no company: flush fails at the DB level
    assert_raises(sa_exc.DBAPIError, session.flush)

    c = session.query(Company).first()
    daboss.company = c
    manager_list = [e for e in c.employees if isinstance(e, Manager)]
    session.flush()
    session.expunge_all()

    eq_(
        session.query(Manager).order_by(Manager.person_id).all(),
        manager_list)

    # delete-orphan cascade empties the people table
    c = session.query(Company).first()
    session.delete(c)
    session.flush()
    eq_(select([func.count('*')]).select_from(people).scalar(), 0)
def test_enum(self):
    """Exercise the ENUM type."""
    # NOTE(review): the quoted-literal ENUM constructions emit the
    # 'Manually quoting ENUM value literals' deprecation warning, so the
    # whole table definition stays under expect_deprecated.
    with testing.expect_deprecated('Manually quoting ENUM value literals'):
        e1, e2 = mysql.ENUM("'a'", "'b'"), mysql.ENUM("'a'", "'b'")
        enum_table = Table(
            'mysql_enum', self.metadata,
            Column('e1', e1),
            Column('e2', e2, nullable=False),
            Column('e2generic', Enum("a", "b"), nullable=False),
            Column('e3', mysql.ENUM("'a'", "'b'", strict=True)),
            Column('e4', mysql.ENUM("'a'", "'b'", strict=True),
                   nullable=False),
            Column('e5', mysql.ENUM("a", "b")),
            Column('e5generic', Enum("a", "b")),
            Column('e6', mysql.ENUM("'a'", "b")),
        )

    # DDL rendering: quotes stripped/escaped as appropriate
    eq_(colspec(enum_table.c.e1), "e1 ENUM('a','b')")
    eq_(colspec(enum_table.c.e2), "e2 ENUM('a','b') NOT NULL")
    eq_(colspec(enum_table.c.e2generic),
        "e2generic ENUM('a','b') NOT NULL")
    eq_(colspec(enum_table.c.e3), "e3 ENUM('a','b')")
    eq_(colspec(enum_table.c.e4), "e4 ENUM('a','b') NOT NULL")
    eq_(colspec(enum_table.c.e5), "e5 ENUM('a','b')")
    eq_(colspec(enum_table.c.e5generic), "e5generic ENUM('a','b')")
    eq_(colspec(enum_table.c.e6), "e6 ENUM('''a''','b')")
    enum_table.create()

    # NULL into NOT NULL columns fails at the DB
    assert_raises(
        exc.DBAPIError, enum_table.insert().execute,
        e1=None, e2=None, e3=None, e4=None)

    # value outside the enum fails at the bind-processing level
    assert_raises(
        exc.StatementError, enum_table.insert().execute,
        e1='c', e2='c', e2generic='c', e3='c',
        e4='c', e5='c', e5generic='c', e6='c')

    enum_table.insert().execute()
    enum_table.insert().execute(
        e1='a', e2='a', e2generic='a', e3='a',
        e4='a', e5='a', e5generic='a', e6="'a'")
    enum_table.insert().execute(
        e1='b', e2='b', e2generic='b', e3='b',
        e4='b', e5='b', e5generic='b', e6='b')

    res = enum_table.select().execute().fetchall()

    expected = [(None, 'a', 'a', None, 'a', None, None, None),
                ('a', 'a', 'a', 'a', 'a', 'a', 'a', "'a'"),
                ('b', 'b', 'b', 'b', 'b', 'b', 'b', 'b')]
    eq_(res, expected)
def test_begin_nested_requires_trans(self):
    """begin_nested() outside any transaction (autocommit mode) is an
    InvalidRequestError."""
    sess = create_session(autocommit=True)
    assert_raises(sa_exc.InvalidRequestError, sess.begin_nested)
def test_offset_or_limit_role_only_ints_or_clauseelement(self):
    """limit()/offset() reject plain strings — only integers or clause
    elements are accepted."""
    assert_raises(ValueError, select(t).limit, "some limit")
    assert_raises(ValueError, select(t).offset, "some offset")
def test_reconnect(self):
    """test that an 'is_disconnect' condition will invalidate the
    connection, and additionally dispose the previous connection
    pool and recreate."""
    db_pool = self.db.pool

    # make a connection
    conn = self.db.connect()

    # connection works
    conn.execute(select([1]))

    # create a second connection within the pool, which we'll ensure
    # also goes away
    conn2 = self.db.connect()
    conn2.close()

    # two connections opened total now
    assert len(self.dbapi.connections) == 2

    # set it to fail
    self.dbapi.shutdown()
    assert_raises(tsa.exc.DBAPIError, conn.execute, select([1]))

    # assert was invalidated
    assert not conn.closed
    assert conn.invalidated

    # close shouldn't break
    conn.close()

    # ensure one connection closed...
    eq_(
        [c.close.mock_calls for c in self.dbapi.connections],
        [[call()], []],
    )

    conn = self.db.connect()
    eq_(
        [c.close.mock_calls for c in self.dbapi.connections],
        [[call()], [call()], []],
    )

    conn.execute(select([1]))
    conn.close()
    eq_(
        [c.close.mock_calls for c in self.dbapi.connections],
        [[call()], [call()], []],
    )
def go():
    # the flush is expected to fail with a FlushError
    assert_raises(orm_exc.FlushError, sess.flush)
def test_object_session_raises(self):
    """object_session() raises UnmappedInstanceError for both unmapped
    plain objects and uninstrumented mapped-class instances."""
    User = self.classes.User
    for target in (object(), User()):
        assert_raises(
            orm_exc.UnmappedInstanceError, object_session, target)
def _assert_data_noautoincrement(self, table):
    # Verify insert behavior for a table with NO autoincrement:
    # inserts missing the PK must fail (with a "no default" warning),
    # explicit-PK inserts succeed; then repeat against a reflected
    # copy of the same table.
    engine = engines.testing_engine(options={"implicit_returning": False})

    with engine.connect() as conn:
        conn.execute(table.insert(), {"id": 30, "data": "d1"})

        # single-row insert without PK fails
        with expect_warnings(
                ".*has no Python-side or server-side default.*"):
            assert_raises(
                (exc.IntegrityError, exc.ProgrammingError),
                conn.execute,
                table.insert(),
                {"data": "d2"},
            )
        # multi-row insert without PK fails the same way
        with expect_warnings(
                ".*has no Python-side or server-side default.*"):
            assert_raises(
                (exc.IntegrityError, exc.ProgrammingError),
                conn.execute,
                table.insert(),
                {"data": "d2"},
                {"data": "d3"},
            )
        # NOTE(review): the two blocks below duplicate the two above;
        # present in the original — confirm whether intentional
        # (e.g. verifying failure is repeatable on the same connection).
        with expect_warnings(
                ".*has no Python-side or server-side default.*"):
            assert_raises(
                (exc.IntegrityError, exc.ProgrammingError),
                conn.execute,
                table.insert(),
                {"data": "d2"},
            )
        with expect_warnings(
                ".*has no Python-side or server-side default.*"):
            assert_raises(
                (exc.IntegrityError, exc.ProgrammingError),
                conn.execute,
                table.insert(),
                {"data": "d2"},
                {"data": "d3"},
            )

        # explicit PKs work, both executemany and inline forms
        conn.execute(
            table.insert(),
            {
                "id": 31,
                "data": "d2"
            },
            {
                "id": 32,
                "data": "d3"
            },
        )
        conn.execute(table.insert(inline=True), {"id": 33, "data": "d4"})
        eq_(
            conn.execute(table.select()).fetchall(),
            [(30, "d1"), (31, "d2"), (32, "d3"), (33, "d4")],
        )
        conn.execute(table.delete())

    # test the same series of events using a reflected version of
    # the table
    m2 = MetaData(engine)
    table = Table(table.name, m2, autoload=True)
    with engine.connect() as conn:
        conn.execute(table.insert(), {"id": 30, "data": "d1"})
        with expect_warnings(
                ".*has no Python-side or server-side default.*"):
            assert_raises(
                (exc.IntegrityError, exc.ProgrammingError),
                conn.execute,
                table.insert(),
                {"data": "d2"},
            )
        with expect_warnings(
                ".*has no Python-side or server-side default.*"):
            assert_raises(
                (exc.IntegrityError, exc.ProgrammingError),
                conn.execute,
                table.insert(),
                {"data": "d2"},
                {"data": "d3"},
            )
        conn.execute(
            table.insert(),
            {
                "id": 31,
                "data": "d2"
            },
            {
                "id": 32,
                "data": "d3"
            },
        )
        conn.execute(table.insert(inline=True), {"id": 33, "data": "d4"})
        eq_(
            conn.execute(table.select()).fetchall(),
            [(30, "d1"), (31, "d2"), (32, "d3"), (33, "d4")],
        )
def test_kw_plus_opt_sig(self):
    """Keyword-only parameters may not be filled positionally, even
    partially."""
    cls, canary = self._kw_only_fixture()

    # three positionals: too many
    assert_raises(TypeError, cls, "a", "b", "c")
    # two positionals plus the kw-only as keyword: still too many
    assert_raises(TypeError, cls, "a", "b", c="c")
def x_raises_(obj, method, *args, **kw):
    # record that this method was exercised, then assert it raises
    # UnmappedInstanceError when invoked
    watchdog.add(method)
    bound = getattr(obj, method)
    assert_raises(sa.orm.exc.UnmappedInstanceError, bound, *args, **kw)
def test_scalar_proxy(self):
    # Scalar (uselist=False) association proxies: get/set/del proxy
    # through to the single child object, creating/removing it as
    # needed; a proxy with no creator for its type raises TypeError.
    metadata = self.metadata

    parents_table = Table(
        'Parent', metadata,
        Column('id', Integer, primary_key=True,
               test_needs_autoincrement=True),
        Column('name', String(128)))
    children_table = Table(
        'Children', metadata,
        Column('id', Integer, primary_key=True,
               test_needs_autoincrement=True),
        Column('parent_id', Integer, ForeignKey('Parent.id')),
        Column('foo', String(128)),
        Column('bar', String(128)),
        Column('baz', String(128)))

    class Parent(object):
        # 'foo' has NO creator; 'bar'/'baz' create a Child on demand
        foo = association_proxy('child', 'foo')
        bar = association_proxy('child', 'bar',
                                creator=lambda v: Child(bar=v))
        baz = association_proxy('child', 'baz',
                                creator=lambda v: Child(baz=v))

        def __init__(self, name):
            self.name = name

    class Child(object):
        def __init__(self, **kw):
            for attr in kw:
                setattr(self, attr, kw[attr])

    mapper(Parent, parents_table, properties={
        'child': relationship(Child, lazy='joined',
                              backref='parent', uselist=False)
    })
    mapper(Child, children_table)

    metadata.create_all()
    session = create_session()

    def roundtrip(obj):
        # flush if needed, expunge everything, reload fresh from DB
        if obj not in session:
            session.add(obj)
            session.flush()
        id, type_ = obj.id, type(obj)
        session.expunge_all()
        return session.query(type_).get(id)

    p = Parent('p')

    # no child yet: proxies read as None
    eq_(p.child, None)
    eq_(p.foo, None)

    p.child = Child(foo='a', bar='b', baz='c')
    self.assert_(p.foo == 'a')
    self.assert_(p.bar == 'b')
    self.assert_(p.baz == 'c')

    p.bar = 'x'
    self.assert_(p.foo == 'a')
    self.assert_(p.bar == 'x')
    self.assert_(p.baz == 'c')

    p = roundtrip(p)
    self.assert_(p.foo == 'a')
    self.assert_(p.bar == 'x')
    self.assert_(p.baz == 'c')

    p.child = None
    eq_(p.foo, None)

    # Bogus creator for this scalar type
    assert_raises(TypeError, setattr, p, "foo", "zzz")

    # creator-backed proxies make a fresh Child when none exists
    p.bar = 'yyy'
    self.assert_(p.foo is None)
    self.assert_(p.bar == 'yyy')
    self.assert_(p.baz is None)

    del p.child
    p = roundtrip(p)
    self.assert_(p.child is None)

    p.baz = 'xxx'
    self.assert_(p.foo is None)
    self.assert_(p.bar is None)
    self.assert_(p.baz == 'xxx')

    p = roundtrip(p)
    self.assert_(p.foo is None)
    self.assert_(p.bar is None)
    self.assert_(p.baz == 'xxx')

    # Ensure an immediate __set__ works.
    p2 = Parent('p2')
    p2.bar = 'quux'
def test_connect_error(self):
    """AssertionPool allows exactly one checked-out connection; a
    second connect() asserts."""
    dbapi = MockDBAPI()
    p = pool.AssertionPool(creator=lambda: dbapi.connect('foo.db'))
    first = p.connect()
    assert_raises(AssertionError, p.connect)
def test_mapping_ops(self):
    # Dict-style association proxy: ``children`` proxies the keyed
    # ``_children`` collection of Child objects to their scalar values;
    # mutations through either side stay in sync, and only dict-like
    # assignment is accepted.
    Parent, Child = self.Parent, self.Child

    p1 = Parent('P1')

    self.assert_(not p1._children)
    self.assert_(not p1.children)

    # append directly to the underlying collection
    ch = Child('a', 'regular')
    p1._children.append(ch)

    self.assert_(ch in list(p1._children.values()))
    self.assert_(len(p1._children) == 1)

    self.assert_(p1.children)
    self.assert_(len(p1.children) == 1)
    # the proxy exposes values, not Child instances
    self.assert_(ch not in p1.children)
    self.assert_('a' in p1.children)
    self.assert_(p1.children['a'] == 'regular')
    self.assert_(p1._children['a'] == ch)

    # set through the proxy: a Child is created behind the scenes
    p1.children['b'] = 'proxied'

    self.assert_('proxied' in list(p1.children.values()))
    self.assert_('b' in p1.children)
    self.assert_('proxied' not in p1._children)
    self.assert_(len(p1.children) == 2)
    self.assert_(len(p1._children) == 2)

    self.assert_(p1._children['a'].name == 'regular')
    self.assert_(p1._children['b'].name == 'proxied')

    # deletions on either side propagate
    del p1._children['b']

    self.assert_(len(p1._children) == 1)
    self.assert_(len(p1.children) == 1)
    self.assert_(p1._children['a'] == ch)

    del p1.children['a']

    self.assert_(len(p1._children) == 0)
    self.assert_(len(p1.children) == 0)

    # bulk assignment replaces the whole collection
    p1.children = {'d': 'v d', 'e': 'v e', 'f': 'v f'}

    self.assert_(len(p1._children) == 3)
    self.assert_(len(p1.children) == 3)
    self.assert_(set(p1.children) == set(['d', 'e', 'f']))

    del ch
    p1 = self.roundtrip(p1)
    self.assert_(len(p1._children) == 3)
    self.assert_(len(p1.children) == 3)

    # in-place value change survives a DB round trip on the same Child
    p1.children['e'] = 'changed-in-place'
    self.assert_(p1.children['e'] == 'changed-in-place')
    inplace_id = p1._children['e'].id
    p1 = self.roundtrip(p1)
    self.assert_(p1.children['e'] == 'changed-in-place')
    self.assert_(p1._children['e'].id == inplace_id)

    p1._children = {}
    self.assert_(len(p1.children) == 0)

    # non-dict assignment to the underlying collection is rejected
    try:
        p1._children = []
        self.assert_(False)
    except TypeError:
        self.assert_(True)

    try:
        p1._children = None
        self.assert_(False)
    except TypeError:
        self.assert_(True)

    # the dict proxy is not iterable-as-a-set
    assert_raises(TypeError, set, [p1.children])
def fails(method, attr):
    """Assert that ``manager.<method>(attr, property())`` raises KeyError."""
    target = getattr(manager, method)
    return assert_raises(KeyError, target, attr, property())
def test_filter_collection_has_fails_ul_nul(self):
    """has() is rejected on a collection-valued (uselist) proxy."""
    User = self.classes.User
    build_criterion = lambda: User.keywords.has(keyword='quick')
    assert_raises(exc.InvalidRequestError, build_criterion)
def test_set_operations(self):
    """Exercise a set-backed association proxy (``Parent.children``)
    against its underlying mapped set (``Parent._children``): add,
    remove, discard, pop, bulk replacement, persistence roundtrips,
    and rejection of non-set assignment to the raw collection.
    """
    Parent, Child = self.Parent, self.Child
    p1 = Parent('P1')
    # both the raw set and the proxy start empty
    self.assert_(not p1._children)
    self.assert_(not p1.children)
    ch1 = Child('regular')
    p1._children.add(ch1)
    self.assert_(ch1 in p1._children)
    self.assert_(len(p1._children) == 1)
    self.assert_(p1.children)
    self.assert_(len(p1.children) == 1)
    # the proxy holds proxied values, not Child objects
    self.assert_(ch1 not in p1.children)
    self.assert_('regular' in p1.children)
    # adding through the proxy creates a backing Child
    p1.children.add('proxied')
    self.assert_('proxied' in p1.children)
    self.assert_('proxied' not in p1._children)
    self.assert_(len(p1.children) == 2)
    self.assert_(len(p1._children) == 2)
    self.assert_(
        set([o.name for o in p1._children]) ==
        set(['regular', 'proxied']))
    # locate the Child created via the proxy and remove it directly
    ch2 = None
    for o in p1._children:
        if o.name == 'proxied':
            ch2 = o
            break
    p1._children.remove(ch2)
    self.assert_(len(p1._children) == 1)
    self.assert_(len(p1.children) == 1)
    self.assert_(p1._children == set([ch1]))
    # removal through the proxy removes the backing Child too
    p1.children.remove('regular')
    self.assert_(len(p1._children) == 0)
    self.assert_(len(p1.children) == 0)
    # bulk replacement (any iterable is accepted by the proxy)
    p1.children = ['a', 'b', 'c']
    self.assert_(len(p1._children) == 3)
    self.assert_(len(p1.children) == 3)
    del ch1
    # persist and reload; contents must survive the roundtrip
    p1 = self.roundtrip(p1)
    self.assert_(len(p1._children) == 3)
    self.assert_(len(p1.children) == 3)
    self.assert_('a' in p1.children)
    self.assert_('b' in p1.children)
    self.assert_('d' not in p1.children)
    self.assert_(p1.children == set(['a', 'b', 'c']))
    # remove() of a missing member raises; discard() does not
    assert_raises(KeyError, p1.children.remove, "d")
    self.assert_(len(p1.children) == 3)
    p1.children.discard('d')
    self.assert_(len(p1.children) == 3)
    p1 = self.roundtrip(p1)
    self.assert_(len(p1.children) == 3)
    # pop() removes an arbitrary member and it stays gone after reload
    popped = p1.children.pop()
    self.assert_(len(p1.children) == 2)
    self.assert_(popped not in p1.children)
    p1 = self.roundtrip(p1)
    self.assert_(len(p1.children) == 2)
    self.assert_(popped not in p1.children)
    p1.children = ['a', 'b', 'c']
    p1 = self.roundtrip(p1)
    self.assert_(p1.children == set(['a', 'b', 'c']))
    # discard and remove both persist across roundtrips
    p1.children.discard('b')
    p1 = self.roundtrip(p1)
    self.assert_(p1.children == set(['a', 'c']))
    p1.children.remove('a')
    p1 = self.roundtrip(p1)
    self.assert_(p1.children == set(['c']))
    # clearing the raw set empties the proxy
    p1._children = set()
    self.assert_(len(p1.children) == 0)
    # the raw collection only accepts set-like assignment
    try:
        p1._children = []
        self.assert_(False)
    except TypeError:
        self.assert_(True)
    try:
        p1._children = None
        self.assert_(False)
    except TypeError:
        self.assert_(True)
    # the proxy object itself is unhashable
    assert_raises(TypeError, set, [p1.children])
def test_filter_scalar_contains_fails_nul_nul(self):
    """contains() is rejected on a scalar (non-uselist) proxy."""
    Keyword = self.classes.Keyword
    build_criterion = lambda: Keyword.user.contains(self.u)
    assert_raises(exc.InvalidRequestError, build_criterion)
def test_filter_collection_ne_fails_ul_nul(self):
    """``!=`` comparison is rejected on a collection-valued proxy."""
    User = self.classes.User
    build_criterion = lambda: User.keywords != self.kw
    assert_raises(exc.InvalidRequestError, build_criterion)
def _test_sequence_ops(self):
    """Shared scenario for a list-backed association proxy
    (``Parent.children``) over a mapped list (``Parent._children``):
    indexing, slicing (including extended slices), in-place and binary
    list operators, persistence roundtrips, and rejection of non-list
    assignment to the raw collection.
    """
    Parent, Child = self.Parent, self.Child
    p1 = Parent('P1')
    # both the raw list and the proxy start empty
    self.assert_(not p1._children)
    self.assert_(not p1.children)
    ch = Child('regular')
    p1._children.append(ch)
    self.assert_(ch in p1._children)
    self.assert_(len(p1._children) == 1)
    self.assert_(p1.children)
    self.assert_(len(p1.children) == 1)
    # the proxy exposes proxied values, not Child objects
    self.assert_(ch not in p1.children)
    self.assert_('regular' in p1.children)
    # appending through the proxy creates a backing Child
    p1.children.append('proxied')
    self.assert_('proxied' in p1.children)
    self.assert_('proxied' not in p1._children)
    self.assert_(len(p1.children) == 2)
    self.assert_(len(p1._children) == 2)
    # insertion order is preserved in the raw list
    self.assert_(p1._children[0].name == 'regular')
    self.assert_(p1._children[1].name == 'proxied')
    # positional deletion on either side stays in sync
    del p1._children[1]
    self.assert_(len(p1._children) == 1)
    self.assert_(len(p1.children) == 1)
    self.assert_(p1._children[0] == ch)
    del p1.children[0]
    self.assert_(len(p1._children) == 0)
    self.assert_(len(p1.children) == 0)
    # bulk replacement through the proxy
    p1.children = ['a', 'b', 'c']
    self.assert_(len(p1._children) == 3)
    self.assert_(len(p1.children) == 3)
    del ch
    # persist and reload; contents must survive the roundtrip
    p1 = self.roundtrip(p1)
    self.assert_(len(p1._children) == 3)
    self.assert_(len(p1.children) == 3)
    # pop() removes the element and it stays gone after reload
    popped = p1.children.pop()
    self.assert_(len(p1.children) == 2)
    self.assert_(popped not in p1.children)
    p1 = self.roundtrip(p1)
    self.assert_(len(p1.children) == 2)
    self.assert_(popped not in p1.children)
    # item assignment mutates the existing Child in place
    p1.children[1] = 'changed-in-place'
    self.assert_(p1.children[1] == 'changed-in-place')
    inplace_id = p1._children[1].id
    p1 = self.roundtrip(p1)
    self.assert_(p1.children[1] == 'changed-in-place')
    # same database identity after the roundtrip => no delete/recreate
    assert p1._children[1].id == inplace_id
    # duplicates are allowed; remove() drops one occurrence
    p1.children.append('changed-in-place')
    self.assert_(p1.children.count('changed-in-place') == 2)
    p1.children.remove('changed-in-place')
    self.assert_(p1.children.count('changed-in-place') == 1)
    p1 = self.roundtrip(p1)
    self.assert_(p1.children.count('changed-in-place') == 1)
    # clearing the raw list empties the proxy
    p1._children = []
    self.assert_(len(p1.children) == 0)
    # --- slice semantics ---
    after = ['a', 'b', 'c', 'd', 'e', 'f', 'g', 'h', 'i', 'j']
    p1.children = ['a', 'b', 'c', 'd', 'e', 'f', 'g', 'h', 'i', 'j']
    self.assert_(len(p1.children) == 10)
    self.assert_([c.name for c in p1._children] == after)
    # equal-length slice assignment
    p1.children[2:6] = ['x'] * 4
    after = ['a', 'b', 'x', 'x', 'x', 'x', 'g', 'h', 'i', 'j']
    self.assert_(p1.children == after)
    self.assert_([c.name for c in p1._children] == after)
    # shrinking slice assignment
    p1.children[2:6] = ['y']
    after = ['a', 'b', 'y', 'g', 'h', 'i', 'j']
    self.assert_(p1.children == after)
    self.assert_([c.name for c in p1._children] == after)
    # growing slice assignment
    p1.children[2:3] = ['z'] * 4
    after = ['a', 'b', 'z', 'z', 'z', 'z', 'g', 'h', 'i', 'j']
    self.assert_(p1.children == after)
    self.assert_([c.name for c in p1._children] == after)
    # extended (stepped) slice assignment
    p1.children[2::2] = ['O'] * 4
    after = ['a', 'b', 'O', 'z', 'O', 'z', 'O', 'h', 'O', 'j']
    self.assert_(p1.children == after)
    self.assert_([c.name for c in p1._children] == after)
    # the proxy object itself is unhashable
    assert_raises(TypeError, set, [p1.children])
    # --- in-place operators ---
    p1.children *= 0
    after = []
    self.assert_(p1.children == after)
    self.assert_([c.name for c in p1._children] == after)
    p1.children += ['a', 'b']
    after = ['a', 'b']
    self.assert_(p1.children == after)
    self.assert_([c.name for c in p1._children] == after)
    p1.children += ['c']
    after = ['a', 'b', 'c']
    self.assert_(p1.children == after)
    self.assert_([c.name for c in p1._children] == after)
    p1.children *= 1
    after = ['a', 'b', 'c']
    self.assert_(p1.children == after)
    self.assert_([c.name for c in p1._children] == after)
    p1.children *= 2
    after = ['a', 'b', 'c', 'a', 'b', 'c']
    self.assert_(p1.children == after)
    self.assert_([c.name for c in p1._children] == after)
    p1.children = ['a']
    after = ['a']
    self.assert_(p1.children == after)
    self.assert_([c.name for c in p1._children] == after)
    # --- binary operators (produce plain lists, no mutation) ---
    self.assert_((p1.children * 2) == ['a', 'a'])
    self.assert_((2 * p1.children) == ['a', 'a'])
    self.assert_((p1.children * 0) == [])
    self.assert_((0 * p1.children) == [])
    self.assert_((p1.children + ['b']) == ['a', 'b'])
    self.assert_((['b'] + p1.children) == ['b', 'a'])
    # adding a non-list raises TypeError, like a plain list
    try:
        p1.children + 123
        assert False
    except TypeError:
        assert True
def test_filter_scalar_any_fails_nul_nul(self):
    """any() is rejected on a scalar (non-uselist) proxy."""
    Keyword = self.classes.Keyword
    build_criterion = lambda: Keyword.user.any(name='user2')
    assert_raises(exc.InvalidRequestError, build_criterion)