def define_tables(cls, metadata):
    """Create the ``users`` / ``addresses`` tables with backend-appropriate
    foreign key options."""
    # Oracle wants a deferrable FK here; MySQL takes no extra options;
    # everywhere else exercise ON UPDATE CASCADE.
    if testing.against("oracle"):
        fk_args = {"deferrable": True, "initially": "deferred"}
    elif testing.against("mysql"):
        fk_args = {}
    else:
        fk_args = {"onupdate": "cascade"}

    Table(
        "users",
        metadata,
        Column("id", Integer, primary_key=True,
               test_needs_autoincrement=True),
    )
    Table(
        "addresses",
        metadata,
        Column("id", Integer, primary_key=True,
               test_needs_autoincrement=True),
        Column("user_id", Integer, ForeignKey("users.id", **fk_args)),
    )
def define_tables(cls, metadata):
    """Set up ``users`` and ``addresses`` with per-backend FK keyword args."""
    # pick FK options by backend: deferrable on Oracle, none on MySQL,
    # ON UPDATE CASCADE elsewhere
    fk_args = {'onupdate': 'cascade'}
    if testing.against('oracle'):
        fk_args = {'deferrable': True, 'initially': 'deferred'}
    elif testing.against('mysql'):
        fk_args = {}

    Table(
        'users', metadata,
        Column('id', Integer, primary_key=True,
               test_needs_autoincrement=True))
    Table(
        'addresses', metadata,
        Column('id', Integer, primary_key=True,
               test_needs_autoincrement=True),
        Column('user_id', Integer, ForeignKey('users.id', **fk_args)))
def _non_default_isolation_level(self):
    """Return an isolation level that differs from the backend default."""
    # table of a known non-default level per backend; dicts preserve
    # insertion order so the lookup sequence matches the original chain
    levels = {
        'sqlite': 'READ UNCOMMITTED',
        'postgresql': 'SERIALIZABLE',
        'mysql': 'SERIALIZABLE',
    }
    for backend, level in levels.items():
        if testing.against(backend):
            return level
    assert False, "non default isolation level not known"
def _non_default_isolation_level(self):
    """Pick an isolation level known to differ from the backend default."""
    # guard-clause style: return as soon as a backend matches
    if testing.against("sqlite"):
        return "READ UNCOMMITTED"
    if testing.against("postgresql") or testing.against("mysql"):
        return "SERIALIZABLE"
    assert False, "non default isolation level not known"
def _non_default_isolation_level(self):
    """Return a non-default isolation level for the current backend."""
    # SQLite is the only backend here not using SERIALIZABLE
    if testing.against("sqlite"):
        return "READ UNCOMMITTED"
    for serializable_backend in ("postgresql", "mysql", "mssql"):
        if testing.against(serializable_backend):
            return "SERIALIZABLE"
    assert False, "non default isolation level not known"
def test_set(self):
    """Round-trip MySQL SET columns, including the deprecated
    manually-quoted value-literal form."""
    # building SET types with pre-quoted literals must emit the
    # deprecation warning; e4/e5 inside the Table() call also use
    # manual quoting, so the construction sits inside the context
    with testing.expect_deprecated('Manually quoting SET value literals'):
        e1, e2 = mysql.SET("'a'", "'b'"), mysql.SET("'a'", "'b'")
        set_table = Table(
            'mysql_set', self.metadata,
            Column('e1', e1),
            Column('e2', e2, nullable=False),
            Column('e3', mysql.SET("a", "b")),
            Column('e4', mysql.SET("'a'", "b")),
            Column('e5', mysql.SET("'a'", "'b'", quoting="quoted")))
    # DDL output: e4's embedded quotes get escaped, e5's survive as-is
    eq_(colspec(set_table.c.e1), "e1 SET('a','b')")
    eq_(colspec(set_table.c.e2), "e2 SET('a','b') NOT NULL")
    eq_(colspec(set_table.c.e3), "e3 SET('a','b')")
    eq_(colspec(set_table.c.e4), "e4 SET('''a''','b')")
    eq_(colspec(set_table.c.e5), "e5 SET('a','b')")
    set_table.create()
    # NOT NULL column e2 rejects an all-None row at the DBAPI level
    assert_raises(exc.DBAPIError, set_table.insert().execute,
                  e1=None, e2=None, e3=None, e4=None)
    if testing.against("+oursql"):
        # oursql raises client-side for values outside the SET
        assert_raises(exc.StatementError, set_table.insert().execute,
                      e1='c', e2='c', e3='c', e4='c')
    set_table.insert().execute(e1='a', e2='a', e3='a', e4="'a'", e5="a,b")
    set_table.insert().execute(e1='b', e2='b', e3='b', e4='b', e5="a,b")
    res = set_table.select().execute().fetchall()
    if testing.against("+oursql"):
        expected = [
            # 1st row with all c's, data truncated
            (set(['']), set(['']), set(['']), set(['']), None),
        ]
    else:
        expected = []
    expected.extend([
        (set(['a']), set(['a']), set(['a']), set(["'a'"]),
         set(['a', 'b'])),
        (set(['b']), set(['b']), set(['b']), set(['b']),
         set(['a', 'b']))
    ])
    eq_(res, expected)
def test_set(self):
    """Round-trip MySQL SET columns; result assertions are skipped
    entirely on oursql (see inline note)."""
    # all manually-quoted SET constructions live inside the
    # deprecation-warning context
    with testing.expect_deprecated('Manually quoting SET value literals'):
        e1, e2 = mysql.SET("'a'", "'b'"), mysql.SET("'a'", "'b'")
        e4 = mysql.SET("'a'", "b")
        e5 = mysql.SET("'a'", "'b'", quoting="quoted")
    set_table = Table('mysql_set', self.metadata,
                      Column('e1', e1),
                      Column('e2', e2, nullable=False),
                      Column('e3', mysql.SET("a", "b")),
                      Column('e4', e4),
                      Column('e5', e5)
                      )
    # DDL output: e4's embedded quotes get escaped, e5's survive as-is
    eq_(colspec(set_table.c.e1), "e1 SET('a','b')")
    eq_(colspec(set_table.c.e2), "e2 SET('a','b') NOT NULL")
    eq_(colspec(set_table.c.e3), "e3 SET('a','b')")
    eq_(colspec(set_table.c.e4), "e4 SET('''a''','b')")
    eq_(colspec(set_table.c.e5), "e5 SET('a','b')")
    set_table.create()
    # NOT NULL column e2 rejects an all-None row at the DBAPI level
    assert_raises(exc.DBAPIError, set_table.insert().execute,
                  e1=None, e2=None, e3=None, e4=None)
    if testing.against("+oursql"):
        # oursql raises client-side for values outside the SET
        assert_raises(exc.StatementError, set_table.insert().execute,
                      e1='c', e2='c', e3='c', e4='c')
    set_table.insert().execute(e1='a', e2='a', e3='a', e4="'a'", e5="a,b")
    set_table.insert().execute(e1='b', e2='b', e3='b', e4='b', e5="a,b")
    res = set_table.select().execute().fetchall()
    if not testing.against("+oursql"):
        # oursql receives this for first row:
        # (set(['']), set(['']), set(['']), set(['']), None),
        # but based on ...OS? MySQL version? not clear.
        # not worth testing.
        expected = []
        expected.extend([
            (set(['a']), set(['a']), set(['a']), set(["'a'"]),
             set(['a', 'b'])),
            (set(['b']), set(['b']), set(['b']), set(['b']),
             set(['a', 'b']))
        ])
        eq_(res, expected)
def test_executemany():
    """RETURNING combined with executemany: each driver family reports a
    different subset of the inserted rows."""
    # return value is documented as failing with psycopg2/executemany
    rows = table.insert().returning(table).execute(
        [{'persons': 2, 'full': False}, {'persons': 3, 'full': True}])
    if testing.against('mssql+zxjdbc'):
        # jtds apparently returns only the first row
        eq_(rows.fetchall(), [(2, 2, False, None)])
    elif testing.against('firebird', 'mssql', 'oracle'):
        # Multiple inserts only return the last row
        eq_(rows.fetchall(), [(3, 3, True, None)])
    else:
        # nobody does this as far as we know (pg8000?)
        eq_(rows.fetchall(),
            [(2, 2, False, None), (3, 3, True, None)])
def setup_class(cls):
    """Create unicode-named tables, columns and indexes, limited to
    whatever casing / multibyte support the current backend offers."""
    # trigger mysql _server_casing check...
    testing.db.connect().close()
    cls.bind = bind = engines.utf8_engine(
        options={"convert_unicode": True})
    cls.metadata = metadata = MetaData()

    # plain-ASCII fallback name set usable on every backend
    no_multibyte_period = set([(u"plain", u"col_plain", u"ix_plain")])
    # names that break has_table() on some backends (see mssql note below)
    no_has_table = [
        (u"no_has_table_1", u"col_Unit\u00e9ble", u"ix_Unit\u00e9ble"),
        (u"no_has_table_2", u"col_\u6e2c\u8a66", u"ix_\u6e2c\u8a66"),
    ]
    # lower-case-only variants for case-insensitive backends
    no_case_sensitivity = [
        (u"\u6e2c\u8a66", u"col_\u6e2c\u8a66", u"ix_\u6e2c\u8a66"),
        (u"unit\u00e9ble", u"col_unit\u00e9ble", u"ix_unit\u00e9ble"),
    ]
    # full mixed-case multibyte set for capable backends
    full = [
        (u"Unit\u00e9ble", u"col_Unit\u00e9ble", u"ix_Unit\u00e9ble"),
        (u"\u6e2c\u8a66", u"col_\u6e2c\u8a66", u"ix_\u6e2c\u8a66"),
    ]
    # as you can see, our options for this kind of thing
    # are really limited unless you're on PG or SQLite

    # forget about it on these backends
    if testing.against("sybase", "maxdb", "oracle"):
        names = no_multibyte_period
    # mysql can't handle casing usually
    elif testing.against("mysql") and \
            not testing.requires._has_mysql_fully_case_sensitive():
        names = no_multibyte_period.union(no_case_sensitivity)
    # mssql + pyodbc + freetds can't compare multibyte names to
    # information_schema.tables.table_name
    elif testing.against("mssql"):
        names = no_multibyte_period.union(no_has_table)
    else:
        names = no_multibyte_period.union(full)

    for tname, cname, ixname in names:
        t = Table(
            tname,
            metadata,
            Column("id", sa.Integer, sa.Sequence(cname + "_id_seq"),
                   primary_key=True),
            Column(cname, Integer),
        )
        schema.Index(ixname, t.c[cname])

    metadata.create_all(bind)
    cls.names = names
def define_tables(cls, metadata):
    """Define ``users``/``addresses`` with FK options chosen per backend."""
    # backend-specific FK behavior: deferrable on Oracle, bare on MySQL,
    # cascading updates elsewhere
    if testing.against('oracle'):
        fk_args = {'deferrable': True, 'initially': 'deferred'}
    elif testing.against('mysql'):
        fk_args = {}
    else:
        fk_args = {'onupdate': 'cascade'}

    Table(
        'users',
        metadata,
        Column('id', Integer, primary_key=True,
               test_needs_autoincrement=True),
    )
    Table(
        'addresses',
        metadata,
        Column('id', Integer, primary_key=True,
               test_needs_autoincrement=True),
        Column('user_id', Integer, ForeignKey('users.id', **fk_args)),
    )
def test_literal_returning(self, connection):
    """Raw INSERT ... RETURNING via exec_driver_sql with dialect-specific
    quoting and boolean literals."""
    # MariaDB quotes identifiers with backticks; others use double quotes
    quote = "`" if testing.against("mariadb") else '"'
    # only PostgreSQL accepts a true boolean literal here
    literal_true = "true" if testing.against("postgresql") else "1"

    result4 = connection.exec_driver_sql(
        "insert into tables (id, persons, %sfull%s) "
        "values (5, 10, %s) returning persons"
        % (quote, quote, literal_true))
    eq_([dict(row._mapping) for row in result4], [{"persons": 10}])
def define_tables(cls, metadata):
    """Build the users/addresses pair; FK keyword args depend on backend."""
    # Oracle: deferrable FK; MySQL: no options; otherwise ON UPDATE CASCADE
    fk_args = dict(onupdate="cascade")
    if testing.against("oracle"):
        fk_args = dict(deferrable=True, initially="deferred")
    elif testing.against("mysql"):
        fk_args = dict()

    Table("users", metadata,
          Column("id", Integer, primary_key=True,
                 test_needs_autoincrement=True))
    Table("addresses", metadata,
          Column("id", Integer, primary_key=True,
                 test_needs_autoincrement=True),
          Column("user_id", Integer, ForeignKey("users.id", **fk_args)))
def test_set(self):
    """Round-trip MySQL SET columns, including deprecated manually
    quoted value literals (double-quoted-string variant of the test)."""
    # manual quoting in SET literals is deprecated; the Table() call
    # also constructs quoted SETs (e4/e5), so it sits inside the context
    with testing.expect_deprecated("Manually quoting SET value literals"):
        e1, e2 = mysql.SET("'a'", "'b'"), mysql.SET("'a'", "'b'")
        set_table = Table(
            "mysql_set",
            self.metadata,
            Column("e1", e1),
            Column("e2", e2, nullable=False),
            Column("e3", mysql.SET("a", "b")),
            Column("e4", mysql.SET("'a'", "b")),
            Column("e5", mysql.SET("'a'", "'b'", quoting="quoted")),
        )
    # DDL: e4's embedded quotes are escaped, e5's pass through verbatim
    eq_(colspec(set_table.c.e1), "e1 SET('a','b')")
    eq_(colspec(set_table.c.e2), "e2 SET('a','b') NOT NULL")
    eq_(colspec(set_table.c.e3), "e3 SET('a','b')")
    eq_(colspec(set_table.c.e4), "e4 SET('''a''','b')")
    eq_(colspec(set_table.c.e5), "e5 SET('a','b')")
    set_table.create()
    # NOT NULL e2 rejects the all-None row
    assert_raises(exc.DBAPIError, set_table.insert().execute,
                  e1=None, e2=None, e3=None, e4=None)
    if testing.against("+oursql"):
        # oursql raises client-side for values outside the SET
        assert_raises(exc.StatementError, set_table.insert().execute,
                      e1="c", e2="c", e3="c", e4="c")
    set_table.insert().execute(e1="a", e2="a", e3="a", e4="'a'", e5="a,b")
    set_table.insert().execute(e1="b", e2="b", e3="b", e4="b", e5="a,b")
    res = set_table.select().execute().fetchall()
    if testing.against("+oursql"):
        expected = [
            # 1st row with all c's, data truncated
            (set([""]), set([""]), set([""]), set([""]), None)
        ]
    else:
        expected = []
    expected.extend(
        [
            (set(["a"]), set(["a"]), set(["a"]), set(["'a'"]),
             set(["a", "b"])),
            (set(["b"]), set(["b"]), set(["b"]), set(["b"]),
             set(["a", "b"])),
        ]
    )
    eq_(res, expected)
def test_int_default_none_on_insert_reflected(self):
    """Inserting into a reflected table with a server-default PK and no
    implicit RETURNING yields an unknown (None) inserted PK."""
    metadata = self.metadata
    Table(
        'x', metadata,
        Column('y', Integer, server_default='5', primary_key=True),
        Column('data', String(10)),
        implicit_returning=False,
    )
    metadata.create_all()

    reflected_meta = MetaData(metadata.bind)
    reflected = Table('x', reflected_meta, autoload=True,
                      implicit_returning=False)
    r = reflected.insert().execute(data='data')
    # the PK can't be post-fetched without RETURNING
    eq_(r.inserted_primary_key, [None])

    # sqlite ignores the server default for an INTEGER PRIMARY KEY
    # and autoincrements instead
    expected_row = (1, 'data') if testing.against('sqlite') else (5, 'data')
    eq_(reflected.select().execute().fetchall(), [expected_row])
def define_tables(cls, metadata):
    """Joined-inheritance fixture: person base plus engineer/manager
    subtables keyed on the name column."""
    # Oracle needs deferrable FKs; everyone else gets ON UPDATE CASCADE
    if testing.against('oracle'):
        fk_args = {'deferrable': True, 'initially': 'deferred'}
    else:
        fk_args = {'onupdate': 'cascade'}

    Table(
        'person', metadata,
        Column('name', String(50), primary_key=True),
        Column('type', String(50), nullable=False),
        test_needs_fk=True,
    )
    Table(
        'engineer', metadata,
        Column('name', String(50),
               ForeignKey('person.name', **fk_args), primary_key=True),
        Column('primary_language', String(50)),
        # engineers reference their manager by name
        Column('boss_name', String(50),
               ForeignKey('manager.name', **fk_args)),
        test_needs_fk=True,
    )
    Table(
        'manager', metadata,
        Column('name', String(50),
               ForeignKey('person.name', **fk_args), primary_key=True),
        Column('paperwork', String(50)),
        test_needs_fk=True,
    )
def test_reflect_nvarchar(self, metadata, connection):
    """NVARCHAR/NCHAR columns reflect back as the same generic types and
    round-trip unicode data."""
    Table(
        "tnv",
        metadata,
        Column("nv_data", sqltypes.NVARCHAR(255)),
        Column("c_data", sqltypes.NCHAR(20)),
    )
    metadata.create_all(connection)

    reflected_meta = MetaData()
    tnv = Table("tnv", reflected_meta, autoload_with=connection)
    assert isinstance(tnv.c.nv_data.type, sqltypes.NVARCHAR)
    assert isinstance(tnv.c.c_data.type, sqltypes.NCHAR)

    if testing.against("oracle+cx_oracle"):
        # cx_oracle applies its own unicode-aware impl types
        dialect = connection.dialect
        assert isinstance(
            tnv.c.nv_data.type.dialect_impl(dialect),
            cx_oracle._OracleUnicodeStringNCHAR,
        )
        assert isinstance(
            tnv.c.c_data.type.dialect_impl(dialect),
            cx_oracle._OracleNChar,
        )

    data = "m’a réveillé."
    connection.execute(tnv.insert(), dict(nv_data=data, c_data=data))
    nv_data, c_data = connection.execute(tnv.select()).first()
    eq_(nv_data, data)
    eq_(c_data, data + (" " * 7))  # char is space padded
    assert isinstance(nv_data, str)
    assert isinstance(c_data, str)
class Address(Base, fixtures.ComparableEntity):
    """Declarative class autoloaded from the ``addresses`` table."""

    __tablename__ = 'addresses'
    __autoload__ = True
    # Oracle/Firebird don't give us a usable autoincrementing integer
    # PK through reflection, so declare the column explicitly there
    if testing.against('oracle', 'firebird'):
        id = Column('id', Integer, primary_key=True,
                    test_needs_autoincrement=True)
def define_tables(cls, metadata):
    """Natural-primary-key fixture: users, addresses, items and the
    users<->items association table."""
    # Oracle needs deferrable FKs; everyone else cascades on update
    if testing.against('oracle'):
        fk_args = {'deferrable': True, 'initially': 'deferred'}
    else:
        fk_args = {'onupdate': 'cascade'}

    users = Table(
        'users', metadata,
        Column('username', String(50), primary_key=True),
        Column('fullname', String(100)),
        test_needs_fk=True,
    )
    addresses = Table(
        'addresses', metadata,
        Column('email', String(50), primary_key=True),
        Column('username', String(50),
               ForeignKey('users.username', **fk_args)),
        test_needs_fk=True,
    )
    items = Table(
        'items', metadata,
        Column('itemname', String(50), primary_key=True),
        Column('description', String(100)),
        test_needs_fk=True,
    )
    # many-to-many association with a composite natural PK
    users_to_items = Table(
        'users_to_items', metadata,
        Column('username', String(50),
               ForeignKey('users.username', **fk_args),
               primary_key=True),
        Column('itemname', String(50),
               ForeignKey('items.itemname', **fk_args),
               primary_key=True),
        test_needs_fk=True,
    )
def test_reflect_nvarchar(self):
    """NVARCHAR/NCHAR reflect to the same generic types and round-trip
    unicode data (legacy bound-MetaData variant)."""
    metadata = self.metadata
    Table(
        "tnv",
        metadata,
        Column("nv_data", sqltypes.NVARCHAR(255)),
        Column("c_data", sqltypes.NCHAR(20)),
    )
    metadata.create_all()
    m2 = MetaData(testing.db)
    t2 = Table("tnv", m2, autoload=True)
    assert isinstance(t2.c.nv_data.type, sqltypes.NVARCHAR)
    assert isinstance(t2.c.c_data.type, sqltypes.NCHAR)

    if testing.against("oracle+cx_oracle"):
        # cx_oracle substitutes its own unicode-aware impl types
        assert isinstance(
            t2.c.nv_data.type.dialect_impl(testing.db.dialect),
            cx_oracle._OracleUnicodeStringNCHAR,
        )
        assert isinstance(
            t2.c.c_data.type.dialect_impl(testing.db.dialect),
            cx_oracle._OracleNChar,
        )

    data = u("m’a réveillé.")
    with testing.db.connect() as conn:
        conn.execute(t2.insert(), dict(nv_data=data, c_data=data))
        nv_data, c_data = conn.execute(t2.select()).first()
        eq_(nv_data, data)
        eq_(c_data, data + (" " * 7))  # char is space padded
        assert isinstance(nv_data, util.text_type)
        assert isinstance(c_data, util.text_type)
def define_temp_tables(cls, metadata):
    """Create the temp-table fixture with HANA-compatible DDL."""
    # the definition of temporary tables in the temporary table tests
    # needs to be overwritten, because similar to oracle, in HANA one
    # needs to mention GLOBAL or LOCAL in the temporary table definition
    prefix = "GLOBAL TEMPORARY" if testing.against("hana") else "TEMPORARY"
    kw = {'prefixes': [prefix]}

    user_tmp = Table(
        "user_tmp", metadata,
        Column("id", sa.INT, primary_key=True),
        Column('name', sa.VARCHAR(50)),
        Column('foo', sa.INT),
        sa.UniqueConstraint('name', name='user_tmp_uq'),
        sa.Index("user_tmp_ix", "foo"),
        **kw)

    # optionally attach a temp view over the table for view-reflection tests
    if testing.requires.view_reflection.enabled and \
            testing.requires.temporary_views.enabled:
        event.listen(
            user_tmp, "after_create",
            DDL("create temporary view user_tmp_v as "
                "select * from user_tmp"))
        event.listen(user_tmp, "before_drop", DDL("drop view user_tmp_v"))
def test_limit(self):
    """test limit operations combined with lazy-load relationships."""

    users = self.tables.users
    items = self.tables.items
    order_items = self.tables.order_items
    orders = self.tables.orders
    addresses = self.tables.addresses
    Item = self.classes.Item
    User = self.classes.User
    Address = self.classes.Address
    Order = self.classes.Order

    mapper(Item, items)
    mapper(Order, orders, properties={
        'items': relationship(Item, secondary=order_items, lazy='select')
    })
    mapper(User, users, properties={
        'addresses': relationship(
            mapper(Address, addresses), lazy='select'),
        'orders': relationship(Order, lazy='select')
    })

    sess = create_session()
    query = sess.query(User)
    if testing.against('mssql'):
        # mssql can't combine LIMIT with OFFSET here; limit only
        rows = query.limit(2).all()
        assert self.static.user_all_result[:2] == rows
    else:
        rows = query.limit(2).offset(1).all()
        assert self.static.user_all_result[1:3] == rows
def test_reflect_nvarchar(self):
    """NVARCHAR/NCHAR reflection round-trip (variant where both columns
    map to the unicode-string impl under cx_oracle)."""
    metadata = self.metadata
    Table(
        "tnv",
        metadata,
        Column("nv_data", sqltypes.NVARCHAR(255)),
        Column("c_data", sqltypes.NCHAR(20)),
    )
    metadata.create_all()
    m2 = MetaData(testing.db)
    t2 = Table("tnv", m2, autoload=True)
    assert isinstance(t2.c.nv_data.type, sqltypes.NVARCHAR)
    assert isinstance(t2.c.c_data.type, sqltypes.NCHAR)

    if testing.against("oracle+cx_oracle"):
        # NOTE(review): both columns assert _OracleUnicodeStringNCHAR
        # here, whereas a sibling version of this test uses
        # _OracleNChar for c_data — confirm which impl the targeted
        # cx_oracle version actually applies to NCHAR
        assert isinstance(
            t2.c.nv_data.type.dialect_impl(testing.db.dialect),
            cx_oracle._OracleUnicodeStringNCHAR,
        )
        assert isinstance(
            t2.c.c_data.type.dialect_impl(testing.db.dialect),
            cx_oracle._OracleUnicodeStringNCHAR,
        )

    data = u("m’a réveillé.")
    with testing.db.connect() as conn:
        conn.execute(t2.insert(), dict(nv_data=data, c_data=data))
        nv_data, c_data = conn.execute(t2.select()).first()
        eq_(nv_data, data)
        eq_(c_data, data + (" " * 7))  # char is space padded
        assert isinstance(nv_data, util.text_type)
        assert isinstance(c_data, util.text_type)
def test_limit(self):
    """test limit operations combined with lazy-load relationships."""

    users, items, order_items, orders, Item, \
        User, Address, Order, addresses = (
            self.tables.users,
            self.tables.items,
            self.tables.order_items,
            self.tables.orders,
            self.classes.Item,
            self.classes.User,
            self.classes.Address,
            self.classes.Order,
            self.tables.addresses)

    mapper(Item, items)
    mapper(Order, orders, properties={
        'items': relationship(Item, secondary=order_items, lazy='select')
    })
    mapper(User, users, properties={
        'addresses': relationship(
            mapper(Address, addresses), lazy='select'),
        'orders': relationship(Order, lazy='select')
    })

    sess = create_session()
    q = sess.query(User)
    if testing.against('mssql'):
        # mssql: LIMIT without OFFSET only
        # renamed ambiguous `l` (PEP 8 / E741) to `result`, matching
        # the sibling versions of this test
        result = q.limit(2).all()
        assert self.static.user_all_result[:2] == result
    else:
        result = q.limit(2).offset(1).all()
        assert self.static.user_all_result[1:3] == result
def test_text_doesnt_explode(self):
    """CASE expressions built from text() and literal_column() both
    execute and return consistent rows."""
    statements = [
        select([
            case([(info_table.c.info == 'pk_4_data', text("'yes'"))],
                 else_=text("'no'"))
        ]).order_by(info_table.c.info),
        select([
            case([(info_table.c.info == 'pk_4_data',
                   literal_column("'yes'"))],
                 else_=literal_column("'no'"))
        ]).order_by(info_table.c.info),
    ]
    for stmt in statements:
        if testing.against("firebird"):
            # firebird space-pads CHAR results to the literal width
            eq_(stmt.execute().fetchall(), [
                ('no ', ), ('no ', ), ('no ', ),
                ('yes', ), ('no ', ), ('no ', ),
            ])
        else:
            eq_(stmt.execute().fetchall(), [
                ('no', ), ('no', ), ('no', ),
                ('yes', ), ('no', ), ('no', ),
            ])
def test_explicit_default_schema_metadata(self):
    """Tables created under an explicit MetaData schema can be created,
    re-created with checkfirst, and reflected back."""
    engine = testing.db

    if testing.against("sqlite"):
        # Works for CREATE TABLE main.foo, SELECT FROM main.foo, etc.,
        # but fails on:
        # FOREIGN KEY(col2) REFERENCES main.table1 (col1)
        schema = "main"
    else:
        schema = engine.dialect.default_schema_name

    assert bool(schema)

    metadata = MetaData(engine, schema=schema)
    table1 = Table(
        "table1",
        metadata,
        Column("col1", sa.Integer, primary_key=True),
        test_needs_fk=True,
    )
    table2 = Table(
        "table2",
        metadata,
        Column("col1", sa.Integer, primary_key=True),
        Column("col2", sa.Integer, sa.ForeignKey("table1.col1")),
        test_needs_fk=True,
    )
    try:
        metadata.create_all()
        # second pass with checkfirst must be a no-op, not an error
        metadata.create_all(checkfirst=True)
        assert len(metadata.tables) == 2
        metadata.clear()
        # reflect both tables back under the same schema
        table1 = Table("table1", metadata, autoload=True)
        table2 = Table("table2", metadata, autoload=True)
        assert len(metadata.tables) == 2
    finally:
        metadata.drop_all()
def test_int_default_none_on_insert_reflected(self):
    """With implicit_returning disabled, the inserted PK of a
    server-defaulted column is reported as None."""
    metadata = self.metadata
    Table(
        'x', metadata,
        Column('y', Integer, server_default='5', primary_key=True),
        Column('data', String(10)),
        implicit_returning=False,
    )
    metadata.create_all()

    m2 = MetaData(metadata.bind)
    t2 = Table('x', m2, autoload=True, implicit_returning=False)

    r = t2.insert().execute(data='data')
    # without RETURNING the server-generated PK is unknown
    eq_(r.inserted_primary_key, [None])

    if testing.against('sqlite'):
        # sqlite autoincrements INTEGER PRIMARY KEY, ignoring the default
        eq_(t2.select().execute().fetchall(), [(1, 'data')])
    else:
        eq_(t2.select().execute().fetchall(), [(5, 'data')])
def test_nullable_kwarg(self):
    """Identity columns: explicit nullable flags are honored and render
    (or omit) NULL per dialect."""
    t = Table(
        "t",
        MetaData(),
        Column("a", Integer(), Identity(), nullable=False),
        Column("b", Integer(), Identity(), nullable=True),
        Column("c", Integer(), Identity()),
    )

    is_(t.c.a.nullable, False)
    is_(t.c.b.nullable, True)
    # unspecified nullable on an Identity column defaults to NOT NULL
    is_(t.c.c.nullable, False)

    # PG (outside the default_enhanced dialect) renders an explicit NULL
    # for the nullable identity column
    if getattr(self, "__dialect__", None) != "default_enhanced" \
            and testing.against("postgresql"):
        nullable = " NULL"
    else:
        nullable = ""

    self.assert_compile(
        CreateTable(t),
        ("CREATE TABLE t ("
         "a INTEGER GENERATED BY DEFAULT AS IDENTITY, "
         "b INTEGER GENERATED BY DEFAULT AS IDENTITY%s, "
         "c INTEGER GENERATED BY DEFAULT AS IDENTITY"
         ")") % nullable,
    )
def test_updatemany(self):
    """executemany UPDATE with bound PK values; verifies defaults and
    explicitly-set columns after the second update pass."""
    # MySQL-Python 1.2.2 breaks functions in execute_many :(
    if (testing.against('mysql+mysqldb') and
            testing.db.dialect.dbapi.version_info[:3] == (1, 2, 2)):
        return

    t.insert().execute({}, {}, {})

    t.update(t.c.col1 == sa.bindparam('pkval')).execute({
        'pkval': 51,
        'col7': None,
        'col8': None,
        'boolcol1': False
    })

    t.update(t.c.col1 == sa.bindparam('pkval')).execute({'pkval': 51},
                                                        {'pkval': 52},
                                                        {'pkval': 53})

    # renamed ambiguous `l` (PEP 8 / E741) to `rows`
    rows = t.select().execute()
    ctexec = currenttime.scalar()
    today = datetime.date.today()
    eq_(rows.fetchall(),
        [(51, 'im the update', f2, ts, ts, ctexec, False, False,
          13, today, 'py', 'hi'),
         (52, 'im the update', f2, ts, ts, ctexec, True, False,
          13, today, 'py', 'hi'),
         (53, 'im the update', f2, ts, ts, ctexec, True, False,
          13, today, 'py', 'hi')])
def test_updatemany(self):
    """executemany UPDATE against bound PK values, then verify the full
    row contents including server-side defaults."""
    # MySQL-Python 1.2.2 breaks functions in execute_many :(
    if (testing.against('mysql+mysqldb') and
            testing.db.dialect.dbapi.version_info[:3] == (1, 2, 2)):
        return

    t.insert().execute({}, {}, {})

    t.update(t.c.col1 == sa.bindparam('pkval')).execute(
        {'pkval': 51, 'col7': None, 'col8': None, 'boolcol1': False})

    t.update(t.c.col1 == sa.bindparam('pkval')).execute(
        {'pkval': 51}, {'pkval': 52}, {'pkval': 53})

    # renamed ambiguous `l` (PEP 8 / E741) to `rows`
    rows = t.select().execute()
    ctexec = currenttime.scalar()
    today = datetime.date.today()
    eq_(rows.fetchall(),
        [(51, 'im the update', f2, ts, ts, ctexec, False, False,
          13, today, 'py', 'hi'),
         (52, 'im the update', f2, ts, ts, ctexec, True, False,
          13, today, 'py', 'hi'),
         (53, 'im the update', f2, ts, ts, ctexec, True, False,
          13, today, 'py', 'hi')])
def test_text_doesnt_explode(self):
    """CASE expressions built from text() and literal_column() literals
    both execute and return the expected rows."""
    for s in [
        select([
            case([(info_table.c.info == 'pk_4_data', text("'yes'"))],
                 else_=text("'no'"))
        ]).order_by(info_table.c.info),
        select([
            case([(info_table.c.info == 'pk_4_data',
                   literal_column("'yes'"))],
                 else_=literal_column("'no'"))
        ]).order_by(info_table.c.info),
    ]:
        if testing.against("firebird"):
            # firebird space-pads CHAR results to the literal width
            eq_(s.execute().fetchall(), [
                ('no ', ), ('no ', ), ('no ', ),
                ('yes', ), ('no ', ), ('no ', ),
            ])
        else:
            eq_(s.execute().fetchall(), [
                ('no', ), ('no', ), ('no', ),
                ('yes', ), ('no', ), ('no', ),
            ])
def test_column_accessor_sqlite_raw(self):
    """With sqlite_raw_colnames, SQLite < 3.10.0 keeps dotted
    ``table.column`` keys in UNION results; newer versions normalize
    to bare column names."""
    users = self.tables.users

    users.insert().execute(
        dict(user_id=1, user_name='john'),
    )

    r = text(
        "select users.user_id, users.user_name "
        "from users "
        "UNION select users.user_id, "
        "users.user_name from users",
        bind=testing.db).execution_options(sqlite_raw_colnames=True). \
        execute().first()

    if testing.against("sqlite < 3.10.0"):
        # old SQLite reports the dotted labels as-is in raw mode
        not_in_('user_id', r)
        not_in_('user_name', r)
        eq_(r['users.user_id'], 1)
        eq_(r['users.user_name'], "john")
        eq_(list(r.keys()), ["users.user_id", "users.user_name"])
    else:
        # 3.10.0+ strips the table qualifier
        not_in_('users.user_id', r)
        not_in_('users.user_name', r)
        eq_(r['user_id'], 1)
        eq_(r['user_name'], "john")
        eq_(list(r.keys()), ["user_id", "user_name"])
def define_temp_tables(cls, metadata):
    """Create the temp-table fixture, using Oracle's GLOBAL TEMPORARY
    syntax where needed."""
    # cheat a bit, we should fix this with some dialect-level
    # temp table fixture
    if testing.against("oracle"):
        kw = {
            'prefixes': ["GLOBAL TEMPORARY"],
            'oracle_on_commit': 'PRESERVE ROWS',
        }
    else:
        kw = {'prefixes': ["TEMPORARY"]}

    user_tmp = Table(
        "user_tmp",
        metadata,
        Column("id", sa.INT, primary_key=True),
        Column('name', sa.VARCHAR(50)),
        Column('foo', sa.INT),
        sa.UniqueConstraint('name', name='user_tmp_uq'),
        sa.Index("user_tmp_ix", "foo"),
        **kw)

    # optionally layer a temp view on top for view-reflection tests
    view_supported = (testing.requires.view_reflection.enabled and
                      testing.requires.temporary_views.enabled)
    if view_supported:
        event.listen(
            user_tmp,
            "after_create",
            DDL("create temporary view user_tmp_v as "
                "select * from user_tmp"))
        event.listen(user_tmp, "before_drop", DDL("drop view user_tmp_v"))
def define_temp_tables(cls, metadata):
    """Temp-table fixture; Oracle requires GLOBAL TEMPORARY plus an
    ON COMMIT clause to keep rows visible across statements."""
    # cheat a bit, we should fix this with some dialect-level
    # temp table fixture
    if testing.against("oracle"):
        kw = {
            'prefixes': ["GLOBAL TEMPORARY"],
            'oracle_on_commit': 'PRESERVE ROWS'
        }
    else:
        kw = {
            'prefixes': ["TEMPORARY"],
        }

    user_tmp = Table(
        "user_tmp", metadata,
        Column("id", sa.INT, primary_key=True),
        Column('name', sa.VARCHAR(50)),
        Column('foo', sa.INT),
        sa.UniqueConstraint('name', name='user_tmp_uq'),
        sa.Index("user_tmp_ix", "foo"),
        **kw
    )
    # layer a temporary view over the table when the backend can
    # both reflect views and create temporary ones
    if testing.requires.view_reflection.enabled and \
            testing.requires.temporary_views.enabled:
        event.listen(
            user_tmp, "after_create",
            DDL("create temporary view user_tmp_v as "
                "select * from user_tmp")
        )
        event.listen(
            user_tmp, "before_drop",
            DDL("drop view user_tmp_v")
        )
def _test_lastrow_accessor(self, table_, values, assertvalues):
    """Tests the inserted_primary_key and lastrow_has_id() functions."""

    def insert_values(engine, table_, values):
        """
        Inserts a row into a table, returns the full list of values
        INSERTed including defaults that fired off on the DB side and
        detects rows that had defaults and post-fetches.
        """

        # verify implicit_returning is working
        if engine.dialect.implicit_returning:
            ins = table_.insert()
            comp = ins.compile(engine, column_keys=list(values))
            # RETURNING must be in play unless the PK was fully supplied
            if not set(values).issuperset(
                c.key for c in table_.primary_key
            ):
                is_(bool(comp.returning), True)

        result = engine.execute(table_.insert(), **values)
        ret = values.copy()

        # fold generated PK values into the returned dict
        for col, id_ in zip(
            table_.primary_key, result.inserted_primary_key
        ):
            ret[col.key] = id_

        if result.lastrow_has_defaults():
            # post-fetch the whole row to pick up server-side defaults
            criterion = and_(
                *[
                    col == id_
                    for col, id_ in zip(
                        table_.primary_key, result.inserted_primary_key
                    )
                ]
            )
            row = engine.execute(table_.select(criterion)).first()
            for c in table_.c:
                ret[c.key] = row._mapping[c]
        return ret

    if testing.against("firebird", "postgresql", "oracle", "mssql"):
        assert testing.db.dialect.implicit_returning

    # exercise both RETURNING-on and RETURNING-off engines when supported
    if testing.db.dialect.implicit_returning:
        test_engines = [
            engines.testing_engine(options={"implicit_returning": False}),
            engines.testing_engine(options={"implicit_returning": True}),
        ]
    else:
        test_engines = [testing.db]

    for engine in test_engines:
        try:
            table_.create(bind=engine, checkfirst=True)
            i = insert_values(engine, table_, values)
            eq_(i, assertvalues)
        finally:
            table_.drop(bind=engine)
def _test_lastrow_accessor(self, table_, values, assertvalues):
    """Tests the inserted_primary_key and lastrow_has_id() functions."""

    def insert_values(engine, table_, values):
        """
        Inserts a row into a table, returns the full list of values
        INSERTed including defaults that fired off on the DB side and
        detects rows that had defaults and post-fetches.
        """

        # verify implicit_returning is working
        if engine.dialect.implicit_returning:
            ins = table_.insert()
            comp = ins.compile(engine, column_keys=list(values))
            # RETURNING must be in play unless the PK was fully supplied
            if not set(values).issuperset(
                c.key for c in table_.primary_key
            ):
                is_(bool(comp.returning), True)

        result = engine.execute(table_.insert(), **values)
        ret = values.copy()

        # fold generated PK values into the returned dict
        for col, id_ in zip(
            table_.primary_key, result.inserted_primary_key
        ):
            ret[col.key] = id_

        if result.lastrow_has_defaults():
            # post-fetch the whole row to pick up server-side defaults
            criterion = and_(
                *[
                    col == id_
                    for col, id_ in zip(
                        table_.primary_key, result.inserted_primary_key
                    )
                ]
            )
            row = engine.execute(table_.select(criterion)).first()
            for c in table_.c:
                ret[c.key] = row[c]
        return ret

    if testing.against("firebird", "postgresql", "oracle", "mssql"):
        assert testing.db.dialect.implicit_returning

    # exercise both RETURNING-on and RETURNING-off engines when supported
    if testing.db.dialect.implicit_returning:
        test_engines = [
            engines.testing_engine(options={"implicit_returning": False}),
            engines.testing_engine(options={"implicit_returning": True}),
        ]
    else:
        test_engines = [testing.db]

    for engine in test_engines:
        try:
            table_.create(bind=engine, checkfirst=True)
            i = insert_values(engine, table_, values)
            eq_(i, assertvalues)
        finally:
            table_.drop(bind=engine)
class IMHandle(Base, fixtures.ComparableEntity):
    """Declarative class autoloaded from the ``imhandles`` table."""

    __tablename__ = 'imhandles'
    __autoload__ = True
    # Oracle/Firebird can't reflect a usable autoincrementing PK,
    # so declare the id and FK columns explicitly there
    if testing.against('oracle', 'firebird'):
        id = Column('id', Integer, primary_key=True,
                    test_needs_autoincrement=True)
        user_id = Column('user_id', Integer, ForeignKey('users.id'))
class User(Base, fixtures.ComparableEntity):
    """Declarative class autoloaded from ``users``, with an
    ``addresses`` relationship back-referencing ``user``."""

    __tablename__ = 'users'
    __autoload__ = True
    # Oracle/Firebird need the PK declared explicitly since reflection
    # doesn't give us an autoincrementing integer column there
    if testing.against('oracle', 'firebird'):
        id = Column('id', Integer, primary_key=True,
                    test_needs_autoincrement=True)
    addresses = relationship('Address', backref='user')
def _test_get_columns(self, schema=None, table_type="table"):
    """Compare inspector-reported columns against the fixture tables,
    matching names, order, and generic type families."""
    meta = MetaData(testing.db)
    users, addresses, dingalings = (
        self.tables.users,
        self.tables.email_addresses,
        self.tables.dingalings,
    )
    table_names = ["users", "email_addresses"]
    # views mirror the tables under *_v names
    if table_type == "view":
        table_names = ["users_v", "email_addresses_v"]
    insp = inspect(meta.bind)
    for table_name, table in zip(table_names, (users, addresses)):
        schema_name = schema
        cols = insp.get_columns(table_name, schema=schema_name)
        self.assert_(len(cols) > 0, len(cols))

        # should be in order
        for i, col in enumerate(table.columns):
            eq_(col.name, cols[i]["name"])
            ctype = cols[i]["type"].__class__
            ctype_def = col.type
            if isinstance(ctype_def, sa.types.TypeEngine):
                ctype_def = ctype_def.__class__

            # Oracle returns Date for DateTime.
            if testing.against("oracle") and ctype_def in (
                sql_types.Date,
                sql_types.DateTime,
            ):
                ctype_def = sql_types.Date

            # assert that the desired type and return type share
            # a base within one of the generic types.
            self.assert_(
                len(
                    set(ctype.__mro__)
                    .intersection(ctype_def.__mro__)
                    .intersection(
                        [
                            sql_types.Integer,
                            sql_types.Numeric,
                            sql_types.DateTime,
                            sql_types.Date,
                            sql_types.Time,
                            sql_types.String,
                            sql_types._Binary,
                        ]
                    )
                )
                > 0,
                "%s(%s), %s(%s)"
                % (col.name, col.type, cols[i]["name"], ctype),
            )
            # non-PK columns in this fixture carry no server default
            if not col.primary_key:
                assert cols[i]["default"] is None
def test_literal_returning(self):
    """Raw-string INSERT ... RETURNING round-trips a literal value."""
    # only PostgreSQL accepts a true boolean literal here
    literal_true = "true" if testing.against("postgresql") else "1"

    returned = testing.db.execute(
        'insert into tables (id, persons, "full") '
        'values (5, 10, %s) returning persons' % literal_true)
    eq_([dict(row) for row in returned], [{'persons': 10}])
def test_integer_types(self):
    """Reflection round-trip for MySQL integer types across the
    display_width / unsigned / zerofill permutation matrix."""
    specs = []
    for type_ in [
        mysql.TINYINT,
        mysql.SMALLINT,
        mysql.MEDIUMINT,
        mysql.INTEGER,
        mysql.BIGINT,
    ]:
        for display_width in [None, 4, 7]:
            for unsigned in [False, True]:
                for zerofill in [None, True]:
                    kw = {}
                    if display_width:
                        kw["display_width"] = display_width
                    # NOTE(review): `unsigned` only takes False/True in
                    # this loop, so the check below is always true —
                    # confirm whether None was ever intended here
                    if unsigned is not None:
                        kw["unsigned"] = unsigned
                    if zerofill is not None:
                        kw["zerofill"] = zerofill

                    zerofill = bool(zerofill)
                    source_type = type_(**kw)

                    # MySQL's implicit default display width per type
                    if display_width is None:
                        display_width = {
                            mysql.MEDIUMINT: 9,
                            mysql.SMALLINT: 6,
                            mysql.TINYINT: 4,
                            mysql.INTEGER: 11,
                            mysql.BIGINT: 20,
                        }[type_]

                    # ZEROFILL implies UNSIGNED in MySQL
                    if zerofill:
                        unsigned = True

                    expected_type = type_(
                        display_width=display_width,
                        unsigned=unsigned,
                        zerofill=zerofill,
                    )
                    specs.append((source_type, expected_type))

    specs.extend(
        [
            (SmallInteger(), mysql.SMALLINT(display_width=6)),
            (Integer(), mysql.INTEGER(display_width=11)),
            (BigInteger, mysql.BIGINT(display_width=20)),
        ]
    )

    # TODO: mysql 8.0.19-ish doesn't consistently report
    # on display_width.  need to test this more accurately though
    # for the cases where it does
    if testing.against("mysql >= 8.0.19"):
        self._run_test(specs, ["unsigned", "zerofill"])
    else:
        self._run_test(specs, ["display_width", "unsigned", "zerofill"])
def test_outer_joinedload_w_limit(self):
    """FOR UPDATE with an outer joinedload and LIMIT: Oracle rejects it
    (ORA-02014), other backends execute it."""
    User = self.classes.User
    sess = fixture_session()

    q = sess.query(User).options(
        joinedload(User.addresses, innerjoin=False))

    # PG can restrict the lock to the users table via OF
    q = q.with_for_update(of=User) if testing.against("postgresql") \
        else q.with_for_update()

    q = q.limit(1)

    if testing.against("oracle"):
        # Oracle can't FOR UPDATE a subquery-wrapped (limited) SELECT
        assert_raises_message(exc.DatabaseError, "ORA-02014", q.all)
    else:
        q.all()

    sess.close()
def test_outer_joinedload_w_limit(self):
    """FOR UPDATE combined with an outer joinedload plus LIMIT; Oracle
    raises ORA-02014, everyone else runs the query."""
    User = self.classes.User
    sess = Session()

    query = sess.query(User).options(
        joinedload(User.addresses, innerjoin=False)
    )

    if testing.against("postgresql"):
        # PG supports narrowing the lock with FOR UPDATE OF
        query = query.with_for_update(of=User)
    else:
        query = query.with_for_update()

    query = query.limit(1)

    if testing.against("oracle"):
        # Oracle can't lock a LIMIT-wrapped subquery
        assert_raises_message(exc.DatabaseError, "ORA-02014", query.all)
    else:
        query.all()

    sess.close()
def test_literal_returning(self, connection):
    """Driver-level INSERT ... RETURNING with a dialect-appropriate
    boolean literal."""
    # only PostgreSQL takes a true boolean literal here
    literal_true = "true" if testing.against("postgresql") else "1"

    returned = connection.exec_driver_sql(
        'insert into tables (id, persons, "full") '
        "values (5, 10, %s) returning persons" % literal_true)
    eq_([dict(row._mapping) for row in returned], [{"persons": 10}])
def test_year_types(self, metadata, connection):
    """YEAR reflection round-trip; MySQL >= 8.0.19 stopped reporting
    display width, so that attribute is skipped there."""
    specs = [
        (mysql.YEAR(), mysql.YEAR(display_width=4)),
        (mysql.YEAR(display_width=4), mysql.YEAR(display_width=4)),
    ]

    ignored_attrs = (
        [] if testing.against("mysql>=8.0.19") else ["display_width"]
    )
    self._run_test(metadata, connection, specs, ignored_attrs)
def test_error_code(self, metadata, connection):
    """A PK violation surfaces SQLSTATE 23505 through each PG driver's
    own error attribute."""
    t = Table("t", metadata, Column("id", Integer, primary_key=True))
    t.create(connection)

    # duplicate PK in one executemany batch triggers the violation;
    # assert_raises hands back the caught exception
    err = assert_raises(
        exc.IntegrityError,
        connection.execute,
        t.insert(),
        [{"id": 1}, {"id": 1}],
    )

    if testing.against("postgresql+pg8000"):
        # TODO: is there another way we're supposed to see this?
        eq_(err.orig.args[0]["C"], "23505")
    else:
        eq_(err.orig.pgcode, "23505")

    if testing.against("postgresql+asyncpg"):
        eq_(err.orig.sqlstate, "23505")
def test_year_types(self):
    """Reflect YEAR types; width checked only before MySQL 8.0.19."""
    specs = [
        (mysql.YEAR(), mysql.YEAR(display_width=4)),
        (mysql.YEAR(display_width=4), mysql.YEAR(display_width=4)),
    ]
    if testing.against("mysql>=8.0.19"):
        # display_width no longer reliably reported on 8.0.19+
        self._run_test(specs, [])
    else:
        self._run_test(specs, ["display_width"])
def get_objects_skipping_sqlite_issue():
    """Return gc-tracked objects, minus pysqlite's weakref churn.

    pysqlite keeps adding weakref objects that only reset after ~220
    iterations; filtering them out lets these tests reach a "flatline"
    object count well under 50 iterations.
    """
    objects = gc.get_objects()
    if testing.against("sqlite+pysqlite"):
        objects = [obj for obj in objects if not isinstance(obj, weakref.ref)]
    return objects
def setup_class(cls):
    # Build three tables exercising non-ASCII (accented Latin and CJK)
    # identifiers against a dedicated utf-8 engine.  The bind and the
    # t1/t2/t3 Table objects are published as module globals for the
    # tests in this class.
    global unicode_bind, metadata, t1, t2, t3

    unicode_bind = utf8_engine()

    metadata = MetaData(unicode_bind)
    t1 = Table(
        u('unitable1'),
        metadata,
        Column(u('méil'), Integer, primary_key=True),
        Column(ue('\u6e2c\u8a66'), Integer),
        test_needs_fk=True,
    )
    t2 = Table(
        u('Unitéble2'),
        metadata,
        # key= gives the columns ASCII attribute names ("a"/"b") so tests
        # can address them without the unicode identifiers
        Column(u('méil'), Integer, primary_key=True, key="a"),
        Column(ue('\u6e2c\u8a66'), Integer,
               ForeignKey(u('unitable1.méil')), key="b"),
        test_needs_fk=True,
    )

    # Few DBs support Unicode foreign keys
    if testing.against('sqlite'):
        t3 = Table(
            ue('\u6e2c\u8a66'),
            metadata,
            Column(ue('\u6e2c\u8a66_id'), Integer, primary_key=True,
                   autoincrement=False),
            Column(ue('unitable1_\u6e2c\u8a66'), Integer,
                   ForeignKey(ue('unitable1.\u6e2c\u8a66'))),
            Column(u('Unitéble2_b'), Integer,
                   ForeignKey(u('Unitéble2.b'))),
            # self-referential unicode FK
            Column(ue('\u6e2c\u8a66_self'), Integer,
                   ForeignKey(ue('\u6e2c\u8a66.\u6e2c\u8a66_id'))),
            test_needs_fk=True,
        )
    else:
        # same columns, but without the ForeignKey constraints
        t3 = Table(
            ue('\u6e2c\u8a66'),
            metadata,
            Column(ue('\u6e2c\u8a66_id'), Integer, primary_key=True,
                   autoincrement=False),
            Column(ue('unitable1_\u6e2c\u8a66'), Integer),
            Column(u('Unitéble2_b'), Integer),
            Column(ue('\u6e2c\u8a66_self'), Integer),
            test_needs_fk=True,
        )
    metadata.create_all()
def test_limit(self):
    """test limit operations combined with lazy-load relationships."""
    users = self.tables.users
    items = self.tables.items
    order_items = self.tables.order_items
    orders = self.tables.orders
    addresses = self.tables.addresses
    Item = self.classes.Item
    User = self.classes.User
    Address = self.classes.Address
    Order = self.classes.Order

    mapper(Item, items)
    mapper(
        Order,
        orders,
        properties={
            "items": relationship(Item, secondary=order_items, lazy="select")
        },
    )
    mapper(
        User,
        users,
        properties={
            "addresses": relationship(
                mapper(Address, addresses), lazy="select"
            ),
            "orders": relationship(Order, lazy="select"),
        },
    )

    sess = create_session()
    q = sess.query(User)

    if testing.against("mssql"):
        # NOTE(review): mssql is given LIMIT only, no OFFSET —
        # presumably offset is unsupported in this configuration
        result = q.limit(2).all()
        assert self.static.user_all_result[:2] == result
    else:
        result = q.limit(2).offset(1).all()
        assert self.static.user_all_result[1:3] == result
def define_tables(cls, metadata):
    """Define a self-referential ``nodes`` table for these tests."""
    # oracle gets a deferrable, initially-deferred FK; everyone else
    # uses ON UPDATE CASCADE
    fk_args = (
        dict(deferrable=True, initially='deferred')
        if testing.against('oracle')
        else dict(onupdate='cascade')
    )
    Table(
        'nodes',
        metadata,
        Column('name', String(50), primary_key=True),
        Column('parent', String(50), ForeignKey('nodes.name', **fk_args)),
        test_needs_fk=True,
    )
def setup_class(cls):
    # Create three tables with non-ASCII (accented Latin and CJK)
    # identifiers on the default test engine; t1/t2/t3 are published as
    # module globals for the tests in this class.
    global metadata, t1, t2, t3
    metadata = MetaData(testing.db)
    t1 = Table(
        u('unitable1'),
        metadata,
        Column(u('méil'), Integer, primary_key=True),
        Column(ue('\u6e2c\u8a66'), Integer),
        test_needs_fk=True,
    )
    t2 = Table(
        u('Unitéble2'),
        metadata,
        # key= gives ASCII attribute names ("a"/"b") so tests can address
        # the columns without typing the unicode identifiers
        Column(
            u('méil'),
            Integer,
            primary_key=True,
            key="a"),
        Column(
            ue('\u6e2c\u8a66'),
            Integer,
            ForeignKey(
                u('unitable1.méil')),
            key="b"),
        test_needs_fk=True,
    )

    # Few DBs support Unicode foreign keys
    if testing.against('sqlite'):
        t3 = Table(
            ue('\u6e2c\u8a66'),
            metadata,
            Column(ue('\u6e2c\u8a66_id'), Integer, primary_key=True,
                   autoincrement=False),
            Column(ue('unitable1_\u6e2c\u8a66'), Integer,
                   ForeignKey(ue('unitable1.\u6e2c\u8a66'))),
            Column(u('Unitéble2_b'), Integer,
                   ForeignKey(u('Unitéble2.b'))),
            # self-referential unicode FK
            Column(ue('\u6e2c\u8a66_self'), Integer,
                   ForeignKey(ue('\u6e2c\u8a66.\u6e2c\u8a66_id'))),
            test_needs_fk=True,
        )
    else:
        # same columns, but without the ForeignKey constraints
        t3 = Table(
            ue('\u6e2c\u8a66'),
            metadata,
            Column(ue('\u6e2c\u8a66_id'), Integer, primary_key=True,
                   autoincrement=False),
            Column(ue('unitable1_\u6e2c\u8a66'), Integer),
            Column(u('Unitéble2_b'), Integer),
            Column(ue('\u6e2c\u8a66_self'), Integer),
            test_needs_fk=True,
        )
    metadata.create_all()
def _test_get_columns(self, schema=None, table_type="table"):
    # Reflect columns via the Inspector and compare them against the
    # fixture Table objects: name, ordering, type "family" (shared base
    # among the generic SQL types), and, for non-PK columns, absence of
    # a reflected default.
    #
    # schema: optional schema name passed to get_columns.
    # table_type: "table" or "view"; views use the *_v fixture names.
    meta = MetaData(testing.db)
    users, addresses, dingalings = self.tables.users, self.tables.email_addresses, self.tables.dingalings
    table_names = ["users", "email_addresses"]
    if table_type == "view":
        table_names = ["users_v", "email_addresses_v"]
    insp = inspect(meta.bind)
    for table_name, table in zip(table_names, (users, addresses)):
        schema_name = schema
        cols = insp.get_columns(table_name, schema=schema_name)
        self.assert_(len(cols) > 0, len(cols))

        # should be in order
        for i, col in enumerate(table.columns):
            eq_(col.name, cols[i]["name"])
            ctype = cols[i]["type"].__class__
            ctype_def = col.type
            if isinstance(ctype_def, sa.types.TypeEngine):
                ctype_def = ctype_def.__class__

            # Oracle returns Date for DateTime.
            if testing.against("oracle") and ctype_def in (
                sql_types.Date,
                sql_types.DateTime,
            ):
                ctype_def = sql_types.Date

            # assert that the desired type and return type share
            # a base within one of the generic types.
            self.assert_(
                len(
                    set(ctype.__mro__)
                    .intersection(ctype_def.__mro__)
                    .intersection(
                        [
                            sql_types.Integer,
                            sql_types.Numeric,
                            sql_types.DateTime,
                            sql_types.Date,
                            sql_types.Time,
                            sql_types.String,
                            sql_types._Binary,
                        ]
                    )
                )
                > 0,
                "%s(%s), %s(%s)"
                % (col.name, col.type, cols[i]["name"], ctype),
            )
            if not col.primary_key:
                assert cols[i]["default"] is None
def test_outer_joinedload_wo_limit(self):
    """FOR UPDATE with an outer joinedload and no LIMIT."""
    User = self.classes.User
    session = Session()

    query = session.query(User).options(
        joinedload(User.addresses, innerjoin=False)
    )
    # postgresql supports narrowing the lock to the users table
    query = (
        query.with_for_update(of=User)
        if testing.against("postgresql")
        else query.with_for_update()
    )
    query.all()
    session.close()
def test_ilike(self):
    """ilike matches case-insensitively; on PG, like stays case-sensitive."""
    users.insert().execute(
        {"user_id": 1, "user_name": "one"},
        {"user_id": 2, "user_name": "TwO"},
        {"user_id": 3, "user_name": "ONE"},
        {"user_id": 4, "user_name": "OnE"},
    )

    def matching_ids(criterion):
        # user_ids whose row satisfies the given criterion
        return (
            select([users.c.user_id]).where(criterion).execute().fetchall()
        )

    eq_(matching_ids(users.c.user_name.ilike("one")), [(1,), (3,), (4,)])
    eq_(matching_ids(users.c.user_name.ilike("TWO")), [(2,)])

    if testing.against("postgresql"):
        eq_(matching_ids(users.c.user_name.like("one")), [(1,)])
        eq_(matching_ids(users.c.user_name.like("TWO")), [])
def test_reserved(self):
    # check a table that uses an SQL reserved name doesn't cause an
    # error
    meta = self.metadata

    table_a = Table(
        "select",
        meta,
        Column("not", sa.Integer, primary_key=True),
        Column("from", sa.String(12), nullable=False),
        sa.UniqueConstraint("from", name="when"),
    )
    sa.Index("where", table_a.c["from"])

    # There's currently no way to calculate identifier case
    # normalization in isolation, so...
    check_col = (
        "TRUE"
        if testing.against("firebird", "oracle", "maxdb")
        else "true"
    )
    quoter = meta.bind.dialect.identifier_preparer.quote_identifier

    Table(
        "false",
        meta,
        Column("create", sa.Integer, primary_key=True),
        Column("true", sa.Integer, sa.ForeignKey("select.not")),
        sa.CheckConstraint("%s <> 1" % quoter(check_col), name="limit"),
    )

    table_c = Table(
        "is",
        meta,
        Column("or", sa.Integer, nullable=False, primary_key=True),
        Column("join", sa.Integer, nullable=False, primary_key=True),
        sa.PrimaryKeyConstraint("or", "join", name="to"),
    )
    index_c = sa.Index("else", table_c.c.join)

    meta.create_all()
    index_c.drop()

    # all three reserved-name tables must reflect without error
    meta2 = MetaData(testing.db)
    Table("select", meta2, autoload=True)
    Table("false", meta2, autoload=True)
    Table("is", meta2, autoload=True)
def test_int_default_none_on_insert(self):
    """With server_default and implicit_returning=False, the inserted
    primary key is reported as None."""
    metadata = self.metadata
    t = Table(
        "x",
        metadata,
        Column("y", Integer, server_default="5", primary_key=True),
        Column("data", String(10)),
        implicit_returning=False,
    )
    # a server-defaulted PK column is not treated as autoincrement
    assert t._autoincrement_column is None
    metadata.create_all()

    r = t.insert().execute(data="data")
    eq_(r.inserted_primary_key, [None])

    # on sqlite the row comes back with 1 rather than the server default
    expected_row = (1, "data") if testing.against("sqlite") else (5, "data")
    eq_(t.select().execute().fetchall(), [expected_row])
def test_insertmany(self):
    """executemany of three empty rows picks up all column defaults."""
    # MySQL-Python 1.2.2 breaks functions in execute_many :(
    if (
        testing.against('mysql+mysqldb')
        and testing.db.dialect.dbapi.version_info[:3] == (1, 2, 2)
    ):
        return

    t.insert().execute({}, {}, {})
    ctexec = currenttime.scalar()
    rows = t.select().execute()
    today = datetime.date.today()

    # the three rows differ only in their primary key
    eq_(
        rows.fetchall(),
        [
            (pk, 'imthedefault', f, ts, ts, ctexec, True, False,
             12, today, 'py', 'hi')
            for pk in (51, 52, 53)
        ],
    )