def Table(*args, **kw):
    """A schema.Table wrapper/hook for dialect-specific tweaks.

    Pops any 'test_*' keyword options out of ``kw`` before delegating to
    schema.Table, then applies per-dialect adjustments:

    - mysql: force the InnoDB engine when a test declares it needs
      foreign keys or ACID behavior.
    - firebird: default self-referential foreign keys to ON DELETE/ON
      UPDATE CASCADE.
    - firebird/oracle: append a Sequence to primary-key columns flagged
      with ``_needs_autoincrement`` (set by the Column() wrapper).
    """
    global schema
    if schema is None:
        from sqlalchemy import schema

    # Snapshot the key list before popping: mutating kw while iterating
    # kw.keys() directly would break on Python 3, where keys() is a
    # live view rather than a list.
    test_opts = dict([(k, kw.pop(k)) for k in list(kw.keys())
                      if k.startswith('test_')])
    kw.update(table_options)

    if testing.against('mysql'):
        if 'mysql_engine' not in kw and 'mysql_type' not in kw:
            if 'test_needs_fk' in test_opts or 'test_needs_acid' in test_opts:
                kw['mysql_engine'] = 'InnoDB'

    # Apply some default cascading rules for self-referential foreign keys.
    # MySQL InnoDB has some issues around selecting self-refs too.
    if testing.against('firebird'):
        table_name = args[0]
        unpack = (testing.config.db.dialect.
                  identifier_preparer.unformat_identifiers)

        # Only going after ForeignKeys in Columns.  May need to
        # expand to ForeignKeyConstraint too.
        fks = [fk
               for col in args if isinstance(col, schema.Column)
               for fk in col.args if isinstance(fk, schema.ForeignKey)]

        for fk in fks:
            # root around in raw spec
            ref = fk._colspec
            if isinstance(ref, schema.Column):
                name = ref.table.name
            else:
                # take just the table name: on FB there cannot be
                # a schema, so the first element is always the
                # table name, possibly followed by the field name
                name = unpack(ref)[0]
            if name == table_name:
                if fk.ondelete is None:
                    fk.ondelete = 'CASCADE'
                if fk.onupdate is None:
                    fk.onupdate = 'CASCADE'

    if testing.against('firebird', 'oracle'):
        pk_seqs = [col for col in args
                   if (isinstance(col, schema.Column)
                       and col.primary_key
                       and getattr(col, '_needs_autoincrement', False))]
        for c in pk_seqs:
            c.args.append(schema.Sequence(args[0] + '_' + c.name + '_seq',
                                          optional=True))
    return schema.Table(*args, **kw)
def define_tables(self, metadata):
    """Create the 'dt' table plus insert/update triggers that populate
    the FetchedValue columns server-side.

    Dialect-specific trigger DDL is selected with the for/else scan:
    sqlite and mssql get explicit variants; any other dialect falls
    through to the generic BEFORE trigger built in the ``else`` clause.
    """
    dt = Table('dt', metadata,
               Column('id', Integer, primary_key=True),
               Column('col1', String(20)),
               # populated server-side on INSERT
               Column('col2', String(20),
                      server_default=sa.schema.FetchedValue()),
               # populated server-side on UPDATE
               Column('col3', String(20),
                      sa.schema.FetchedValue(for_update=True)),
               # populated server-side on both INSERT and UPDATE
               Column('col4', String(20),
                      sa.schema.FetchedValue(),
                      sa.schema.FetchedValue(for_update=True)))
    for ins in (
        sa.DDL("CREATE TRIGGER dt_ins AFTER INSERT ON dt "
               "FOR EACH ROW BEGIN "
               "UPDATE dt SET col2='ins', col4='ins' "
               "WHERE dt.id = NEW.id; END",
               on='sqlite'),
        sa.DDL("CREATE TRIGGER dt_ins ON dt AFTER INSERT AS "
               "UPDATE dt SET col2='ins', col4='ins' "
               "WHERE dt.id IN (SELECT id FROM inserted);",
               on='mssql'),
        ):
        if testing.against(ins.on):
            break
    else:
        # no dialect-specific DDL matched: use the generic variant
        ins = sa.DDL("CREATE TRIGGER dt_ins BEFORE INSERT ON dt "
                     "FOR EACH ROW BEGIN "
                     "SET NEW.col2='ins'; SET NEW.col4='ins'; END")
    ins.execute_at('after-create', dt)
    sa.DDL("DROP TRIGGER dt_ins").execute_at('before-drop', dt)

    for up in (
        sa.DDL("CREATE TRIGGER dt_up AFTER UPDATE ON dt "
               "FOR EACH ROW BEGIN "
               "UPDATE dt SET col3='up', col4='up' "
               "WHERE dt.id = OLD.id; END",
               on='sqlite'),
        sa.DDL("CREATE TRIGGER dt_up ON dt AFTER UPDATE AS "
               "UPDATE dt SET col3='up', col4='up' "
               "WHERE dt.id IN (SELECT id FROM deleted);",
               on='mssql'),
        ):
        if testing.against(up.on):
            break
    else:
        # generic fallback, as for the insert trigger above
        up = sa.DDL("CREATE TRIGGER dt_up BEFORE UPDATE ON dt "
                    "FOR EACH ROW BEGIN "
                    "SET NEW.col3='up'; SET NEW.col4='up'; END")
    up.execute_at('after-create', dt)
    sa.DDL("DROP TRIGGER dt_up").execute_at('before-drop', dt)
def test_basic(self):
    """Create and reflect tables with unicode names on a utf-8 engine."""
    try:
        # the 'convert_unicode' should not get in the way of the
        # reflection process.  reflecttable for oracle, postgres
        # (others?) expect non-unicode strings in result sets/bind params
        bind = engines.utf8_engine(options={'convert_unicode':True})
        metadata = MetaData(bind)

        if testing.against('sybase', 'maxdb', 'oracle', 'mssql'):
            # these backends only get the ASCII name
            names = set(['plain'])
        else:
            names = set([u'plain', u'Unit\u00e9ble', u'\u6e2c\u8a66'])

        for name in names:
            Table(name, metadata,
                  Column('id', sa.Integer,
                         sa.Sequence(name + "_id_seq"),
                         primary_key=True))
        metadata.create_all()

        reflected = set(bind.table_names())
        if not names.issubset(reflected):
            # Python source files in the utf-8 coding seem to normalize
            # literals as NFC (and the above are explicitly NFC).  Maybe
            # this database normalizes NFD on reflection.
            nfc = set([unicodedata.normalize('NFC', n) for n in names])
            self.assert_(nfc == names)
            # Yep.  But still ensure that bulk reflection and
            # create/drop work with either normalization.

        r = MetaData(bind, reflect=True)
        r.drop_all()
        r.create_all()
    finally:
        metadata.drop_all()
        bind.dispose()
def get_schema():
    """Return the schema name to use for the current test dialect."""
    # (a sqlite-specific `return None` branch existed once but is
    # currently disabled)
    if testing.against('oracle'):
        return 'test'
    return 'test_schema'
def test_updatemany(self):
    """executemany() UPDATE: onupdate defaults fire for every row."""
    # MySQL-Python 1.2.2 breaks functions in execute_many :(
    if (testing.against('mysql') and
        testing.db.dialect.dbapi.version_info[:3] == (1, 2, 2)):
        return

    # seed three all-default rows (expected ids 51, 52, 53 below)
    t.insert().execute({}, {}, {})

    # single-row update: onupdate defaults plus explicit None/False
    # overrides for col7/col8/boolcol1
    t.update(t.c.col1==sa.bindparam('pkval')).execute(
        {'pkval':51,'col7':None, 'col8':None, 'boolcol1':False})

    # true executemany: onupdate defaults fire for all three rows;
    # boolcol1 on row 51 is re-defaulted back to False? no — it keeps
    # no explicit value here, so onupdate/defaults determine the
    # expected False/True mix asserted below
    t.update(t.c.col1==sa.bindparam('pkval')).execute(
        {'pkval':51,},
        {'pkval':52,},
        {'pkval':53,})

    l = t.select().execute()
    ctexec = currenttime.scalar()
    today = datetime.date.today()
    eq_(l.fetchall(),
        [(51, 'im the update', f2, ts, ts, ctexec, False, False,
          13, today, 'py'),
         (52, 'im the update', f2, ts, ts, ctexec, True, False,
          13, today, 'py'),
         (53, 'im the update', f2, ts, ts, ctexec, True, False,
          13, today, 'py')])
def _test_get_indexes(self, schema=None):
    """Verify Inspector.get_indexes reports the expected index on 'users'."""
    meta = MetaData(testing.db)
    (users, addresses) = createTables(meta, schema)
    meta.create_all()
    createIndexes(meta.bind, schema)
    try:
        # The database may decide to create indexes for foreign keys,
        # etc. so there may be more indexes than expected.
        insp = Inspector(meta.bind)
        indexes = insp.get_indexes('users', schema=schema)
        indexes.sort()
        if testing.against('oracle'):
            # Oracle reflects identifiers upper-cased
            expected_indexes = [
                {'unique': False,
                 'column_names': ['TEST1', 'TEST2'],
                 'name': 'USERS_T_IDX'}]
        else:
            expected_indexes = [
                {'unique': False,
                 'column_names': ['test1', 'test2'],
                 'name': 'users_t_idx'}]
        index_names = [d['name'] for d in indexes]
        for e_index in expected_indexes:
            self.assertTrue(e_index['name'] in index_names)
            index = indexes[index_names.index(e_index['name'])]
            # each expected key/value must match the reflected index
            for key in e_index:
                self.assertEqual(e_index[key], index[key])
    finally:
        addresses.drop()
        users.drop()
def test_aggregate_1(self):
    """SUM() aggregate via Query, filtered to rows with bar < 30."""
    # skip on MySQL-Python 1.2.1gamma, which mishandles this query
    broken_mysqldb = (
        testing.against('mysql')
        and testing.db.dialect.dbapi.version_info[:4] == (1, 2, 1, 'gamma'))
    if broken_mysqldb:
        return

    session = create_session()
    total = session.query(func.sum(foo.c.bar)).filter(foo.c.bar < 30).one()
    assert total == (435,)
def test_limit(self):
    """test limit operations combined with lazy-load relationships."""
    mapper(Item, items)
    mapper(Order, orders, properties=dict(
        items=relation(Item, secondary=order_items, lazy=True)))
    mapper(User, users, properties=dict(
        addresses=relation(mapper(Address, addresses), lazy=True),
        orders=relation(Order, lazy=True)))

    session = create_session()
    user_query = session.query(User)

    # maxdb/mssql take the LIMIT-only path; everyone else also OFFSETs
    if testing.against('maxdb', 'mssql'):
        result = user_query.limit(2).all()
        assert self.static.user_all_result[:2] == result
    else:
        result = user_query.limit(2).offset(1).all()
        assert self.static.user_all_result[1:3] == result
def test_updatemany(self):
    """executemany() UPDATE: onupdate defaults fire for every row."""
    # MySQL-Python 1.2.2 breaks functions in execute_many :(
    if (testing.against('mysql') and
            testing.db.dialect.dbapi.version_info[:3] == (1, 2, 2)):
        return

    # seed three all-default rows (expected ids 51, 52, 53 below)
    t.insert().execute({}, {}, {})

    # single-row update with explicit None/False overrides
    t.update(t.c.col1 == sa.bindparam('pkval')).execute({
        'pkval': 51,
        'col7': None,
        'col8': None,
        'boolcol1': False
    })

    # true executemany across all three rows
    t.update(t.c.col1 == sa.bindparam('pkval')).execute({
        'pkval': 51,
    }, {
        'pkval': 52,
    }, {
        'pkval': 53,
    })

    l = t.select().execute()
    ctexec = currenttime.scalar()
    today = datetime.date.today()
    eq_(l.fetchall(),
        [(51, 'im the update', f2, ts, ts, ctexec, False, False,
          13, today, 'py'),
         (52, 'im the update', f2, ts, ts, ctexec, True, False,
          13, today, 'py'),
         (53, 'im the update', f2, ts, ts, ctexec, True, False,
          13, today, 'py')])
def test_aggregate_1(self):
    """Aggregate SUM over rows filtered below 30."""
    # MySQL-Python 1.2.1gamma breaks this query; bail out there
    if testing.against('mysql'):
        if testing.db.dialect.dbapi.version_info[:4] == (1, 2, 1, 'gamma'):
            return

    result = create_session().query(
        func.sum(foo.c.bar)).filter(foo.c.bar < 30).one()
    assert result == (435, )
def _test_get_columns(self, schema=None, table_type='table'):
    """Verify Inspector.get_columns against the created tables or views."""
    meta = MetaData(testing.db)
    (users, addresses) = createTables(meta, schema)
    table_names = ['users', 'email_addresses']
    meta.create_all()
    if table_type == 'view':
        # inspect the *_v views instead of the base tables
        createViews(meta.bind, schema)
        table_names = ['users_v', 'email_addresses_v']
    try:
        insp = Inspector(meta.bind)
        for (table_name, table) in zip(table_names, (users, addresses)):
            schema_name = schema
            if schema and testing.against('oracle'):
                # Oracle stores/reflects schema names upper-cased
                schema_name = schema.upper()
            cols = insp.get_columns(table_name, schema=schema_name)
            self.assert_(len(cols) > 0, len(cols))

            # should be in order
            for (i, col) in enumerate(table.columns):
                self.assertEqual(col.name, cols[i]['name'])
                # coltype is tricky
                # It may not inherit from col.type while they share
                # the same base.
                ctype = cols[i]['type'].__class__
                ctype_def = col.type
                if isinstance(ctype_def, sa.types.TypeEngine):
                    ctype_def = ctype_def.__class__

                # Oracle returns Date for DateTime.
                if testing.against('oracle') \
                    and ctype_def in (sql_types.Date, sql_types.DateTime):
                    ctype_def = sql_types.Date

                # reflected type must be the declared class, a subclass
                # of it, or at least share a common base with it
                self.assert_(
                    issubclass(ctype, ctype_def) or \
                    len(
                        set(
                            ctype.__bases__
                        ).intersection(ctype_def.__bases__)) > 0
                ,("%s(%s), %s(%s)" % (col.name, col.type, cols[i]['name'],
                                      ctype)))
    finally:
        if table_type == 'view':
            dropViews(meta.bind, schema)
        addresses.drop()
        users.drop()
def _test_get_table_oid(self, table_name, schema=None):
    """Postgres-only: Inspector.get_table_oid must return an int."""
    if not testing.against('postgres'):
        return
    meta = MetaData(testing.db)
    users, addresses = createTables(meta, schema)
    meta.create_all()
    try:
        inspector = create_inspector(meta.bind)
        table_oid = inspector.get_table_oid(table_name, schema)
        self.assert_(isinstance(table_oid, int))
    finally:
        addresses.drop()
        users.drop()
def define_tables(self, metadata):
    """Create owners/categories/tests/options with a boolean server default."""
    # determine a literal value for "false" based on the dialect
    # FIXME: this DefaultClause setup is bogus.
    dialect = testing.db.dialect
    bp = sa.Boolean().dialect_impl(dialect).bind_processor(dialect)
    if bp:
        # let the dialect's bind processor render its boolean literal
        false = str(bp(False))
    elif testing.against('maxdb'):
        false = text('FALSE')
    else:
        false = str(False)
    # stashed for use elsewhere in the test class
    self.other_artifacts['false'] = false

    Table('owners', metadata,
          Column('id', Integer, primary_key=True, nullable=False),
          Column('data', String(30)))
    Table('categories', metadata,
          Column('id', Integer, primary_key=True, nullable=False),
          Column('name', String(20)))
    Table('tests', metadata,
          Column('id', Integer, primary_key=True, nullable=False),
          Column('owner_id', Integer, ForeignKey('owners.id'),
                 nullable=False),
          Column('category_id', Integer, ForeignKey('categories.id'),
                 nullable=False))
    # composite-PK association table carrying the boolean option
    Table('options', metadata,
          Column('test_id', Integer, ForeignKey('tests.id'),
                 primary_key=True, nullable=False),
          Column('owner_id', Integer, ForeignKey('owners.id'),
                 primary_key=True, nullable=False),
          Column('someoption', sa.Boolean, server_default=false,
                 nullable=False))
def test_explicit_default_schema(self):
    """Create and reflect tables explicitly qualified with the
    engine's default schema name."""
    engine = testing.db

    if testing.against('mysql'):
        # MySQL's default schema is the current database name
        schema = testing.db.url.database
    elif testing.against('postgres'):
        schema = 'public'
    elif testing.against('sqlite'):
        # Works for CREATE TABLE main.foo, SELECT FROM main.foo, etc.,
        # but fails on:
        #   FOREIGN KEY(col2) REFERENCES main.table1 (col1)
        schema = 'main'
    else:
        # ask the dialect for its notion of the default schema
        schema = engine.dialect.get_default_schema_name(engine.connect())

    metadata = MetaData(engine)
    table1 = Table('table1', metadata,
                   Column('col1', sa.Integer, primary_key=True),
                   test_needs_fk=True,
                   schema=schema)
    table2 = Table('table2', metadata,
                   Column('col1', sa.Integer, primary_key=True),
                   Column('col2', sa.Integer,
                          sa.ForeignKey('%s.table1.col1' % schema)),
                   test_needs_fk=True,
                   schema=schema)
    try:
        metadata.create_all()
        # second create_all with checkfirst must be a no-op, not an error
        metadata.create_all(checkfirst=True)
        assert len(metadata.tables) == 2
        metadata.clear()

        # reflect both tables back through the same schema name
        table1 = Table('table1', metadata, autoload=True, schema=schema)
        table2 = Table('table2', metadata, autoload=True, schema=schema)
        assert len(metadata.tables) == 2
    finally:
        metadata.drop_all()
def test_explicit_default_schema(self):
    """Create and reflect tables explicitly qualified with the
    engine's default schema name."""
    engine = testing.db

    if testing.against('mysql+mysqldb'):
        # with MySQLdb the default schema is the current database name
        schema = testing.db.url.database
    elif testing.against('postgres'):
        schema = 'public'
    elif testing.against('sqlite'):
        # Works for CREATE TABLE main.foo, SELECT FROM main.foo, etc.,
        # but fails on:
        #   FOREIGN KEY(col2) REFERENCES main.table1 (col1)
        schema = 'main'
    else:
        # ask the dialect for its notion of the default schema
        schema = engine.dialect.get_default_schema_name(engine.connect())

    metadata = MetaData(engine)
    table1 = Table('table1', metadata,
                   Column('col1', sa.Integer, primary_key=True),
                   test_needs_fk=True,
                   schema=schema)
    table2 = Table('table2', metadata,
                   Column('col1', sa.Integer, primary_key=True),
                   Column('col2', sa.Integer,
                          sa.ForeignKey('%s.table1.col1' % schema)),
                   test_needs_fk=True,
                   schema=schema)
    try:
        metadata.create_all()
        # second create_all with checkfirst must be a no-op, not an error
        metadata.create_all(checkfirst=True)
        assert len(metadata.tables) == 2
        metadata.clear()

        # reflect both tables back through the same schema name
        table1 = Table('table1', metadata, autoload=True, schema=schema)
        table2 = Table('table2', metadata, autoload=True, schema=schema)
        assert len(metadata.tables) == 2
    finally:
        metadata.drop_all()
def Column(*args, **kw):
    """A schema.Column wrapper/hook for dialect-specific tweaks.

    Pops any 'test_*' keyword options out of ``kw`` before delegating
    to schema.Column.  On firebird/oracle, a 'test_needs_autoincrement'
    option marks the column with ``_needs_autoincrement`` so the
    Table() wrapper can attach a Sequence to it.
    """
    global schema
    if schema is None:
        from sqlalchemy import schema

    # Snapshot the key list before popping: mutating kw while iterating
    # kw.keys() directly would break on Python 3, where keys() is a
    # live view rather than a list.
    test_opts = dict([(k, kw.pop(k)) for k in list(kw.keys())
                      if k.startswith('test_')])

    c = schema.Column(*args, **kw)
    if testing.against('firebird', 'oracle'):
        if 'test_needs_autoincrement' in test_opts:
            c._needs_autoincrement = True
    return c
def test_insertmany(self):
    """executemany() INSERT of empty rows: every column takes its default."""
    # MySQL-Python 1.2.2 breaks functions in execute_many :(
    dbapi_broken = (
        testing.against('mysql')
        and testing.db.dialect.dbapi.version_info[:3] == (1, 2, 2))
    if dbapi_broken:
        return

    result = t.insert().execute({}, {}, {})
    now = currenttime.scalar()
    rows = t.select().execute()
    today = datetime.date.today()
    expected = [
        (51, 'imthedefault', f, ts, ts, now, True, False, 12, today, 'py'),
        (52, 'imthedefault', f, ts, ts, now, True, False, 12, today, 'py'),
        (53, 'imthedefault', f, ts, ts, now, True, False, 12, today, 'py'),
    ]
    eq_(rows.fetchall(), expected)
def test_reserved(self):
    """Tables built entirely from SQL reserved words create and reflect."""
    # check a table that uses an SQL reserved name doesn't cause an error
    meta = MetaData(testing.db)
    table_a = Table('select', meta,
                    Column('not', sa.Integer, primary_key=True),
                    Column('from', sa.String(12), nullable=False),
                    sa.UniqueConstraint('from', name='when'))
    sa.Index('where', table_a.c['from'])

    # There's currently no way to calculate identifier case
    # normalization in isolation, so...
    if testing.against('firebird', 'oracle', 'maxdb'):
        check_col = 'TRUE'
    else:
        check_col = 'true'
    quoter = meta.bind.dialect.identifier_preparer.quote_identifier

    # the CheckConstraint text must quote the reserved column name itself
    table_b = Table('false', meta,
                    Column('create', sa.Integer, primary_key=True),
                    Column('true', sa.Integer,
                           sa.ForeignKey('select.not')),
                    sa.CheckConstraint('%s <> 1' % quoter(check_col),
                                       name='limit'))

    table_c = Table('is', meta,
                    Column('or', sa.Integer, nullable=False,
                           primary_key=True),
                    Column('join', sa.Integer, nullable=False,
                           primary_key=True),
                    sa.PrimaryKeyConstraint('or', 'join', name='to'))
    index_c = sa.Index('else', table_c.c.join)

    meta.create_all()
    index_c.drop()
    meta2 = MetaData(testing.db)
    try:
        # reflection of the reserved-word tables must also succeed
        table_a2 = Table('select', meta2, autoload=True)
        table_b2 = Table('false', meta2, autoload=True)
        table_c2 = Table('is', meta2, autoload=True)
    finally:
        meta.drop_all()
def test_limit(self):
    """test limit operations combined with lazy-load relationships."""
    mapper(Item, items)
    mapper(Order, orders, properties={
        'items': relation(Item, secondary=order_items, lazy=True),
    })
    mapper(User, users, properties={
        'addresses': relation(mapper(Address, addresses), lazy=True),
        'orders': relation(Order, lazy=True),
    })

    session = create_session()
    user_query = session.query(User).limit(2)
    if not testing.against('maxdb', 'mssql'):
        # also exercise OFFSET together with LIMIT on this branch
        user_query = user_query.offset(1)
        expected = self.static.user_all_result[1:3]
    else:
        expected = self.static.user_all_result[:2]
    assert expected == user_query.all()
def define_tables(self, metadata):
    """Create owners/categories/tests/options with a boolean server default."""
    # determine a literal value for "false" based on the dialect
    # FIXME: this DefaultClause setup is bogus.
    dialect = testing.db.dialect
    bp = sa.Boolean().dialect_impl(dialect).bind_processor(dialect)
    if bp:
        # let the dialect's bind processor render its boolean literal
        false = str(bp(False))
    elif testing.against('maxdb'):
        false = text('FALSE')
    else:
        false = str(False)
    # stashed for use elsewhere in the test class
    self.other_artifacts['false'] = false

    Table('owners', metadata ,
          Column('id', Integer, primary_key=True, nullable=False),
          Column('data', String(30)))
    Table('categories', metadata,
          Column('id', Integer, primary_key=True, nullable=False),
          Column('name', String(20)))
    Table('tests', metadata ,
          Column('id', Integer, primary_key=True, nullable=False ),
          Column('owner_id', Integer, ForeignKey('owners.id'),
                 nullable=False),
          Column('category_id', Integer, ForeignKey('categories.id'),
                 nullable=False))
    # composite-PK association table carrying the boolean option
    Table('options', metadata ,
          Column('test_id', Integer, ForeignKey('tests.id'),
                 primary_key=True, nullable=False),
          Column('owner_id', Integer, ForeignKey('owners.id'),
                 primary_key=True, nullable=False),
          Column('someoption', sa.Boolean, server_default=false,
                 nullable=False))
def define_tables(self, metadata):
    """Create owners/categories/tests/options with a boolean server default."""
    # determine a literal value for "false" based on the dialect
    # FIXME: this DefaultClause setup is bogus.
    dialect = testing.db.dialect
    bp = sa.Boolean().dialect_impl(dialect).bind_processor(dialect)
    if bp:
        # let the dialect's bind processor render its boolean literal
        false = str(bp(False))
    elif testing.against("maxdb"):
        false = text("FALSE")
    else:
        false = str(False)
    # stashed for use elsewhere in the test class
    self.other_artifacts["false"] = false

    Table("owners", metadata,
          Column("id", Integer, primary_key=True, nullable=False),
          Column("data", String(30)))
    Table(
        "categories", metadata,
        Column("id", Integer, primary_key=True, nullable=False),
        Column("name", String(20))
    )
    Table(
        "tests", metadata,
        Column("id", Integer, primary_key=True, nullable=False),
        Column("owner_id", Integer, ForeignKey("owners.id"),
               nullable=False),
        Column("category_id", Integer, ForeignKey("categories.id"),
               nullable=False),
    )
    # composite-PK association table carrying the boolean option
    Table(
        "options", metadata,
        Column("test_id", Integer, ForeignKey("tests.id"),
               primary_key=True, nullable=False),
        Column("owner_id", Integer, ForeignKey("owners.id"),
               primary_key=True, nullable=False),
        Column("someoption", sa.Boolean, server_default=false,
               nullable=False),
    )
def setUpAll(self):
    """Build the 'default_test1' table with one column per default style.

    Sets module globals (t, f, f2, ts, currenttime, metadata,
    default_generator) that the individual tests compare against.
    """
    global t, f, f2, ts, currenttime, metadata, default_generator
    db = testing.db
    metadata = MetaData(db)
    default_generator = {'x':50}

    def mydefault():
        # stateful python-side default: yields 51, 52, 53, ...
        default_generator['x'] += 1
        return default_generator['x']

    def myupdate_with_ctx(ctx):
        # onupdate default executed with the statement's connection
        conn = ctx.connection
        return conn.execute(sa.select([sa.text('13')])).scalar()

    def mydefault_using_connection(ctx):
        conn = ctx.connection
        try:
            return conn.execute(sa.select([sa.text('12')])).scalar()
        finally:
            # ensure a "close()" on this connection does nothing,
            # since it's a "branched" connection
            conn.close()

    use_function_defaults = testing.against('postgres', 'mssql', 'maxdb')
    is_oracle = testing.against('oracle')

    # select "count(1)" returns different results on different DBs also
    # correct for "current_date" compatible as column default, value
    # differences
    currenttime = func.current_date(type_=sa.Date, bind=db)

    if is_oracle:
        ts = db.scalar(sa.select([func.trunc(func.sysdate(),
                                             sa.literal_column("'DAY'"),
                                             type_=sa.Date).label('today')]))
        assert isinstance(ts, datetime.date) and \
               not isinstance(ts, datetime.datetime)
        f = sa.select([func.length('abcdef')], bind=db).scalar()
        f2 = sa.select([func.length('abcdefghijk')], bind=db).scalar()
        # TODO: engine propagation across nested functions not working
        currenttime = func.trunc(currenttime,
                                 sa.literal_column("'DAY'"),
                                 bind=db, type_=sa.Date)
        def1 = currenttime
        def2 = func.trunc(sa.text("sysdate"),
                          sa.literal_column("'DAY'"),
                          type_=sa.Date)
        deftype = sa.Date
    elif use_function_defaults:
        f = sa.select([func.length('abcdef')], bind=db).scalar()
        f2 = sa.select([func.length('abcdefghijk')], bind=db).scalar()
        def1 = currenttime
        deftype = sa.Date
        if testing.against('maxdb'):
            def2 = sa.text("curdate")
        elif testing.against('mssql'):
            def2 = sa.text("getdate()")
        else:
            def2 = sa.text("current_date")
        ts = db.func.current_date().scalar()
    else:
        # plain python values for everything else
        f = len('abcdef')
        f2 = len('abcdefghijk')
        def1 = def2 = "3"
        ts = 3
        deftype = Integer

    t = Table('default_test1', metadata,
        # python function
        Column('col1', Integer, primary_key=True,
               default=mydefault),

        # python literal
        Column('col2', String(20),
               default="imthedefault",
               onupdate="im the update"),

        # preexecute expression
        Column('col3', Integer,
               default=func.length('abcdef'),
               onupdate=func.length('abcdefghijk')),

        # SQL-side default from sql expression
        Column('col4', deftype,
               server_default=def1),

        # SQL-side default from literal expression
        Column('col5', deftype,
               server_default=def2),

        # preexecute + update timestamp
        Column('col6', sa.Date,
               default=currenttime,
               onupdate=currenttime),

        Column('boolcol1', sa.Boolean, default=True),
        Column('boolcol2', sa.Boolean, default=False),

        # python function which uses ExecutionContext
        Column('col7', Integer,
               default=mydefault_using_connection,
               onupdate=myupdate_with_ctx),

        # python builtin
        Column('col8', sa.Date,
               default=datetime.date.today,
               onupdate=datetime.date.today),

        # combo
        Column('col9', String(20),
               default='py',
               server_default='ddl'))
    t.create()
def setUpAll(self):
    """Build the 'default_test1' table with one column per default style.

    Sets module globals (t, f, f2, ts, currenttime, metadata,
    default_generator) that the individual tests compare against.
    """
    global t, f, f2, ts, currenttime, metadata, default_generator
    db = testing.db
    metadata = MetaData(db)
    default_generator = {'x': 50}

    def mydefault():
        # stateful python-side default: yields 51, 52, 53, ...
        default_generator['x'] += 1
        return default_generator['x']

    def myupdate_with_ctx(ctx):
        # onupdate default executed with the statement's connection
        conn = ctx.connection
        return conn.execute(sa.select([sa.text('13')])).scalar()

    def mydefault_using_connection(ctx):
        conn = ctx.connection
        try:
            return conn.execute(sa.select([sa.text('12')])).scalar()
        finally:
            # ensure a "close()" on this connection does nothing,
            # since it's a "branched" connection
            conn.close()

    use_function_defaults = testing.against('postgres', 'mssql', 'maxdb')
    is_oracle = testing.against('oracle')

    # select "count(1)" returns different results on different DBs also
    # correct for "current_date" compatible as column default, value
    # differences
    currenttime = func.current_date(type_=sa.Date, bind=db)

    if is_oracle:
        ts = db.scalar(
            sa.select([
                func.trunc(func.sysdate(),
                           sa.literal_column("'DAY'"),
                           type_=sa.Date).label('today')
            ]))
        assert isinstance(
            ts, datetime.date) and not isinstance(ts, datetime.datetime)
        f = sa.select([func.length('abcdef')], bind=db).scalar()
        f2 = sa.select([func.length('abcdefghijk')], bind=db).scalar()
        # TODO: engine propagation across nested functions not working
        currenttime = func.trunc(currenttime,
                                 sa.literal_column("'DAY'"),
                                 bind=db,
                                 type_=sa.Date)
        def1 = currenttime
        def2 = func.trunc(sa.text("sysdate"),
                          sa.literal_column("'DAY'"),
                          type_=sa.Date)
        deftype = sa.Date
    elif use_function_defaults:
        f = sa.select([func.length('abcdef')], bind=db).scalar()
        f2 = sa.select([func.length('abcdefghijk')], bind=db).scalar()
        def1 = currenttime
        deftype = sa.Date
        if testing.against('maxdb'):
            def2 = sa.text("curdate")
        elif testing.against('mssql'):
            def2 = sa.text("getdate()")
        else:
            def2 = sa.text("current_date")
        ts = db.func.current_date().scalar()
    else:
        # plain python values for everything else
        f = len('abcdef')
        f2 = len('abcdefghijk')
        def1 = def2 = "3"
        ts = 3
        deftype = Integer

    t = Table(
        'default_test1',
        metadata,
        # python function
        Column('col1', Integer, primary_key=True, default=mydefault),
        # python literal
        Column('col2', String(20),
               default="imthedefault",
               onupdate="im the update"),
        # preexecute expression
        Column('col3', Integer,
               default=func.length('abcdef'),
               onupdate=func.length('abcdefghijk')),
        # SQL-side default from sql expression
        Column('col4', deftype, server_default=def1),
        # SQL-side default from literal expression
        Column('col5', deftype, server_default=def2),
        # preexecute + update timestamp
        Column('col6', sa.Date, default=currenttime, onupdate=currenttime),
        Column('boolcol1', sa.Boolean, default=True),
        Column('boolcol2', sa.Boolean, default=False),
        # python function which uses ExecutionContext
        Column('col7', Integer,
               default=mydefault_using_connection,
               onupdate=myupdate_with_ctx),
        # python builtin
        Column('col8', sa.Date,
               default=datetime.date.today,
               onupdate=datetime.date.today),
        # combo
        Column('col9', String(20), default='py', server_default='ddl'))
    t.create()
class ExecuteTest(TestBase):
    """Raw string-SQL execution across the DBAPI paramstyles.

    Each test targets one paramstyle (qmark, format, pyformat, named)
    and is restricted to the dialects known to use it.
    """

    def setUpAll(self):
        # shared 'users' table for all raw-SQL tests
        global users, metadata
        metadata = MetaData(testing.db)
        users = Table('users', metadata,
            Column('user_id', INT, primary_key=True),
            Column('user_name', VARCHAR(20)),
        )
        metadata.create_all()

    def tearDown(self):
        # clear rows between tests; the table itself stays
        testing.db.connect().execute(users.delete())

    def tearDownAll(self):
        metadata.drop_all()

    @testing.fails_on_everything_except('firebird', 'maxdb', 'sqlite')
    def test_raw_qmark(self):
        # 'qmark' paramstyle: positional '?' placeholders; params may be
        # given as tuples, lists, multiple sequences, or bare positionals
        for conn in (testing.db, testing.db.connect()):
            conn.execute(
                "insert into users (user_id, user_name) values (?, ?)",
                (1, "jack"))
            conn.execute(
                "insert into users (user_id, user_name) values (?, ?)",
                [2, "fred"])
            conn.execute(
                "insert into users (user_id, user_name) values (?, ?)",
                [3, "ed"], [4, "horse"])
            conn.execute(
                "insert into users (user_id, user_name) values (?, ?)",
                (5, "barney"), (6, "donkey"))
            conn.execute(
                "insert into users (user_id, user_name) values (?, ?)",
                7, 'sally')
            res = conn.execute("select * from users order by user_id")
            assert res.fetchall() == [(1, "jack"), (2, "fred"), (3, "ed"),
                                      (4, "horse"), (5, "barney"),
                                      (6, "donkey"), (7, 'sally')]
            conn.execute("delete from users")

    @testing.fails_on_everything_except('mysql', 'postgres')
    # some psycopg2 versions bomb this.
    def test_raw_sprintf(self):
        # 'format' paramstyle: positional '%s' placeholders
        for conn in (testing.db, testing.db.connect()):
            conn.execute(
                "insert into users (user_id, user_name) values (%s, %s)",
                [1, "jack"])
            conn.execute(
                "insert into users (user_id, user_name) values (%s, %s)",
                [2, "ed"], [3, "horse"])
            conn.execute(
                "insert into users (user_id, user_name) values (%s, %s)",
                4, 'sally')
            conn.execute("insert into users (user_id) values (%s)", 5)
            res = conn.execute("select * from users order by user_id")
            assert res.fetchall() == [(1, "jack"), (2, "ed"), (3, "horse"),
                                      (4, 'sally'), (5, None)]
            conn.execute("delete from users")

    # pyformat is supported for mysql, but skipping because a few driver
    # versions have a bug that bombs out on this test. (1.2.2b3,
    # 1.2.2c1, 1.2.2)
    @testing.skip_if(lambda: testing.against('mysql'), 'db-api flaky')
    @testing.fails_on_everything_except('postgres')
    def test_raw_python(self):
        # 'pyformat' paramstyle: named '%(name)s' placeholders, params
        # given as dicts or keyword arguments
        for conn in (testing.db, testing.db.connect()):
            conn.execute(
                "insert into users (user_id, user_name) values (%(id)s, %(name)s)",
                {
                    'id': 1,
                    'name': 'jack'
                })
            conn.execute(
                "insert into users (user_id, user_name) values (%(id)s, %(name)s)",
                {
                    'id': 2,
                    'name': 'ed'
                }, {
                    'id': 3,
                    'name': 'horse'
                })
            conn.execute(
                "insert into users (user_id, user_name) values (%(id)s, %(name)s)",
                id=4,
                name='sally')
            res = conn.execute("select * from users order by user_id")
            assert res.fetchall() == [(1, "jack"), (2, "ed"), (3, "horse"),
                                      (4, 'sally')]
            conn.execute("delete from users")

    @testing.fails_on_everything_except('sqlite', 'oracle')
    def test_raw_named(self):
        # 'named' paramstyle: ':name' placeholders
        for conn in (testing.db, testing.db.connect()):
            conn.execute(
                "insert into users (user_id, user_name) values (:id, :name)",
                {
                    'id': 1,
                    'name': 'jack'
                })
            conn.execute(
                "insert into users (user_id, user_name) values (:id, :name)",
                {
                    'id': 2,
                    'name': 'ed'
                }, {
                    'id': 3,
                    'name': 'horse'
                })
            conn.execute(
                "insert into users (user_id, user_name) values (:id, :name)",
                id=4,
                name='sally')
            res = conn.execute("select * from users order by user_id")
            assert res.fetchall() == [(1, "jack"), (2, "ed"), (3, "horse"),
                                      (4, 'sally')]
            conn.execute("delete from users")

    def test_exception_wrapping(self):
        # garbage SQL must surface as a DBAPIError wrapper, not a raw
        # driver exception
        for conn in (testing.db, testing.db.connect()):
            try:
                conn.execute("osdjafioajwoejoasfjdoifjowejfoawejqoijwef")
                assert False
            except tsa.exc.DBAPIError:
                assert True

    @testing.fails_on('mssql', 'rowcount returns -1')
    def test_empty_insert(self):
        """test that execute() interprets [] as a list with no params"""
        result = testing.db.execute(
            users.insert().values(user_name=bindparam('name')), [])
        self.assertEquals(result.rowcount, 1)