def define_tables(cls, metadata):
    if testing.against('oracle'):
        fk_args = dict(deferrable=True, initially='deferred')
    elif testing.against('mysql'):
        fk_args = {}
    else:
        fk_args = dict(onupdate='cascade')

    Table('users', metadata,
          Column('id', Integer, primary_key=True,
                 test_needs_autoincrement=True))

    Table('addresses', metadata,
          Column('id', Integer, primary_key=True,
                 test_needs_autoincrement=True),
          Column('user_id', Integer, ForeignKey('users.id', **fk_args)))
def _non_default_isolation_level(self):
    if testing.against('sqlite'):
        return 'READ UNCOMMITTED'
    elif testing.against('postgresql'):
        return 'SERIALIZABLE'
    else:
        assert False, "non default isolation level not known"
def _default_isolation_level(self):
    if testing.against('sqlite'):
        return 'SERIALIZABLE'
    elif testing.against('postgresql'):
        return 'READ COMMITTED'
    elif testing.against('mysql'):
        return "REPEATABLE READ"
    else:
        assert False, "default isolation level not known"
def _non_default_isolation_level(self):
    if testing.against('sqlite'):
        return 'READ UNCOMMITTED'
    elif testing.against('postgresql'):
        return 'SERIALIZABLE'
    elif testing.against('mysql'):
        return "SERIALIZABLE"
    else:
        assert False, "non default isolation level not known"
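For context, a minimal sketch (not part of the suite) of how helpers like _default_isolation_level() / _non_default_isolation_level() are typically consumed: pick the dialect-appropriate level, apply it to a single connection, and read it back through the dialect. The self.engine fixture and the test name below are assumptions.

# Illustrative sketch only -- assumes a `self.engine` fixture on the class.
def test_non_default_isolation_level_is_applied(self):
    level = self._non_default_isolation_level()
    conn = self.engine.connect().execution_options(isolation_level=level)
    try:
        # read the level back off the raw DBAPI connection via the dialect
        eq_(self.engine.dialect.get_isolation_level(conn.connection), level)
    finally:
        conn.close()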
def define_tables(cls, metadata):
    if testing.against('oracle'):
        fk_args = dict(deferrable=True, initially='deferred')
    else:
        fk_args = dict(onupdate='cascade')

    Table('person', metadata,
          Column('name', String(50), primary_key=True),
          Column('type', String(50), nullable=False),
          test_needs_fk=True)

    Table('engineer', metadata,
          Column('name', String(50), ForeignKey('person.name', **fk_args),
                 primary_key=True),
          Column('primary_language', String(50)),
          Column('boss_name', String(50),
                 ForeignKey('manager.name', **fk_args)),
          test_needs_fk=True)

    Table('manager', metadata,
          Column('name', String(50), ForeignKey('person.name', **fk_args),
                 primary_key=True),
          Column('paperwork', String(50)),
          test_needs_fk=True)
def test_updatemany(self):
    # MySQL-Python 1.2.2 breaks functions in execute_many :(
    if (testing.against('mysql+mysqldb') and
            testing.db.dialect.dbapi.version_info[:3] == (1, 2, 2)):
        return

    t.insert().execute({}, {}, {})

    t.update(t.c.col1 == sa.bindparam('pkval')).execute(
        {'pkval': 51, 'col7': None, 'col8': None, 'boolcol1': False})

    t.update(t.c.col1 == sa.bindparam('pkval')).execute(
        {'pkval': 51}, {'pkval': 52}, {'pkval': 53})

    l = t.select().execute()
    ctexec = currenttime.scalar()
    today = datetime.date.today()
    eq_(l.fetchall(),
        [(51, 'im the update', f2, ts, ts, ctexec, False, False,
          13, today, 'py'),
         (52, 'im the update', f2, ts, ts, ctexec, True, False,
          13, today, 'py'),
         (53, 'im the update', f2, ts, ts, ctexec, True, False,
          13, today, 'py')])
def define_tables(cls, metadata):
    if testing.against('oracle'):
        fk_args = dict(deferrable=True, initially='deferred')
    else:
        fk_args = dict(onupdate='cascade')

    Table('users', metadata,
          Column('id', Integer, primary_key=True,
                 test_needs_autoincrement=True),
          Column('username', String(50), unique=True),
          Column('fullname', String(100)),
          test_needs_fk=True)

    Table('addresses', metadata,
          Column('id', Integer, primary_key=True,
                 test_needs_autoincrement=True),
          Column('email', String(50)),
          Column('username', String(50),
                 ForeignKey('users.username', **fk_args)),
          test_needs_fk=True)
def test_limit(self):
    """test limit operations combined with lazy-load relationships."""

    users, items, order_items, orders, Item, User, Address, Order, addresses = (
        self.tables.users,
        self.tables.items,
        self.tables.order_items,
        self.tables.orders,
        self.classes.Item,
        self.classes.User,
        self.classes.Address,
        self.classes.Order,
        self.tables.addresses)

    mapper(Item, items)
    mapper(Order, orders, properties={
        'items': relationship(Item, secondary=order_items, lazy='select')
    })
    mapper(User, users, properties={
        'addresses': relationship(mapper(Address, addresses), lazy='select'),
        'orders': relationship(Order, lazy='select')
    })

    sess = create_session()
    q = sess.query(User)

    if testing.against('maxdb', 'mssql'):
        l = q.limit(2).all()
        assert self.static.user_all_result[:2] == l
    else:
        l = q.limit(2).offset(1).all()
        assert self.static.user_all_result[1:3] == l
def test_int_default_none_on_insert_reflected(self):
    metadata = self.metadata
    t = Table('x', metadata,
              Column('y', Integer, server_default='5', primary_key=True),
              Column('data', String(10)),
              implicit_returning=False)
    metadata.create_all()

    m2 = MetaData(metadata.bind)
    t2 = Table('x', m2, autoload=True, implicit_returning=False)

    r = t2.insert().execute(data='data')
    eq_(r.inserted_primary_key, [None])
    if testing.against('sqlite'):
        eq_(t2.select().execute().fetchall(), [(1, 'data')])
    else:
        eq_(t2.select().execute().fetchall(), [(5, 'data')])
def define_tables(cls, metadata):
    if testing.against('oracle'):
        fk_args = dict(deferrable=True, initially='deferred')
    else:
        fk_args = dict(onupdate='cascade')

    users = Table('users', metadata,
                  Column('username', String(50), primary_key=True),
                  Column('fullname', String(100)),
                  test_needs_fk=True)

    addresses = Table('addresses', metadata,
                      Column('email', String(50), primary_key=True),
                      Column('username', String(50),
                             ForeignKey('users.username', **fk_args)),
                      test_needs_fk=True)

    items = Table('items', metadata,
                  Column('itemname', String(50), primary_key=True),
                  Column('description', String(100)),
                  test_needs_fk=True)

    users_to_items = Table('users_to_items', metadata,
                           Column('username', String(50),
                                  ForeignKey('users.username', **fk_args),
                                  primary_key=True),
                           Column('itemname', String(50),
                                  ForeignKey('items.itemname', **fk_args),
                                  primary_key=True),
                           test_needs_fk=True)
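These natural-primary-key fixtures are normally paired with mappings whose relationships set passive_updates, so that a change to users.username is propagated by the database's ON UPDATE CASCADE (configured via fk_args above) rather than by per-row UPDATE statements issued from the ORM. A rough sketch of that pairing, assuming the users and addresses Table objects above are in scope and using placeholder User/Address classes:

# Sketch only: depends on the backend honoring ON UPDATE CASCADE,
# which is why related tests later in this file are marked
# fails_on('sqlite') / fails_on('oracle').
class User(object):
    pass

class Address(object):
    pass

mapper(User, users, properties={
    'addresses': relationship(Address, passive_updates=True)
})
mapper(Address, addresses)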
def Table(*args, **kw):
    """A schema.Table wrapper/hook for dialect-specific tweaks."""

    test_opts = dict([(k, kw.pop(k)) for k in kw.keys()
                      if k.startswith('test_')])

    kw.update(table_options)

    if testing.against('mysql'):
        if 'mysql_engine' not in kw and 'mysql_type' not in kw:
            if 'test_needs_fk' in test_opts or 'test_needs_acid' in test_opts:
                kw['mysql_engine'] = 'InnoDB'
            else:
                kw['mysql_engine'] = 'MyISAM'

    # Apply some default cascading rules for self-referential foreign keys.
    # MySQL InnoDB has some issues around selecting self-refs too.
    if testing.against('firebird'):
        table_name = args[0]
        unpack = (testing.config.db.dialect.
                  identifier_preparer.unformat_identifiers)

        # Only going after ForeignKeys in Columns.  May need to
        # expand to ForeignKeyConstraint too.
        fks = [fk for col in args if isinstance(col, schema.Column)
               for fk in col.foreign_keys]

        for fk in fks:
            # root around in raw spec
            ref = fk._colspec
            if isinstance(ref, schema.Column):
                name = ref.table.name
            else:
                # take just the table name: on FB there cannot be
                # a schema, so the first element is always the
                # table name, possibly followed by the field name
                name = unpack(ref)[0]
            if name == table_name:
                if fk.ondelete is None:
                    fk.ondelete = 'CASCADE'
                if fk.onupdate is None:
                    fk.onupdate = 'CASCADE'

    return schema.Table(*args, **kw)
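A short usage sketch (not from the harness) of what the wrapper above does with the test_* hints: on MySQL a table flagged test_needs_fk or test_needs_acid is created as InnoDB, and on Firebird a self-referential foreign key picks up CASCADE rules. The MetaData m and the nodes table here are illustrative only.

# Illustrative only -- assumes an existing MetaData `m`.
nodes = Table('nodes', m,
              Column('id', Integer, primary_key=True),
              Column('parent_id', Integer, ForeignKey('nodes.id')),
              test_needs_fk=True)
# against('mysql'):    the wrapper injects mysql_engine='InnoDB'
# against('firebird'): the self-referential FK is given
#                      ondelete='CASCADE' and onupdate='CASCADE'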
def setup_class(cls):
    global unicode_bind, metadata, t1, t2, t3

    unicode_bind = utf8_engine()

    metadata = MetaData(unicode_bind)
    t1 = Table('unitable1', metadata,
               Column(u'méil', Integer, primary_key=True),
               Column(u'\u6e2c\u8a66', Integer),
               test_needs_fk=True,
               )
    t2 = Table(u'Unitéble2', metadata,
               Column(u'méil', Integer, primary_key=True, key="a"),
               Column(u'\u6e2c\u8a66', Integer,
                      ForeignKey(u'unitable1.méil'), key="b"),
               test_needs_fk=True,
               )

    # Few DBs support Unicode foreign keys
    if testing.against('sqlite'):
        t3 = Table(u'\u6e2c\u8a66', metadata,
                   Column(u'\u6e2c\u8a66_id', Integer, primary_key=True,
                          autoincrement=False),
                   Column(u'unitable1_\u6e2c\u8a66', Integer,
                          ForeignKey(u'unitable1.\u6e2c\u8a66')),
                   Column(u'Unitéble2_b', Integer,
                          ForeignKey(u'Unitéble2.b')),
                   Column(u'\u6e2c\u8a66_self', Integer,
                          ForeignKey(u'\u6e2c\u8a66.\u6e2c\u8a66_id')),
                   test_needs_fk=True,
                   )
    else:
        t3 = Table(u'\u6e2c\u8a66', metadata,
                   Column(u'\u6e2c\u8a66_id', Integer, primary_key=True,
                          autoincrement=False),
                   Column(u'unitable1_\u6e2c\u8a66', Integer),
                   Column(u'Unitéble2_b', Integer),
                   Column(u'\u6e2c\u8a66_self', Integer),
                   test_needs_fk=True,
                   )
    metadata.create_all()
def define_tables(cls, metadata):
    if testing.against('oracle'):
        fk_args = dict(deferrable=True, initially='deferred')
    else:
        fk_args = dict(onupdate='cascade')

    Table('nodes', metadata,
          Column('name', String(50), primary_key=True),
          Column('parent', String(50), ForeignKey('nodes.name', **fk_args)),
          test_needs_fk=True)
def test_int_default_none_on_insert(self):
    metadata = self.metadata
    t = Table('x', metadata,
              Column('y', Integer, server_default='5', primary_key=True),
              Column('data', String(10)),
              implicit_returning=False)
    assert t._autoincrement_column is None
    metadata.create_all()

    r = t.insert().execute(data='data')
    eq_(r.inserted_primary_key, [None])
    if testing.against('sqlite'):
        eq_(t.select().execute().fetchall(), [(1, 'data')])
    else:
        eq_(t.select().execute().fetchall(), [(5, 'data')])
def Column(*args, **kw):
    """A schema.Column wrapper/hook for dialect-specific tweaks."""

    test_opts = dict([(k, kw.pop(k)) for k in kw.keys()
                      if k.startswith('test_')])

    col = schema.Column(*args, **kw)
    if 'test_needs_autoincrement' in test_opts and \
            kw.get('primary_key', False) and \
            testing.against('firebird', 'oracle'):
        def add_seq(c, tbl):
            c._init_items(
                schema.Sequence(_truncate_name(
                    testing.db.dialect,
                    tbl.name + '_' + c.name + '_seq'),
                    optional=True))
        event.listen(col, 'after_parent_attach', add_seq, propagate=True)
    return col
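For illustration (not part of the harness), the effect of the hook above on Firebird/Oracle: attaching such a column to a Table fires the after_parent_attach listener, which equips the column with an optional Sequence named after the table and column. The MetaData m and widgets table below are assumptions.

# Sketch only -- meaningful when running against firebird or oracle.
col = Column('id', Integer, primary_key=True, test_needs_autoincrement=True)
widgets = schema.Table('widgets', m, col)
# add_seq() has now attached a Sequence roughly named 'widgets_id_seq'
# (truncated to the dialect's max identifier length); optional=True means
# it is only used on backends that need an explicit sequence for integer PKs.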
def test_table_alias_1(self):
    table2 = self.tables.table2
    if testing.against('oracle'):
        self.assert_compile(
            table2.alias().select(),
            "SELECT table_with_exactly_29_c_1."
            "this_is_the_primarykey_column, "
            "table_with_exactly_29_c_1.this_is_the_data_column "
            "FROM table_with_exactly_29_characs "
            "table_with_exactly_29_c_1")
    else:
        self.assert_compile(
            table2.alias().select(),
            "SELECT table_with_exactly_29_c_1."
            "this_is_the_primarykey_column, "
            "table_with_exactly_29_c_1.this_is_the_data_column "
            "FROM table_with_exactly_29_characs AS "
            "table_with_exactly_29_c_1")
def test_insertmany(self):
    # MySQL-Python 1.2.2 breaks functions in execute_many :(
    if (testing.against('mysql+mysqldb') and
            testing.db.dialect.dbapi.version_info[:3] == (1, 2, 2)):
        return

    r = t.insert().execute({}, {}, {})

    ctexec = currenttime.scalar()
    l = t.select().execute()
    today = datetime.date.today()
    eq_(l.fetchall(),
        [(51, 'imthedefault', f, ts, ts, ctexec, True, False,
          12, today, 'py'),
         (52, 'imthedefault', f, ts, ts, ctexec, True, False,
          12, today, 'py'),
         (53, 'imthedefault', f, ts, ts, ctexec, True, False,
          12, today, 'py')])
def define_tables(cls, metadata):
    if testing.against('oracle'):
        fk_args = dict(deferrable=True, initially='deferred')
    else:
        fk_args = dict(onupdate='cascade')

    Table('users', metadata,
          Column('username', String(50), primary_key=True),
          test_needs_fk=True)

    Table('addresses', metadata,
          Column('username', String(50),
                 ForeignKey('users.username', **fk_args),
                 primary_key=True),
          Column('email', String(50), primary_key=True),
          Column('etc', String(50)),
          test_needs_fk=True)
class GenerativeQueryTest(fixtures.MappedTest):
    run_inserts = 'once'
    run_deletes = None

    @classmethod
    def define_tables(cls, metadata):
        Table('foo', metadata,
              Column('id', Integer, sa.Sequence('foo_id_seq'),
                     primary_key=True),
              Column('bar', Integer),
              Column('range', Integer))

    @classmethod
    def fixtures(cls):
        rows = tuple([(i, i % 10) for i in range(100)])
        foo_data = (('bar', 'range'), ) + rows
        return dict(foo=foo_data)

    @classmethod
    def setup_mappers(cls):
        foo = cls.tables.foo

        class Foo(cls.Basic):
            pass

        mapper(Foo, foo)

    def test_selectby(self):
        Foo = self.classes.Foo
        res = create_session().query(Foo).filter_by(range=5)
        assert res.order_by(Foo.bar)[0].bar == 5
        assert res.order_by(sa.desc(Foo.bar))[0].bar == 95

    @testing.fails_on('maxdb', 'FIXME: unknown')
    def test_slice(self):
        Foo = self.classes.Foo
        sess = create_session()
        query = sess.query(Foo).order_by(Foo.id)
        orig = query.all()

        assert query[1] == orig[1]
        assert query[-4] == orig[-4]
        assert query[-1] == orig[-1]

        assert list(query[10:20]) == orig[10:20]
        assert list(query[10:]) == orig[10:]
        assert list(query[:10]) == orig[:10]
        assert list(query[:10]) == orig[:10]
        assert list(query[5:5]) == orig[5:5]
        assert list(query[10:40:3]) == orig[10:40:3]
        assert list(query[-5:]) == orig[-5:]
        assert list(query[-2:-5]) == orig[-2:-5]
        assert list(query[-5:-2]) == orig[-5:-2]
        assert list(query[:-2]) == orig[:-2]

        assert query[10:20][5] == orig[10:20][5]

    @testing.uses_deprecated('Call to deprecated function apply_max')
    def test_aggregate(self):
        foo, Foo = self.tables.foo, self.classes.Foo
        sess = create_session()
        query = sess.query(Foo)
        assert query.count() == 100
        assert sess.query(func.min(foo.c.bar)).filter(
            foo.c.bar < 30).one() == (0, )
        assert sess.query(func.max(foo.c.bar)).filter(
            foo.c.bar < 30).one() == (29, )
        # Py3K
        #assert query.filter(foo.c.bar < 30).values(sa.func.max(foo.c.bar)).__next__()[0] == 29
        #assert query.filter(foo.c.bar < 30).values(sa.func.max(foo.c.bar)).__next__()[0] == 29
        # Py2K
        assert query.filter(foo.c.bar < 30).values(
            sa.func.max(foo.c.bar)).next()[0] == 29
        assert query.filter(foo.c.bar < 30).values(
            sa.func.max(foo.c.bar)).next()[0] == 29
        # end Py2K

    @testing.fails_if(
        lambda: testing.against('mysql+mysqldb') and
        testing.db.dialect.dbapi.version_info[:4] == (1, 2, 1, 'gamma'),
        "unknown incompatibility")
    def test_aggregate_1(self):
        foo = self.tables.foo

        query = create_session().query(func.sum(foo.c.bar))
        assert query.filter(foo.c.bar < 30).one() == (435, )

    @testing.fails_on('firebird', 'FIXME: unknown')
    @testing.fails_on(
        'mssql',
        'AVG produces an average as the original column type on mssql.')
    def test_aggregate_2(self):
        foo = self.tables.foo
        query = create_session().query(func.avg(foo.c.bar))
        avg = query.filter(foo.c.bar < 30).one()[0]
        eq_(float(round(avg, 1)), 14.5)

    @testing.fails_on(
        'mssql',
        'AVG produces an average as the original column type on mssql.')
    def test_aggregate_3(self):
        foo, Foo = self.tables.foo, self.classes.Foo

        query = create_session().query(Foo)
        # Py3K
        #avg_f = query.filter(foo.c.bar < 30).values(sa.func.avg(foo.c.bar)).__next__()[0]
        # Py2K
        avg_f = query.filter(foo.c.bar < 30).values(
            sa.func.avg(foo.c.bar)).next()[0]
        # end Py2K
        assert float(round(avg_f, 1)) == 14.5

        # Py3K
        #avg_o = query.filter(foo.c.bar < 30).values(sa.func.avg(foo.c.bar)).__next__()[0]
        # Py2K
        avg_o = query.filter(foo.c.bar < 30).values(
            sa.func.avg(foo.c.bar)).next()[0]
        # end Py2K
        assert float(round(avg_o, 1)) == 14.5

    def test_filter(self):
        Foo = self.classes.Foo
        query = create_session().query(Foo)
        assert query.count() == 100
        assert query.filter(Foo.bar < 30).count() == 30
        res2 = query.filter(Foo.bar < 30).filter(Foo.bar > 10)
        assert res2.count() == 19

    def test_order_by(self):
        Foo = self.classes.Foo
        query = create_session().query(Foo)
        assert query.order_by(Foo.bar)[0].bar == 0
        assert query.order_by(sa.desc(Foo.bar))[0].bar == 99

    def test_offset(self):
        Foo = self.classes.Foo
        query = create_session().query(Foo)
        assert list(query.order_by(Foo.bar).offset(10))[0].bar == 10

    def test_limit(self):
        Foo = self.classes.Foo
        query = create_session().query(Foo)
        assert len(list(query.limit(10))) == 10
def setup_class(cls):
    global t, f, f2, ts, currenttime, metadata, default_generator

    db = testing.db
    metadata = MetaData(db)
    default_generator = {'x': 50}

    def mydefault():
        default_generator['x'] += 1
        return default_generator['x']

    def myupdate_with_ctx(ctx):
        conn = ctx.connection
        return conn.execute(sa.select([sa.text('13')])).scalar()

    def mydefault_using_connection(ctx):
        conn = ctx.connection
        try:
            return conn.execute(sa.select([sa.text('12')])).scalar()
        finally:
            # ensure a "close()" on this connection does nothing,
            # since it's a "branched" connection
            conn.close()

    use_function_defaults = testing.against('postgresql', 'mssql', 'maxdb')
    is_oracle = testing.against('oracle')

    # select "count(1)" returns different results on different DBs also
    # correct for "current_date" compatible as column default, value
    # differences
    currenttime = func.current_date(type_=sa.Date, bind=db)

    if is_oracle:
        ts = db.scalar(sa.select([
            func.trunc(func.sysdate(), sa.literal_column("'DAY'"),
                       type_=sa.Date).label('today')]))
        assert isinstance(ts, datetime.date) and \
            not isinstance(ts, datetime.datetime)
        f = sa.select([func.length('abcdef')], bind=db).scalar()
        f2 = sa.select([func.length('abcdefghijk')], bind=db).scalar()
        # TODO: engine propagation across nested functions not working
        currenttime = func.trunc(currenttime, sa.literal_column("'DAY'"),
                                 bind=db, type_=sa.Date)
        def1 = currenttime
        def2 = func.trunc(sa.text("sysdate"), sa.literal_column("'DAY'"),
                          type_=sa.Date)
        deftype = sa.Date
    elif use_function_defaults:
        f = sa.select([func.length('abcdef')], bind=db).scalar()
        f2 = sa.select([func.length('abcdefghijk')], bind=db).scalar()
        def1 = currenttime
        deftype = sa.Date
        if testing.against('maxdb'):
            def2 = sa.text("curdate")
        elif testing.against('mssql'):
            def2 = sa.text("getdate()")
        else:
            def2 = sa.text("current_date")
        ts = db.scalar(func.current_date())
    else:
        f = len('abcdef')
        f2 = len('abcdefghijk')
        def1 = def2 = "3"
        ts = 3
        deftype = Integer

    t = Table('default_test1', metadata,
              # python function
              Column('col1', Integer, primary_key=True, default=mydefault),
              # python literal
              Column('col2', String(20),
                     default="imthedefault",
                     onupdate="im the update"),
              # preexecute expression
              Column('col3', Integer,
                     default=func.length('abcdef'),
                     onupdate=func.length('abcdefghijk')),
              # SQL-side default from sql expression
              Column('col4', deftype, server_default=def1),
              # SQL-side default from literal expression
              Column('col5', deftype, server_default=def2),
              # preexecute + update timestamp
              Column('col6', sa.Date,
                     default=currenttime,
                     onupdate=currenttime),
              Column('boolcol1', sa.Boolean, default=True),
              Column('boolcol2', sa.Boolean, default=False),
              # python function which uses ExecutionContext
              Column('col7', Integer,
                     default=mydefault_using_connection,
                     onupdate=myupdate_with_ctx),
              # python builtin
              Column('col8', sa.Date,
                     default=datetime.date.today,
                     onupdate=datetime.date.today),
              # combo
              Column('col9', String(20),
                     default='py',
                     server_default='ddl'))
    t.create()
class NaturalPKTest(fixtures.MappedTest):
    # MySQL 5.5 on Windows crashes (the entire server, not the client)
    # if you screw around with ON UPDATE CASCADE type of stuff.
    __requires__ = 'skip_mysql_on_windows',

    @classmethod
    def define_tables(cls, metadata):
        if testing.against('oracle'):
            fk_args = dict(deferrable=True, initially='deferred')
        else:
            fk_args = dict(onupdate='cascade')

        users = Table('users', metadata,
                      Column('username', String(50), primary_key=True),
                      Column('fullname', String(100)),
                      test_needs_fk=True)

        addresses = Table('addresses', metadata,
                          Column('email', String(50), primary_key=True),
                          Column('username', String(50),
                                 ForeignKey('users.username', **fk_args)),
                          test_needs_fk=True)

        items = Table('items', metadata,
                      Column('itemname', String(50), primary_key=True),
                      Column('description', String(100)),
                      test_needs_fk=True)

        users_to_items = Table('users_to_items', metadata,
                               Column('username', String(50),
                                      ForeignKey('users.username', **fk_args),
                                      primary_key=True),
                               Column('itemname', String(50),
                                      ForeignKey('items.itemname', **fk_args),
                                      primary_key=True),
                               test_needs_fk=True)

    @classmethod
    def setup_classes(cls):
        class User(cls.Comparable):
            pass

        class Address(cls.Comparable):
            pass

        class Item(cls.Comparable):
            pass

    def test_entity(self):
        users, User = self.tables.users, self.classes.User

        mapper(User, users)
        sess = create_session()
        u1 = User(username='jack', fullname='jack')
        sess.add(u1)
        sess.flush()
        assert sess.query(User).get('jack') is u1

        u1.username = 'ed'
        sess.flush()

        def go():
            assert sess.query(User).get('ed') is u1
        self.assert_sql_count(testing.db, go, 0)

        assert sess.query(User).get('jack') is None

        sess.expunge_all()
        u1 = sess.query(User).get('ed')
        eq_(User(username='ed', fullname='jack'), u1)

    def test_load_after_expire(self):
        users, User = self.tables.users, self.classes.User

        mapper(User, users)
        sess = create_session()
        u1 = User(username='jack', fullname='jack')
        sess.add(u1)
        sess.flush()
        assert sess.query(User).get('jack') is u1

        users.update(values={User.username: 'ed'}).execute(username='jack')

        # expire/refresh works off of primary key.  the PK is gone
        # in this case so there's no way to look it up.  criterion-
        # based session invalidation could solve this [ticket:911]
        sess.expire(u1)
        assert_raises(sa.orm.exc.ObjectDeletedError, getattr, u1, 'username')

        sess.expunge_all()
        assert sess.query(User).get('jack') is None
        assert sess.query(User).get('ed').fullname == 'jack'

    def test_flush_new_pk_after_expire(self):
        User, users = self.classes.User, self.tables.users
        mapper(User, users)
        sess = create_session()
        u1 = User(username='jack', fullname='jack')
        sess.add(u1)
        sess.flush()
        assert sess.query(User).get('jack') is u1
        sess.expire(u1)
        u1.username = 'ed'
        sess.flush()
        sess.expunge_all()
        assert sess.query(User).get('ed').fullname == 'jack'

    @testing.fails_on('sqlite', 'sqlite doesnt support ON UPDATE CASCADE')
    @testing.fails_on('oracle', 'oracle doesnt support ON UPDATE CASCADE')
    def test_onetomany_passive(self):
        self._test_onetomany(True)

    def test_onetomany_nonpassive(self):
        self._test_onetomany(False)

    def _test_onetomany(self, passive_updates):
        users, Address, addresses, User = (self.tables.users,
                                           self.classes.Address,
                                           self.tables.addresses,
                                           self.classes.User)

        mapper(User, users, properties={
            'addresses': relationship(Address,
                                      passive_updates=passive_updates)
        })
        mapper(Address, addresses)

        sess = create_session()
        u1 = User(username='jack', fullname='jack')
        u1.addresses.append(Address(email='jack1'))
        u1.addresses.append(Address(email='jack2'))
        sess.add(u1)
        sess.flush()

        assert sess.query(Address).get('jack1') is u1.addresses[0]

        u1.username = 'ed'
        sess.flush()
        assert u1.addresses[0].username == 'ed'

        sess.expunge_all()
        eq_([Address(username='ed'), Address(username='ed')],
            sess.query(Address).all())

        u1 = sess.query(User).get('ed')
        u1.username = 'jack'

        def go():
            sess.flush()
        if not passive_updates:
            # test passive_updates=False; load addresses,
            # update user, update 2 addresses
            self.assert_sql_count(testing.db, go, 4)
        else:
            # test passive_updates=True; update user
            self.assert_sql_count(testing.db, go, 1)
        sess.expunge_all()
        assert User(username='jack', addresses=[
            Address(username='jack'),
            Address(username='jack')]) == \
            sess.query(User).get('jack')

        u1 = sess.query(User).get('jack')
        u1.addresses = []
        u1.username = 'fred'
        sess.flush()
        sess.expunge_all()
        assert sess.query(Address).get('jack1').username is None
        u1 = sess.query(User).get('fred')
        eq_(User(username='fred', fullname='jack'), u1)

    @testing.fails_on('sqlite', 'sqlite doesnt support ON UPDATE CASCADE')
    @testing.fails_on('oracle', 'oracle doesnt support ON UPDATE CASCADE')
    def test_manytoone_passive(self):
        self._test_manytoone(True)

    def test_manytoone_nonpassive(self):
        self._test_manytoone(False)

    def _test_manytoone(self, passive_updates):
        users, Address, addresses, User = (self.tables.users,
                                           self.classes.Address,
                                           self.tables.addresses,
                                           self.classes.User)

        mapper(User, users)
        mapper(Address, addresses, properties={
            'user': relationship(User, passive_updates=passive_updates)
        })

        sess = create_session()
        a1 = Address(email='jack1')
        a2 = Address(email='jack2')
        u1 = User(username='jack', fullname='jack')
        a1.user = u1
        a2.user = u1
        sess.add(a1)
        sess.add(a2)
        sess.flush()

        u1.username = 'ed'

        def go():
            sess.flush()
        if passive_updates:
            self.assert_sql_count(testing.db, go, 1)
        else:
            self.assert_sql_count(testing.db, go, 3)

        def go():
            sess.flush()
        self.assert_sql_count(testing.db, go, 0)

        assert a1.username == a2.username == 'ed'
        sess.expunge_all()
        eq_([Address(username='ed'), Address(username='ed')],
            sess.query(Address).all())

    @testing.fails_on('sqlite', 'sqlite doesnt support ON UPDATE CASCADE')
    @testing.fails_on('oracle', 'oracle doesnt support ON UPDATE CASCADE')
    def test_onetoone_passive(self):
        self._test_onetoone(True)

    def test_onetoone_nonpassive(self):
        self._test_onetoone(False)

    def _test_onetoone(self, passive_updates):
        users, Address, addresses, User = (self.tables.users,
                                           self.classes.Address,
                                           self.tables.addresses,
                                           self.classes.User)

        mapper(User, users, properties={
            "address": relationship(Address,
                                    passive_updates=passive_updates,
                                    uselist=False)
        })
        mapper(Address, addresses)

        sess = create_session()
        u1 = User(username='jack', fullname='jack')
        sess.add(u1)
        sess.flush()

        a1 = Address(email='jack1')
        u1.address = a1
        sess.add(a1)
        sess.flush()

        u1.username = 'ed'

        def go():
            sess.flush()
        if passive_updates:
            sess.expire(u1, ['address'])
            self.assert_sql_count(testing.db, go, 1)
        else:
            self.assert_sql_count(testing.db, go, 2)

        def go():
            sess.flush()
        self.assert_sql_count(testing.db, go, 0)

        sess.expunge_all()
        eq_([Address(username='ed')], sess.query(Address).all())

    @testing.fails_on('sqlite', 'sqlite doesnt support ON UPDATE CASCADE')
    @testing.fails_on('oracle', 'oracle doesnt support ON UPDATE CASCADE')
    def test_bidirectional_passive(self):
        self._test_bidirectional(True)

    def test_bidirectional_nonpassive(self):
        self._test_bidirectional(False)

    def _test_bidirectional(self, passive_updates):
        users, Address, addresses, User = (self.tables.users,
                                           self.classes.Address,
                                           self.tables.addresses,
                                           self.classes.User)

        mapper(User, users)
        mapper(Address, addresses, properties={
            'user': relationship(User, passive_updates=passive_updates,
                                 backref='addresses')
        })

        sess = create_session()
        a1 = Address(email='jack1')
        a2 = Address(email='jack2')
        u1 = User(username='jack', fullname='jack')
        a1.user = u1
        a2.user = u1
        sess.add(a1)
        sess.add(a2)
        sess.flush()

        u1.username = 'ed'
        (ad1, ad2) = sess.query(Address).all()
        eq_([Address(username='jack'), Address(username='jack')], [ad1, ad2])

        def go():
            sess.flush()
        if passive_updates:
            self.assert_sql_count(testing.db, go, 1)
        else:
            self.assert_sql_count(testing.db, go, 3)
        eq_([Address(username='ed'), Address(username='ed')], [ad1, ad2])

        sess.expunge_all()
        eq_([Address(username='ed'), Address(username='ed')],
            sess.query(Address).all())

        u1 = sess.query(User).get('ed')
        assert len(u1.addresses) == 2    # load addresses
        u1.username = 'fred'

        def go():
            sess.flush()
        # check that the passive_updates is on on the other side
        if passive_updates:
            self.assert_sql_count(testing.db, go, 1)
        else:
            self.assert_sql_count(testing.db, go, 3)
        sess.expunge_all()
        eq_([Address(username='fred'), Address(username='fred')],
            sess.query(Address).all())

    @testing.fails_on('sqlite', 'sqlite doesnt support ON UPDATE CASCADE')
    @testing.fails_on('oracle', 'oracle doesnt support ON UPDATE CASCADE')
    def test_manytomany_passive(self):
        self._test_manytomany(True)

    # mysqldb executemany() of the association table fails to
    # report the correct row count
    @testing.fails_if(
        lambda: testing.against('mysql') and not testing.against('+zxjdbc'))
    def test_manytomany_nonpassive(self):
        self._test_manytomany(False)

    def _test_manytomany(self, passive_updates):
        users, items, Item, User, users_to_items = (
            self.tables.users,
            self.tables.items,
            self.classes.Item,
            self.classes.User,
            self.tables.users_to_items)

        mapper(User, users, properties={
            'items': relationship(Item, secondary=users_to_items,
                                  backref='users',
                                  passive_updates=passive_updates)
        })
        mapper(Item, items)

        sess = create_session()
        u1 = User(username='jack')
        u2 = User(username='fred')
        i1 = Item(itemname='item1')
        i2 = Item(itemname='item2')
        u1.items.append(i1)
        u1.items.append(i2)
        i2.users.append(u2)
        sess.add(u1)
        sess.add(u2)
        sess.flush()

        r = sess.query(Item).all()
        # ComparableEntity can't handle a comparison with the backrefs
        # involved....
        eq_(Item(itemname='item1'), r[0])
        eq_(['jack'], [u.username for u in r[0].users])
        eq_(Item(itemname='item2'), r[1])
        eq_(['jack', 'fred'], [u.username for u in r[1].users])

        u2.username = 'ed'

        def go():
            sess.flush()
        go()

        def go():
            sess.flush()
        self.assert_sql_count(testing.db, go, 0)

        sess.expunge_all()
        r = sess.query(Item).all()
        eq_(Item(itemname='item1'), r[0])
        eq_(['jack'], [u.username for u in r[0].users])
        eq_(Item(itemname='item2'), r[1])
        eq_(['ed', 'jack'], sorted([u.username for u in r[1].users]))

        sess.expunge_all()
        u2 = sess.query(User).get(u2.username)
        u2.username = 'wendy'
        sess.flush()
        r = sess.query(Item).with_parent(u2).all()
        eq_(Item(itemname='item2'), r[0])
def test_execute_events(self):
    stmts = []
    cursor_stmts = []

    def execute(conn, clauseelement, multiparams, params):
        stmts.append((str(clauseelement), params, multiparams))

    def cursor_execute(conn, cursor, statement, parameters,
                       context, executemany):
        cursor_stmts.append((str(statement), parameters, None))

    for engine in [
        engines.testing_engine(options=dict(implicit_returning=False)),
        engines.testing_engine(options=dict(implicit_returning=False,
                                            strategy='threadlocal'))
    ]:
        event.listen(engine, 'before_execute', execute)
        event.listen(engine, 'before_cursor_execute', cursor_execute)

        m = MetaData(engine)
        t1 = Table('t1', m,
                   Column('c1', Integer, primary_key=True),
                   Column('c2', String(50), default=func.lower('Foo'),
                          primary_key=True))
        m.create_all()
        try:
            t1.insert().execute(c1=5, c2='some data')
            t1.insert().execute(c1=6)
            eq_(engine.execute('select * from t1').fetchall(),
                [(5, 'some data'), (6, 'foo')])
        finally:
            m.drop_all()
            engine.dispose()

        compiled = [('CREATE TABLE t1', {}, None),
                    ('INSERT INTO t1 (c1, c2)',
                     {'c2': 'some data', 'c1': 5}, None),
                    ('INSERT INTO t1 (c1, c2)', {'c1': 6}, None),
                    ('select * from t1', {}, None),
                    ('DROP TABLE t1', {}, None)]

        if not testing.against('oracle+zxjdbc'):
            # or engine.dialect.preexecute_pk_sequences:
            cursor = [
                ('CREATE TABLE t1', {}, ()),
                ('INSERT INTO t1 (c1, c2)',
                 {'c2': 'some data', 'c1': 5}, (5, 'some data')),
                ('SELECT lower', {'lower_2': 'Foo'}, ('Foo', )),
                ('INSERT INTO t1 (c1, c2)',
                 {'c2': 'foo', 'c1': 6}, (6, 'foo')),
                ('select * from t1', {}, ()),
                ('DROP TABLE t1', {}, ()),
            ]
        else:
            insert2_params = 6, 'Foo'
            if testing.against('oracle+zxjdbc'):
                insert2_params += (ReturningParam(12), )
            cursor = [('CREATE TABLE t1', {}, ()),
                      ('INSERT INTO t1 (c1, c2)',
                       {'c2': 'some data', 'c1': 5}, (5, 'some data')),
                      ('INSERT INTO t1 (c1, c2)',
                       {'c1': 6, 'lower_2': 'Foo'}, insert2_params),
                      ('select * from t1', {}, ()),
                      ('DROP TABLE t1', {}, ())]
            # bind param name 'lower_2' might be incorrect

        self._assert_stmts(compiled, stmts)
        self._assert_stmts(cursor, cursor_stmts)
def test_proxy(self):
    stmts = []
    cursor_stmts = []

    class MyProxy(ConnectionProxy):
        def execute(self, conn, execute, clauseelement,
                    *multiparams, **params):
            stmts.append((str(clauseelement), params, multiparams))
            return execute(clauseelement, *multiparams, **params)

        def cursor_execute(self, execute, cursor, statement,
                           parameters, context, executemany):
            cursor_stmts.append((str(statement), parameters, None))
            return execute(cursor, statement, parameters, context)

    def assert_stmts(expected, received):
        for stmt, params, posn in expected:
            if not received:
                assert False
            while received:
                teststmt, testparams, testmultiparams = received.pop(0)
                teststmt = re.compile(r'[\n\t ]+', re.M).sub(
                    ' ', teststmt).strip()
                if teststmt.startswith(stmt) and \
                        (testparams == params or testparams == posn):
                    break

    for engine in \
            engines.testing_engine(options=dict(implicit_returning=False,
                                                proxy=MyProxy())), \
            engines.testing_engine(options=dict(implicit_returning=False,
                                                proxy=MyProxy(),
                                                strategy='threadlocal')):
        m = MetaData(engine)
        t1 = Table('t1', m,
                   Column('c1', Integer, primary_key=True),
                   Column('c2', String(50), default=func.lower('Foo'),
                          primary_key=True))
        m.create_all()
        try:
            t1.insert().execute(c1=5, c2='some data')
            t1.insert().execute(c1=6)
            eq_(engine.execute('select * from t1').fetchall(),
                [(5, 'some data'), (6, 'foo')])
        finally:
            m.drop_all()
            engine.dispose()

        compiled = [('CREATE TABLE t1', {}, None),
                    ('INSERT INTO t1 (c1, c2)',
                     {'c2': 'some data', 'c1': 5}, None),
                    ('INSERT INTO t1 (c1, c2)', {'c1': 6}, None),
                    ('select * from t1', {}, None),
                    ('DROP TABLE t1', {}, None)]

        if not testing.against('oracle+zxjdbc'):
            # or engine.dialect.preexecute_pk_sequences:
            cursor = [
                ('CREATE TABLE t1', {}, ()),
                ('INSERT INTO t1 (c1, c2)',
                 {'c2': 'some data', 'c1': 5}, (5, 'some data')),
                ('SELECT lower', {'lower_2': 'Foo'}, ('Foo', )),
                ('INSERT INTO t1 (c1, c2)',
                 {'c2': 'foo', 'c1': 6}, (6, 'foo')),
                ('select * from t1', {}, ()),
                ('DROP TABLE t1', {}, ()),
            ]
        else:
            insert2_params = 6, 'Foo'
            if testing.against('oracle+zxjdbc'):
                insert2_params += (ReturningParam(12), )
            cursor = [('CREATE TABLE t1', {}, ()),
                      ('INSERT INTO t1 (c1, c2)',
                       {'c2': 'some data', 'c1': 5}, (5, 'some data')),
                      ('INSERT INTO t1 (c1, c2)',
                       {'c1': 6, 'lower_2': 'Foo'}, insert2_params),
                      ('select * from t1', {}, ()),
                      ('DROP TABLE t1', {}, ())]
            # bind param name 'lower_2' might be incorrect

        assert_stmts(compiled, stmts)
        assert_stmts(cursor, cursor_stmts)