def test_reflect_nvarchar(self):
    metadata = self.metadata
    Table(
        "tnv",
        metadata,
        Column("nv_data", sqltypes.NVARCHAR(255)),
        Column("c_data", sqltypes.NCHAR(20)),
    )
    metadata.create_all()
    m2 = MetaData(testing.db)
    t2 = Table("tnv", m2, autoload=True)
    assert isinstance(t2.c.nv_data.type, sqltypes.NVARCHAR)
    assert isinstance(t2.c.c_data.type, sqltypes.NCHAR)

    if testing.against("oracle+cx_oracle"):
        assert isinstance(
            t2.c.nv_data.type.dialect_impl(testing.db.dialect),
            cx_oracle._OracleUnicodeStringNCHAR,
        )
        assert isinstance(
            t2.c.c_data.type.dialect_impl(testing.db.dialect),
            cx_oracle._OracleNChar,
        )

    data = u("m’a réveillé.")
    with testing.db.connect() as conn:
        conn.execute(t2.insert(), dict(nv_data=data, c_data=data))
        nv_data, c_data = conn.execute(t2.select()).first()
        eq_(nv_data, data)
        eq_(c_data, data + (" " * 7))  # char is space padded
        assert isinstance(nv_data, util.text_type)
        assert isinstance(c_data, util.text_type)
def _test_lastrow_accessor(self, table_, values, assertvalues):
    """Tests the inserted_primary_key and lastrow_has_defaults() accessors."""

    def insert_values(engine, table_, values):
        """
        Inserts a row into a table, returns the full dict of values
        INSERTed, including defaults that fired off on the DB side,
        and detects rows that had defaults and post-fetches.
        """

        # verify implicit_returning is working
        if engine.dialect.implicit_returning:
            ins = table_.insert()
            comp = ins.compile(engine, column_keys=list(values))
            if not set(values).issuperset(
                c.key for c in table_.primary_key
            ):
                is_(bool(comp.returning), True)

        result = engine.execute(table_.insert(), **values)
        ret = values.copy()

        for col, id_ in zip(
            table_.primary_key, result.inserted_primary_key
        ):
            ret[col.key] = id_

        if result.lastrow_has_defaults():
            criterion = and_(
                *[
                    col == id_
                    for col, id_ in zip(
                        table_.primary_key, result.inserted_primary_key
                    )
                ]
            )
            row = engine.execute(table_.select(criterion)).first()
            for c in table_.c:
                ret[c.key] = row[c]
        return ret

    if testing.against("firebird", "postgresql", "oracle", "mssql"):
        assert testing.db.dialect.implicit_returning

    if testing.db.dialect.implicit_returning:
        test_engines = [
            engines.testing_engine(options={"implicit_returning": False}),
            engines.testing_engine(options={"implicit_returning": True}),
        ]
    else:
        test_engines = [testing.db]

    for engine in test_engines:
        try:
            table_.create(bind=engine, checkfirst=True)
            i = insert_values(engine, table_, values)
            eq_(i, assertvalues)
        finally:
            table_.drop(bind=engine)
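# A minimal, hypothetical illustration (not taken from the suite) of how
# _test_lastrow_accessor is typically driven: the table name, columns, and
# expected values below are assumptions for the sake of example, relying on
# the module's usual Table/Column/Integer/String/MetaData imports.
def test_lastrow_accessor_sketch(self):
    metadata = MetaData()
    t = Table(
        "related",
        metadata,
        Column(
            "id", Integer, primary_key=True, test_needs_autoincrement=True
        ),
        Column("data", String(30)),
    )
    self._test_lastrow_accessor(t, {"data": "hi"}, {"id": 1, "data": "hi"})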
def test_year_types(self):
    specs = [
        (mysql.YEAR(), mysql.YEAR(display_width=4)),
        (mysql.YEAR(display_width=4), mysql.YEAR(display_width=4)),
    ]

    if testing.against("mysql>=8.0.19"):
        self._run_test(specs, [])
    else:
        self._run_test(specs, ["display_width"])
def test_outer_joinedload_w_limit(self):
    User = self.classes.User
    sess = Session()
    q = sess.query(User).options(
        joinedload(User.addresses, innerjoin=False)
    )

    if testing.against("postgresql"):
        q = q.with_for_update(of=User)
    else:
        q = q.with_for_update()

    q = q.limit(1)

    if testing.against("oracle"):
        # Oracle rejects FOR UPDATE on the subquery produced by the
        # joined eager load combined with LIMIT (ORA-02014)
        assert_raises_message(exc.DatabaseError, "ORA-02014", q.all)
    else:
        q.all()

    sess.close()
def test_literal_returning(self):
    if testing.against("postgresql"):
        literal_true = "true"
    else:
        literal_true = "1"

    result4 = testing.db.execute(
        'insert into tables (id, persons, "full") '
        "values (5, 10, %s) returning persons" % literal_true
    )
    eq_([dict(row) for row in result4], [{"persons": 10}])
def test_integer_types(self):
    specs = []
    for type_ in [
        mysql.TINYINT,
        mysql.SMALLINT,
        mysql.MEDIUMINT,
        mysql.INTEGER,
        mysql.BIGINT,
    ]:
        for display_width in [None, 4, 7]:
            for unsigned in [False, True]:
                for zerofill in [None, True]:
                    kw = {}
                    if display_width:
                        kw["display_width"] = display_width
                    if unsigned is not None:
                        kw["unsigned"] = unsigned
                    if zerofill is not None:
                        kw["zerofill"] = zerofill

                    zerofill = bool(zerofill)
                    source_type = type_(**kw)

                    if display_width is None:
                        display_width = {
                            mysql.MEDIUMINT: 9,
                            mysql.SMALLINT: 6,
                            mysql.TINYINT: 4,
                            mysql.INTEGER: 11,
                            mysql.BIGINT: 20,
                        }[type_]

                    if zerofill:
                        unsigned = True

                    expected_type = type_(
                        display_width=display_width,
                        unsigned=unsigned,
                        zerofill=zerofill,
                    )
                    specs.append((source_type, expected_type))

    specs.extend(
        [
            (SmallInteger(), mysql.SMALLINT(display_width=6)),
            (Integer(), mysql.INTEGER(display_width=11)),
            (BigInteger(), mysql.BIGINT(display_width=20)),
        ]
    )

    # TODO: mysql 8.0.19-ish doesn't consistently report on display_width;
    # need to test this more accurately for the cases where it does
    if testing.against("mysql >= 8.0.19"):
        self._run_test(specs, ["unsigned", "zerofill"])
    else:
        self._run_test(specs, ["display_width", "unsigned", "zerofill"])
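# For context (an assumption about the harness, which is defined elsewhere and
# not shown here): each (source_type, expected_type) pair is understood to mean
# "emit DDL for a column of source_type, reflect it back, and compare it to
# expected_type", with the second argument to _run_test naming which type
# attributes are compared.  That is why display_width is skipped on
# MySQL >= 8.0.19, where the server no longer reports it consistently.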
def test_limit(self):
    """test limit operations combined with lazy-load relationships."""

    (
        users,
        items,
        order_items,
        orders,
        Item,
        User,
        Address,
        Order,
        addresses,
    ) = (
        self.tables.users,
        self.tables.items,
        self.tables.order_items,
        self.tables.orders,
        self.classes.Item,
        self.classes.User,
        self.classes.Address,
        self.classes.Order,
        self.tables.addresses,
    )

    mapper(Item, items)
    mapper(
        Order,
        orders,
        properties={
            "items": relationship(Item, secondary=order_items, lazy="select")
        },
    )
    mapper(
        User,
        users,
        properties={
            "addresses": relationship(
                mapper(Address, addresses), lazy="select"
            ),
            "orders": relationship(Order, lazy="select"),
        },
    )

    sess = create_session()
    q = sess.query(User)

    if testing.against("mssql"):
        result = q.limit(2).all()
        assert self.static.user_all_result[:2] == result
    else:
        result = q.limit(2).offset(1).all()
        assert self.static.user_all_result[1:3] == result
def get_objects_skipping_sqlite_issue():
    # pysqlite keeps adding weakref objects which only
    # get reset after 220 iterations.  We'd like to keep these
    # tests under 50 iterations and ideally about ten, so
    # just filter them out so that we get a "flatline" more quickly.

    if testing.against("sqlite+pysqlite"):
        return [
            o for o in gc.get_objects() if not isinstance(o, weakref.ref)
        ]
    else:
        return gc.get_objects()
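# A rough, hypothetical sketch (not from the suite) of the "flatline" style
# check the helper above is meant to feed: run a unit of work repeatedly and
# assert that the total object count stops growing after a warm-up pass.
# The function name, sample count, and comparison are illustrative assumptions.
def assert_flatline(work, samples=10):
    counts = []
    for _ in range(samples):
        work()
        gc.collect()
        counts.append(len(get_objects_skipping_sqlite_issue()))
    # treat the first sample as warm-up; after that the count should level off
    assert counts[-1] <= counts[1], counts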
def test_outer_joinedload_wo_limit(self):
    User = self.classes.User
    sess = Session()
    q = sess.query(User).options(
        joinedload(User.addresses, innerjoin=False)
    )

    if testing.against("postgresql"):
        q = q.with_for_update(of=User)
    else:
        q = q.with_for_update()

    q.all()
    sess.close()
def define_tables(cls, metadata):
    if testing.against("oracle"):
        fk_args = dict(deferrable=True, initially="deferred")
    elif testing.against("mysql"):
        fk_args = {}
    else:
        fk_args = dict(onupdate="cascade")

    Table(
        "users",
        metadata,
        Column(
            "id", Integer, primary_key=True, test_needs_autoincrement=True
        ),
    )

    Table(
        "addresses",
        metadata,
        Column(
            "id", Integer, primary_key=True, test_needs_autoincrement=True
        ),
        Column("user_id", Integer, ForeignKey("users.id", **fk_args)),
    )
def test_int_default_none_on_insert(self, connection):
    metadata = self.metadata
    t = Table(
        "x",
        metadata,
        Column("y", Integer, server_default="5", primary_key=True),
        Column("data", String(10)),
        implicit_returning=False,
    )
    assert t._autoincrement_column is None
    metadata.create_all(connection)

    r = connection.execute(t.insert(), dict(data="data"))
    eq_(r.inserted_primary_key, [None])
    if testing.against("sqlite"):
        eq_(list(connection.execute(t.select())), [(1, "data")])
    else:
        eq_(list(connection.execute(t.select())), [(5, "data")])
def test_reflect_unicode_no_nvarchar(self):
    metadata = self.metadata
    Table("tnv", metadata, Column("data", sqltypes.Unicode(255)))
    metadata.create_all()
    m2 = MetaData(testing.db)
    t2 = Table("tnv", m2, autoload=True)
    assert isinstance(t2.c.data.type, sqltypes.VARCHAR)

    if testing.against("oracle+cx_oracle"):
        assert isinstance(
            t2.c.data.type.dialect_impl(testing.db.dialect),
            cx_oracle._OracleString,
        )

    data = u("m’a réveillé.")
    t2.insert().execute(data=data)
    res = t2.select().execute().first()["data"]
    eq_(res, data)
    assert isinstance(res, util.text_type)
def test_int_default_none_on_insert_reflected(self, connection):
    metadata = self.metadata
    Table(
        "x",
        metadata,
        Column("y", Integer, server_default="5", primary_key=True),
        Column("data", String(10)),
        implicit_returning=False,
    )
    metadata.create_all(connection)

    m2 = MetaData()
    t2 = Table("x", m2, autoload_with=connection, implicit_returning=False)

    r = connection.execute(t2.insert(), dict(data="data"))
    eq_(r.inserted_primary_key, [None])
    if testing.against("sqlite"):
        eq_(list(connection.execute(t2.select())), [(1, "data")])
    else:
        eq_(list(connection.execute(t2.select())), [(5, "data")])
def test_text_doesnt_explode(self):
    for s in [
        select(
            [
                case(
                    [(info_table.c.info == "pk_4_data", text("'yes'"))],
                    else_=text("'no'"),
                )
            ]
        ).order_by(info_table.c.info),
        select(
            [
                case(
                    [
                        (
                            info_table.c.info == "pk_4_data",
                            literal_column("'yes'"),
                        )
                    ],
                    else_=literal_column("'no'"),
                )
            ]
        ).order_by(info_table.c.info),
    ]:
        if testing.against("firebird"):
            # firebird pads the "no" results out to the width of "yes"
            eq_(
                s.execute().fetchall(),
                [
                    ("no ",),
                    ("no ",),
                    ("no ",),
                    ("yes",),
                    ("no ",),
                    ("no ",),
                ],
            )
        else:
            eq_(
                s.execute().fetchall(),
                [("no",), ("no",), ("no",), ("yes",), ("no",), ("no",)],
            )
def setup_class(cls):
    global metadata, t1, t2, t3
    metadata = MetaData(testing.db)
    t1 = Table(
        u("unitable1"),
        metadata,
        Column(u("méil"), Integer, primary_key=True),
        Column(ue("\u6e2c\u8a66"), Integer),
        test_needs_fk=True,
    )
    t2 = Table(
        u("Unitéble2"),
        metadata,
        Column(u("méil"), Integer, primary_key=True, key="a"),
        Column(
            ue("\u6e2c\u8a66"),
            Integer,
            ForeignKey(u("unitable1.méil")),
            key="b",
        ),
        test_needs_fk=True,
    )

    # Few DBs support Unicode foreign keys
    if testing.against("sqlite"):
        t3 = Table(
            ue("\u6e2c\u8a66"),
            metadata,
            Column(
                ue("\u6e2c\u8a66_id"),
                Integer,
                primary_key=True,
                autoincrement=False,
            ),
            Column(
                ue("unitable1_\u6e2c\u8a66"),
                Integer,
                ForeignKey(ue("unitable1.\u6e2c\u8a66")),
            ),
            Column(u("Unitéble2_b"), Integer, ForeignKey(u("Unitéble2.b"))),
            Column(
                ue("\u6e2c\u8a66_self"),
                Integer,
                ForeignKey(ue("\u6e2c\u8a66.\u6e2c\u8a66_id")),
            ),
            test_needs_fk=True,
        )
    else:
        t3 = Table(
            ue("\u6e2c\u8a66"),
            metadata,
            Column(
                ue("\u6e2c\u8a66_id"),
                Integer,
                primary_key=True,
                autoincrement=False,
            ),
            Column(ue("unitable1_\u6e2c\u8a66"), Integer),
            Column(u("Unitéble2_b"), Integer),
            Column(ue("\u6e2c\u8a66_self"), Integer),
            test_needs_fk=True,
        )
    metadata.create_all()
def define_tables(cls, metadata):
    default_generator = cls.default_generator = {"x": 50}

    def mydefault():
        default_generator["x"] += 1
        return default_generator["x"]

    def myupdate_with_ctx(ctx):
        conn = ctx.connection
        return conn.execute(sa.select([sa.text("13")])).scalar()

    def mydefault_using_connection(ctx):
        conn = ctx.connection
        return conn.execute(sa.select([sa.text("12")])).scalar()

    use_function_defaults = testing.against("postgresql", "mssql")
    is_oracle = testing.against("oracle")

    class MyClass(object):
        @classmethod
        def gen_default(cls, ctx):
            return "hi"

    class MyType(TypeDecorator):
        impl = String(50)

        def process_bind_param(self, value, dialect):
            if value is not None:
                value = "BIND" + value
            return value

    cls.f = 6
    cls.f2 = 11

    with testing.db.connect() as conn:
        currenttime = cls.currenttime = func.current_date(type_=sa.Date)
        if is_oracle:
            ts = conn.scalar(
                sa.select(
                    [
                        func.trunc(
                            func.current_timestamp(),
                            sa.literal_column("'DAY'"),
                            type_=sa.Date,
                        )
                    ]
                )
            )
            currenttime = cls.currenttime = func.trunc(
                currenttime, sa.literal_column("'DAY'"), type_=sa.Date
            )
            def1 = currenttime
            def2 = func.trunc(
                sa.text("current_timestamp"),
                sa.literal_column("'DAY'"),
                type_=sa.Date,
            )
            deftype = sa.Date
        elif use_function_defaults:
            def1 = currenttime
            deftype = sa.Date
            if testing.against("mssql"):
                def2 = sa.text("getdate()")
            else:
                def2 = sa.text("current_date")
            ts = conn.scalar(func.current_date())
        else:
            def1 = def2 = "3"
            ts = 3
            deftype = Integer

        cls.ts = ts

    Table(
        "default_test",
        metadata,
        # python function
        Column("col1", Integer, primary_key=True, default=mydefault),
        # python literal
        Column(
            "col2",
            String(20),
            default="imthedefault",
            onupdate="im the update",
        ),
        # preexecute expression
        Column(
            "col3",
            Integer,
            default=func.length("abcdef"),
            onupdate=func.length("abcdefghijk"),
        ),
        # SQL-side default from sql expression
        Column("col4", deftype, server_default=def1),
        # SQL-side default from literal expression
        Column("col5", deftype, server_default=def2),
        # preexecute + update timestamp
        Column("col6", sa.Date, default=currenttime, onupdate=currenttime),
        Column("boolcol1", sa.Boolean, default=True),
        Column("boolcol2", sa.Boolean, default=False),
        # python function which uses ExecutionContext
        Column(
            "col7",
            Integer,
            default=mydefault_using_connection,
            onupdate=myupdate_with_ctx,
        ),
        # python builtin
        Column(
            "col8",
            sa.Date,
            default=datetime.date.today,
            onupdate=datetime.date.today,
        ),
        # combo
        Column("col9", String(20), default="py", server_default="ddl"),
        # python method w/ context
        Column("col10", String(20), default=MyClass.gen_default),
        # fixed default w/ type that has bound processor
        Column("col11", MyType(), default="foo"),
    )
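# A minimal, hypothetical sketch (not part of the suite) of how the defaults
# above resolve on a bare INSERT; the method name and the specific assertions
# are illustrative assumptions.
def test_defaults_sketch(self):
    t = self.tables.default_test
    with testing.db.connect() as conn:
        conn.execute(t.insert())
        row = conn.execute(t.select()).first()
        # python-side defaults are computed by SQLAlchemy at execute time
        eq_(row["col2"], "imthedefault")
        # when both a python default ("py") and a server_default ("ddl")
        # are present, the python-side value is what gets inserted
        eq_(row["col9"], "py")
        # server-side defaults are evaluated by the database itself
        eq_(row["col4"], self.ts)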
class GenerativeQueryTest(fixtures.MappedTest):
    run_inserts = "once"
    run_deletes = None

    @classmethod
    def define_tables(cls, metadata):
        Table(
            "foo",
            metadata,
            Column(
                "id", Integer, sa.Sequence("foo_id_seq"), primary_key=True
            ),
            Column("bar", Integer),
            Column("range", Integer),
        )

    @classmethod
    def fixtures(cls):
        rows = tuple([(i, i % 10) for i in range(100)])
        foo_data = (("bar", "range"),) + rows
        return dict(foo=foo_data)

    @classmethod
    def setup_mappers(cls):
        foo = cls.tables.foo

        class Foo(cls.Basic):
            pass

        mapper(Foo, foo)

    def test_selectby(self):
        Foo = self.classes.Foo
        res = create_session().query(Foo).filter_by(range=5)
        assert res.order_by(Foo.bar)[0].bar == 5
        assert res.order_by(sa.desc(Foo.bar))[0].bar == 95

    def test_slice(self):
        Foo = self.classes.Foo
        sess = create_session()
        query = sess.query(Foo).order_by(Foo.id)
        orig = query.all()

        assert query[1] == orig[1]
        assert query[-4] == orig[-4]
        assert query[-1] == orig[-1]

        assert list(query[10:20]) == orig[10:20]
        assert list(query[10:]) == orig[10:]
        assert list(query[:10]) == orig[:10]
        assert list(query[:10]) == orig[:10]

        assert list(query[5:5]) == orig[5:5]

        assert list(query[10:40:3]) == orig[10:40:3]

        assert list(query[-5:]) == orig[-5:]
        assert list(query[-2:-5]) == orig[-2:-5]
        assert list(query[-5:-2]) == orig[-5:-2]
        assert list(query[:-2]) == orig[:-2]

        assert query[10:20][5] == orig[10:20][5]

    def test_aggregate(self):
        foo, Foo = self.tables.foo, self.classes.Foo

        sess = create_session()
        query = sess.query(Foo)

        assert query.count() == 100
        assert sess.query(func.min(foo.c.bar)).filter(
            foo.c.bar < 30
        ).one() == (0,)
        assert sess.query(func.max(foo.c.bar)).filter(
            foo.c.bar < 30
        ).one() == (29,)

        assert (
            next(
                query.filter(foo.c.bar < 30).values(sa.func.max(foo.c.bar))
            )[0]
            == 29
        )
        assert (
            next(
                query.filter(foo.c.bar < 30).values(sa.func.max(foo.c.bar))
            )[0]
            == 29
        )

    @testing.fails_if(
        lambda: testing.against("mysql+mysqldb")
        and testing.db.dialect.dbapi.version_info[:4] == (1, 2, 1, "gamma"),
        "unknown incompatibility",
    )
    def test_aggregate_1(self):
        foo = self.tables.foo

        query = create_session().query(func.sum(foo.c.bar))
        assert query.filter(foo.c.bar < 30).one() == (435,)

    @testing.fails_on("firebird", "FIXME: unknown")
    @testing.fails_on(
        "mssql",
        "AVG produces an average as the original column type on mssql.",
    )
    def test_aggregate_2(self):
        foo = self.tables.foo
        query = create_session().query(func.avg(foo.c.bar))
        avg = query.filter(foo.c.bar < 30).one()[0]
        eq_(float(round(avg, 1)), 14.5)

    @testing.fails_on(
        "mssql",
        "AVG produces an average as the original column type on mssql.",
    )
    def test_aggregate_3(self):
        foo, Foo = self.tables.foo, self.classes.Foo
        query = create_session().query(Foo)

        avg_f = next(
            query.filter(foo.c.bar < 30).values(sa.func.avg(foo.c.bar))
        )[0]
        assert float(round(avg_f, 1)) == 14.5

        avg_o = next(
            query.filter(foo.c.bar < 30).values(sa.func.avg(foo.c.bar))
        )[0]
        assert float(round(avg_o, 1)) == 14.5

    def test_filter(self):
        Foo = self.classes.Foo
        query = create_session().query(Foo)
        assert query.count() == 100
        assert query.filter(Foo.bar < 30).count() == 30
        res2 = query.filter(Foo.bar < 30).filter(Foo.bar > 10)
        assert res2.count() == 19

    def test_order_by(self):
        Foo = self.classes.Foo
        query = create_session().query(Foo)
        assert query.order_by(Foo.bar)[0].bar == 0
        assert query.order_by(sa.desc(Foo.bar))[0].bar == 99

    def test_offset_order_by(self):
        Foo = self.classes.Foo
        query = create_session().query(Foo)
        assert list(query.order_by(Foo.bar).offset(10))[0].bar == 10

    def test_offset(self):
        Foo = self.classes.Foo
        query = create_session().query(Foo)
        assert len(list(query.limit(10))) == 10
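    # A hypothetical equivalence sketch (not from the suite) for the slicing
    # exercised in test_slice above: non-negative slices map to OFFSET/LIMIT,
    # while negative indexes force a full fetch and a plain Python list slice.
    # The method name and assertions are illustrative assumptions.
    def test_slice_equivalence_sketch(self):
        Foo = self.classes.Foo
        sess = create_session()
        query = sess.query(Foo).order_by(Foo.id)

        eq_(list(query[10:20]), query.offset(10).limit(10).all())
        eq_(list(query[:10]), query.limit(10).all())
        eq_(list(query[-5:]), query.all()[-5:])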