def test_select(self):
    """inspect() on a Select construct hands back the construct itself,
    and the statement is its own ``selectable``."""
    tbl = Table("t", MetaData(), Column("x", Integer))
    stmt = tbl.select()
    is_(inspect(stmt), stmt)
    assert stmt.is_selectable
    is_(stmt.selectable, stmt)
def test_boolean_roundtrip_reflected(self, boolean_table, store, expected):
    """Round-trip boolean values through a reflected copy of the
    fixture table, both with and without column overrides."""
    # plain reflection: b3/b4 come back as TINYINT flavors
    meta2 = MetaData(testing.db)
    table = Table("mysql_bool", meta2, autoload=True)
    eq_(colspec(table.c.b3), "b3 TINYINT(1)")
    eq_regex(colspec(table.c.b4), r"b4 TINYINT(?:\(1\))? UNSIGNED")

    # reflect again, this time overriding the columns with BOOLEAN;
    # the override wins over the reflected type
    meta2 = MetaData(testing.db)
    table = Table(
        "mysql_bool",
        meta2,
        Column("b1", BOOLEAN),
        Column("b2", Boolean),
        Column("b3", BOOLEAN),
        Column("b4", BOOLEAN),
        autoload=True,
    )
    eq_(colspec(table.c.b3), "b3 BOOL")
    eq_(colspec(table.c.b4), "b4 BOOL")

    with testing.db.connect() as conn:
        # when no distinct expectation is given, the stored values
        # should come back unchanged
        expected = expected or store
        conn.execute(table.insert(store))
        row = conn.execute(table.select()).first()
        eq_(list(row), expected)
        for i, val in enumerate(expected):
            if isinstance(val, bool):
                # identity check: we want the True/False singletons back,
                # not merely truthy/falsy equivalents
                self.assert_(val is row[i])
        conn.execute(table.delete())
def test_preexecute_passivedefault(self):
    """test that when we get a primary key column back from
    reflecting a table which has a default value on it, we pre-
    execute that DefaultClause upon insert."""
    try:
        meta = MetaData(testing.db)
        testing.db.execute(
            """
            CREATE TABLE speedy_users
            (
                speedy_user_id   SERIAL     PRIMARY KEY,
                user_name        VARCHAR    NOT NULL,
                user_password    VARCHAR    NOT NULL
            );
            """
        )
        t = Table("speedy_users", meta, autoload=True)
        # the inserted password must match the row asserted below;
        # previously the insert used a placeholder ("******") that could
        # never satisfy the fetchall() assertion.
        r = t.insert().execute(user_name="user", user_password="lala")
        # SERIAL default was pre-executed / retrieved for the PK
        assert r.inserted_primary_key == [1]
        result = t.select().execute().fetchall()
        assert result == [(1, "user", "lala")]
    finally:
        # always clean up the manually-created table
        testing.db.execute("drop table speedy_users")
def test_subquery_four(self):
    """Mixed-case names with quoting explicitly disabled render
    unquoted; only the mixed-case alias name is quoted."""
    # Not lower case names, quotes off, should not quote
    metadata = MetaData()
    t1 = Table(
        "T1",
        metadata,
        Column("Col1", Integer, quote=False),
        schema="Foo",
        quote=False,
        quote_schema=False,
    )
    a = t1.select().alias("Anon")
    b = select([1], a.c.Col1 == 2, from_obj=a)
    self.assert_compile(
        b,
        "SELECT 1 "
        "FROM ("
        "SELECT "
        "Foo.T1.Col1 AS Col1 "
        "FROM "
        "Foo.T1"
        ') AS "Anon" '
        "WHERE "
        '"Anon".Col1 = :Col1_1',
    )
def test_unicode_enum(self):
    """Round-trip non-ASCII ENUM labels and reflect them back."""
    metadata = self.metadata
    t1 = Table(
        "table",
        metadata,
        Column("id", Integer, primary_key=True),
        Column("value", Enum(u("réveillé"), u("drôle"), u("S’il"))),
        Column("value2", mysql.ENUM(u("réveillé"), u("drôle"), u("S’il"))),
    )
    metadata.create_all()
    t1.insert().execute(value=u("drôle"), value2=u("drôle"))
    t1.insert().execute(value=u("réveillé"), value2=u("réveillé"))
    t1.insert().execute(value=u("S’il"), value2=u("S’il"))
    eq_(
        t1.select().order_by(t1.c.id).execute().fetchall(),
        [
            (1, u("drôle"), u("drôle")),
            (2, u("réveillé"), u("réveillé")),
            (3, u("S’il"), u("S’il")),
        ],
    )

    # test reflection of the enum labels
    m2 = MetaData(testing.db)
    t2 = Table("table", m2, autoload=True)

    # TODO: what's wrong with the last element ?  is there
    # latin-1 stuff forcing its way in ?
    # only the first two labels are compared below for that reason.
    eq_(
        t2.c.value.type.enums[0:2], [u("réveillé"), u("drôle")]
    )  # u'S’il') # eh ?
    eq_(
        t2.c.value2.type.enums[0:2], [u("réveillé"), u("drôle")]
    )  # u'S’il') # eh ?
def test_empty_set_no_empty_string(self):
    """Empty strings are silently dropped from stored SET values."""
    t = Table(
        "t",
        self.metadata,
        Column("id", Integer),
        Column("data", mysql.SET("a", "b")),
    )
    t.create()
    with testing.db.begin() as conn:
        rows = [
            {"id": 1, "data": set()},
            {"id": 2, "data": {""}},
            {"id": 3, "data": {"a", ""}},
            {"id": 4, "data": {"b"}},
        ]
        conn.execute(t.insert(), *rows)
        # the "" member never round-trips; only real labels survive
        eq_(
            conn.execute(t.select().order_by(t.c.id)).fetchall(),
            [(1, set()), (2, set()), (3, {"a"}), (4, {"b"})],
        )
def test_delete_schema_legacy(self):
    """DELETE against a schema-qualified table works under
    legacy_schema_aliasing."""
    meta = self.metadata
    eng = engines.testing_engine(options=dict(legacy_schema_aliasing=True))
    meta.bind = eng
    con = eng.connect()
    con.execute("create schema paj")

    @event.listens_for(meta, "after_drop")
    def cleanup(target, connection, **kw):
        # drop the schema we created once its tables are gone
        connection.execute("drop schema paj")

    tbl = Table(
        "test", meta, Column("id", Integer, primary_key=True), schema="paj"
    )
    tbl.create()
    tbl.insert().execute({"id": 1})
    eq_(tbl.select().scalar(), 1)
    tbl.delete(tbl.c.id == 1).execute()
    # row is gone after the delete
    eq_(tbl.select().scalar(), None)
def test_bit_50_roundtrip_reflected(self, bit_table, store, expected):
    """Round-trip BIT values through a reflected copy of the table."""
    reflected = Table("mysql_bits", MetaData(), autoload_with=testing.db)
    with testing.db.connect() as conn:
        if not expected:
            # no distinct expectation means values come back unchanged
            expected = store
        conn.execute(reflected.insert(store))
        fetched = conn.execute(reflected.select()).first()
        eq_(list(fetched), expected)
        conn.execute(reflected.delete())
def test_select_with_nolock_schema(self):
    """The WITH (NOLOCK) hint renders after a schema-qualified table."""
    meta = MetaData()
    tbl = Table(
        "sometable",
        meta,
        Column("somecolumn", Integer),
        schema="test_schema",
    )
    stmt = tbl.select().with_hint(tbl, "WITH (NOLOCK)")
    self.assert_compile(
        stmt,
        "SELECT test_schema.sometable.somecolumn "
        "FROM test_schema.sometable WITH (NOLOCK)",
    )
def test_native_odbc_execute(self):
    """mxODBC dialect chooses execute vs. executedirect per statement
    type, and the execution option can override the choice."""
    t1 = Table("t1", MetaData(), Column("c1", Integer))
    dbapi = mock_dbapi()
    engine = engines.testing_engine(
        "mssql+mxodbc://localhost",
        options={"module": dbapi, "_initialize": False},
    )
    conn = engine.connect()

    # crud: uses execute
    conn.execute(t1.insert().values(c1="foo"))
    conn.execute(t1.delete().where(t1.c.c1 == "foo"))
    conn.execute(t1.update().where(t1.c.c1 == "foo").values(c1="bar"))

    # select: uses executedirect
    conn.execute(t1.select())

    # manual flagging
    conn.execution_options(native_odbc_execute=True).execute(t1.select())
    conn.execution_options(native_odbc_execute=False).execute(
        t1.insert().values(c1="foo")
    )

    # inspect the kwargs of each recorded cursor.execute() call;
    # "direct" reflects which code path was taken
    eq_(
        # fmt: off
        [
            c[2] for c in
            dbapi.connect.return_value.cursor.
            return_value.execute.mock_calls
        ],
        # fmt: on
        [
            {"direct": True},
            {"direct": True},
            {"direct": True},
            {"direct": True},
            {"direct": False},
            {"direct": True},
        ]
    )
def test_apply_labels_should_quote(self):
    """Mixed-case table/schema names are quoted in a labeled select."""
    # Not lower case names, should quote
    md = MetaData()
    tbl = Table("T1", md, Column("Col1", Integer), schema="Foo")
    labeled = tbl.select().apply_labels()
    self.assert_compile(
        labeled,
        'SELECT "Foo"."T1"."Col1" AS "Foo_T1_Col1" FROM "Foo"."T1"',
    )
def test_compiled_insert(self):
    """executemany against a pre-compiled, inline insert construct."""
    table = Table(
        "testtable",
        self.metadata,
        Column("id", Integer, primary_key=True),
        Column("data", String(30)),
    )
    self.metadata.create_all()
    # compile once up front; "x" is bound per execution below
    ins = table.insert(
        inline=True, values={"data": bindparam("x")}
    ).compile()
    ins.execute({"x": "five"}, {"x": "seven"})
    eq_(table.select().execute().fetchall(), [(1, "five"), (2, "seven")])
def test_schema_roundtrips(self):
    """Full CRUD round trip against a schema-qualified table."""
    meta = self.metadata
    users = Table(
        "users",
        meta,
        Column("id", Integer, primary_key=True),
        Column("name", String(50)),
        schema="test_schema",
    )
    users.create()
    users.insert().execute(id=1, name="name1")
    users.insert().execute(id=2, name="name2")
    users.insert().execute(id=3, name="name3")
    users.insert().execute(id=4, name="name4")
    # select, with and without labels
    eq_(
        users.select().where(users.c.name == "name2").execute().fetchall(),
        [(2, "name2")],
    )
    eq_(
        users.select(use_labels=True)
        .where(users.c.name == "name2")
        .execute()
        .fetchall(),
        [(2, "name2")],
    )
    # delete
    users.delete().where(users.c.id == 3).execute()
    eq_(
        users.select().where(users.c.name == "name3").execute().fetchall(),
        [],
    )
    # update
    users.update().where(users.c.name == "name4").execute(name="newname")
    eq_(
        users.select(use_labels=True)
        .where(users.c.id == 4)
        .execute()
        .fetchall(),
        [(4, "newname")],
    )
def test_unicode_roundtrip(self):
    """Non-ASCII SET labels survive an insert/select round trip."""
    set_table = Table(
        "t",
        self.metadata,
        Column("id", Integer, primary_key=True),
        Column("data", mysql.SET(u("réveillé"), u("drôle"), u("S’il"))),
    )
    set_table.create()
    with testing.db.begin() as conn:
        conn.execute(
            set_table.insert(), {"data": set([u("réveillé"), u("drôle")])}
        )
        row = conn.execute(set_table.select()).first()
        eq_(row, (1, set([u("réveillé"), u("drôle")])))
def test_subquery_one(self):
    """All-lowercase names need no quoting anywhere."""
    # Lower case names, should not quote
    metadata = MetaData()
    t1 = Table("t1", metadata, Column("col1", Integer), schema="foo")
    a = t1.select().alias("anon")
    b = select([1], a.c.col1 == 2, from_obj=a)
    self.assert_compile(
        b,
        "SELECT 1 "
        "FROM ("
        "SELECT "
        "foo.t1.col1 AS col1 "
        "FROM "
        "foo.t1"
        ") AS anon "
        "WHERE anon.col1 = :col1_1",
    )
def test_apply_labels_shouldnt_quote(self):
    """With quote=False on column, table and schema, the labeled
    select renders everything unquoted."""
    # Not lower case names, quotes off
    metadata = MetaData()
    t1 = Table(
        "T1",
        metadata,
        Column("Col1", Integer, quote=False),
        schema="Foo",
        quote=False,
        quote_schema=False,
    )

    # TODO: is this what we really want here ?
    # what if table/schema *are* quoted?
    self.assert_compile(
        t1.select().apply_labels(),
        "SELECT "
        "Foo.T1.Col1 AS Foo_T1_Col1 "
        "FROM "
        "Foo.T1",
    )
def test_subquery_three(self):
    """Mixed-case names get quoted throughout, including the alias."""
    # Not lower case names, should quote
    metadata = MetaData()
    t1 = Table("T1", metadata, Column("Col1", Integer), schema="Foo")
    a = t1.select().alias("Anon")
    b = select([1], a.c.Col1 == 2, from_obj=a)
    self.assert_compile(
        b,
        "SELECT 1 "
        "FROM ("
        "SELECT "
        '"Foo"."T1"."Col1" AS "Col1" '
        "FROM "
        '"Foo"."T1"'
        ') AS "Anon" '
        "WHERE "
        '"Anon"."Col1" = :Col1_1',
    )
def test_timestamp_nullable(self, type_):
    """NOT NULL TIMESTAMP columns receive the current time when
    passed NULL or omitted; nullable ones store NULL."""
    ts_table = Table(
        "mysql_timestamp",
        self.metadata,
        Column("t1", type_),
        Column("t2", type_, nullable=False),
        mysql_engine="InnoDB",
    )
    self.metadata.create_all()

    # TIMESTAMP without NULL inserts current time when passed
    # NULL.  when not passed, generates 0000-00-00 quite
    # annoyingly.
    # the flag http://dev.mysql.com/doc/refman/5.6/en/\
    # server-system-variables.html#sysvar_explicit_defaults_for_timestamp
    # changes this for 5.6 if set.

    # normalize dates for the amount of time the operation took
    def normalize(dt):
        # 'now' and 'new_now' are closure variables bound later in the
        # enclosing scope, before normalize() is ever called
        if dt is None:
            return None
        elif now <= dt <= new_now:
            # anything timestamped during the test window collapses
            # to 'now' so the comparison below is deterministic
            return now
        else:
            return dt

    with testing.db.begin() as conn:
        now = conn.scalar("select now()")
        conn.execute(ts_table.insert(), {"t1": now, "t2": None})
        conn.execute(ts_table.insert(), {"t1": None, "t2": None})
        conn.execute(ts_table.insert(), {"t2": None})
        new_now = conn.scalar("select now()")

        eq_(
            [
                tuple([normalize(dt) for dt in row])
                for row in conn.execute(ts_table.select())
            ],
            [(now, now), (None, now), (None, now)],
        )
def test_convert_unicode(self):
    """A raw non-ASCII insert comes back as a unicode string."""
    meta = self.metadata
    t1 = Table(
        "unitest_table",
        meta,
        Column("id", Integer, primary_key=True),
        Column("descr", mssql.MSText()),
    )
    meta.create_all()
    with testing.db.connect() as con:
        # insert via a raw UTF-8 encoded SQL string
        con.execute(
            ue(
                "insert into unitest_table values ('abc \xc3\xa9 def')"
            ).encode("UTF-8")
        )
        r = con.execute(t1.select()).first()
        # the value must decode back to the unicode text type
        assert isinstance(
            r[1], util.text_type
        ), "%s is %s instead of unicode, working on %s" % (
            r[1],
            type(r[1]),
            meta.bind,
        )
        eq_(r[1], util.ue("abc \xc3\xa9 def"))
def test_subquery_two(self):
    """Explicit quote=True forces quoting of lowercase names; the
    lowercase alias itself stays unquoted."""
    # Lower case names, quotes on, should quote
    metadata = MetaData()
    t1 = Table(
        "t1",
        metadata,
        Column("col1", Integer, quote=True),
        schema="foo",
        quote=True,
        quote_schema=True,
    )
    a = t1.select().alias("anon")
    b = select([1], a.c.col1 == 2, from_obj=a)
    self.assert_compile(
        b,
        "SELECT 1 "
        "FROM ("
        "SELECT "
        '"foo"."t1"."col1" AS "col1" '
        "FROM "
        '"foo"."t1"'
        ") AS anon "
        'WHERE anon."col1" = :col1_1',
    )
def _assert_data_autoincrement_returning(self, table):
    """Exercise inserts with explicit ids, prefetched SERIAL ids and
    inline inserts under implicit_returning=True, asserting both the
    emitted SQL and the resulting rows; then repeat with a reflected
    copy of the table."""
    engine = engines.testing_engine(options={"implicit_returning": True})

    with self.sql_execution_asserter(engine) as asserter:
        with engine.connect() as conn:

            # execute with explicit id
            r = conn.execute(table.insert(), {"id": 30, "data": "d1"})
            eq_(r.inserted_primary_key, [30])

            # execute with prefetch id
            r = conn.execute(table.insert(), {"data": "d2"})
            eq_(r.inserted_primary_key, [1])

            # executemany with explicit ids
            conn.execute(
                table.insert(),
                {"id": 31, "data": "d3"},
                {"id": 32, "data": "d4"},
            )

            # executemany, uses SERIAL
            conn.execute(table.insert(), {"data": "d5"}, {"data": "d6"})

            # single execute, explicit id, inline
            conn.execute(
                table.insert(inline=True), {"id": 33, "data": "d7"}
            )

            # single execute, inline, uses SERIAL
            conn.execute(table.insert(inline=True), {"data": "d8"})

    # only the prefetch-id insert uses RETURNING; the rest either
    # carry an explicit id or defer to the server-side SERIAL
    asserter.assert_(
        DialectSQL(
            "INSERT INTO testtable (id, data) VALUES (:id, :data)",
            {"id": 30, "data": "d1"},
        ),
        DialectSQL(
            "INSERT INTO testtable (data) VALUES (:data) RETURNING "
            "testtable.id",
            {"data": "d2"},
        ),
        DialectSQL(
            "INSERT INTO testtable (id, data) VALUES (:id, :data)",
            [{"id": 31, "data": "d3"}, {"id": 32, "data": "d4"}],
        ),
        DialectSQL(
            "INSERT INTO testtable (data) VALUES (:data)",
            [{"data": "d5"}, {"data": "d6"}],
        ),
        DialectSQL(
            "INSERT INTO testtable (id, data) VALUES (:id, :data)",
            [{"id": 33, "data": "d7"}],
        ),
        DialectSQL(
            "INSERT INTO testtable (data) VALUES (:data)",
            [{"data": "d8"}],
        ),
    )

    with engine.connect() as conn:
        eq_(
            conn.execute(table.select()).fetchall(),
            [
                (30, "d1"),
                (1, "d2"),
                (31, "d3"),
                (32, "d4"),
                (2, "d5"),
                (3, "d6"),
                (33, "d7"),
                (4, "d8"),
            ],
        )
        conn.execute(table.delete())

    # test the same series of events using a reflected version of
    # the table
    m2 = MetaData(engine)
    table = Table(table.name, m2, autoload=True)

    with self.sql_execution_asserter(engine) as asserter:
        with engine.connect() as conn:
            conn.execute(table.insert(), {"id": 30, "data": "d1"})
            r = conn.execute(table.insert(), {"data": "d2"})
            # the sequence continued from the first round: next id is 5
            eq_(r.inserted_primary_key, [5])
            conn.execute(
                table.insert(),
                {"id": 31, "data": "d3"},
                {"id": 32, "data": "d4"},
            )
            conn.execute(table.insert(), {"data": "d5"}, {"data": "d6"})
            conn.execute(
                table.insert(inline=True), {"id": 33, "data": "d7"}
            )
            conn.execute(table.insert(inline=True), {"data": "d8"})

    asserter.assert_(
        DialectSQL(
            "INSERT INTO testtable (id, data) VALUES (:id, :data)",
            {"id": 30, "data": "d1"},
        ),
        DialectSQL(
            "INSERT INTO testtable (data) VALUES (:data) RETURNING "
            "testtable.id",
            {"data": "d2"},
        ),
        DialectSQL(
            "INSERT INTO testtable (id, data) VALUES (:id, :data)",
            [{"id": 31, "data": "d3"}, {"id": 32, "data": "d4"}],
        ),
        DialectSQL(
            "INSERT INTO testtable (data) VALUES (:data)",
            [{"data": "d5"}, {"data": "d6"}],
        ),
        DialectSQL(
            "INSERT INTO testtable (id, data) VALUES (:id, :data)",
            [{"id": 33, "data": "d7"}],
        ),
        DialectSQL(
            "INSERT INTO testtable (data) VALUES (:data)",
            [{"data": "d8"}],
        ),
    )

    with engine.connect() as conn:
        eq_(
            conn.execute(table.select()).fetchall(),
            [
                (30, "d1"),
                (5, "d2"),
                (31, "d3"),
                (32, "d4"),
                (6, "d5"),
                (7, "d6"),
                (33, "d7"),
                (8, "d8"),
            ],
        )
        conn.execute(table.delete())
def test_enum(self):
    """Exercise the ENUM type."""
    with testing.expect_deprecated("Manually quoting ENUM value literals"):
        e1, e2 = mysql.ENUM("'a'", "'b'"), mysql.ENUM("'a'", "'b'")
        e3 = mysql.ENUM("'a'", "'b'", strict=True)
        e4 = mysql.ENUM("'a'", "'b'", strict=True)

    enum_table = Table(
        "mysql_enum",
        self.metadata,
        Column("e1", e1),
        Column("e2", e2, nullable=False),
        Column(
            "e2generic",
            Enum("a", "b", validate_strings=True),
            nullable=False,
        ),
        Column("e3", e3),
        Column("e4", e4, nullable=False),
        Column("e5", mysql.ENUM("a", "b")),
        Column("e5generic", Enum("a", "b")),
        Column("e6", mysql.ENUM("'a'", "b")),
        Column(
            "e7",
            mysql.ENUM(
                EnumSetTest.SomeEnum,
                values_callable=EnumSetTest.get_enum_string_values,
            ),
        ),
        Column("e8", mysql.ENUM(EnumSetTest.SomeEnum)),
    )

    # DDL rendering for each column flavor
    eq_(colspec(enum_table.c.e1), "e1 ENUM('a','b')")
    eq_(colspec(enum_table.c.e2), "e2 ENUM('a','b') NOT NULL")
    eq_(
        colspec(enum_table.c.e2generic), "e2generic ENUM('a','b') NOT NULL"
    )
    eq_(colspec(enum_table.c.e3), "e3 ENUM('a','b')")
    eq_(colspec(enum_table.c.e4), "e4 ENUM('a','b') NOT NULL")
    eq_(colspec(enum_table.c.e5), "e5 ENUM('a','b')")
    eq_(colspec(enum_table.c.e5generic), "e5generic ENUM('a','b')")
    # manually-quoted label requires doubled quotes in the DDL
    eq_(colspec(enum_table.c.e6), "e6 ENUM('''a''','b')")
    eq_(colspec(enum_table.c.e7), "e7 ENUM('1','2','3','a','b')")
    eq_(
        colspec(enum_table.c.e8),
        "e8 ENUM('one','two','three','AMember','BMember')",
    )
    enum_table.create()

    # NOT NULL columns reject a missing/None value
    assert_raises(
        exc.DBAPIError,
        enum_table.insert().execute,
        e1=None,
        e2=None,
        e3=None,
        e4=None,
    )

    assert enum_table.c.e2generic.type.validate_strings

    # values outside the declared labels are rejected client-side
    assert_raises(
        exc.StatementError,
        enum_table.insert().execute,
        e1="c",
        e2="c",
        e2generic="c",
        e3="c",
        e4="c",
        e5="c",
        e5generic="c",
        e6="c",
        e7="c",
        e8="c",
    )

    enum_table.insert().execute()
    enum_table.insert().execute(
        e1="a",
        e2="a",
        e2generic="a",
        e3="a",
        e4="a",
        e5="a",
        e5generic="a",
        e6="'a'",
        e7="a",
        e8="AMember",
    )
    enum_table.insert().execute(
        e1="b",
        e2="b",
        e2generic="b",
        e3="b",
        e4="b",
        e5="b",
        e5generic="b",
        e6="b",
        e7="b",
        e8="BMember",
    )

    res = enum_table.select().execute().fetchall()

    expected = [
        (None, "a", "a", None, "a", None, None, None, None, None),
        (
            "a",
            "a",
            "a",
            "a",
            "a",
            "a",
            "a",
            "'a'",
            EnumSetTest.SomeEnum.AMember,
            EnumSetTest.SomeEnum.AMember,
        ),
        (
            "b",
            "b",
            "b",
            "b",
            "b",
            "b",
            "b",
            "b",
            EnumSetTest.SomeEnum.BMember,
            EnumSetTest.SomeEnum.BMember,
        ),
    ]

    eq_(res, expected)
def test_infinite_float(self):
    """float('inf') survives a round trip through a Float column."""
    md = self.metadata
    tbl = Table("t", md, Column("data", Float))
    md.create_all()
    inf = float("inf")
    tbl.insert().execute(data=inf)
    eq_(tbl.select().execute().fetchall(), [(inf,)])
class LegacySchemaAliasingTest(fixtures.TestBase, AssertsCompiledSQL):
    """Legacy behavior tried to prevent schema-qualified tables
    from being rendered as dotted names, and were instead
    aliased.

    This behavior no longer seems to be required.

    """

    def setup(self):
        # t1: plain lightweight table construct, no schema
        # t2: full Table with a "schema" qualifier
        metadata = MetaData()
        self.t1 = table(
            "t1",
            column("a", Integer),
            column("b", String),
            column("c", String),
        )
        self.t2 = Table(
            "t2",
            metadata,
            Column("a", Integer),
            Column("b", Integer),
            Column("c", Integer),
            schema="schema",
        )

    def _assert_sql(self, element, legacy_sql, modern_sql=None):
        # compile once with legacy aliasing, once with the modern
        # dialect; "foob" is a deliberate never-matching default so a
        # missing modern_sql fails loudly
        dialect = mssql.dialect(legacy_schema_aliasing=True)
        self.assert_compile(element, legacy_sql, dialect=dialect)
        dialect = mssql.dialect()
        self.assert_compile(element, modern_sql or "foob", dialect=dialect)

    def _legacy_dialect(self):
        return mssql.dialect(legacy_schema_aliasing=True)

    def test_result_map(self):
        s = self.t2.select()
        c = s.compile(dialect=self._legacy_dialect())
        assert self.t2.c.a in set(c._create_result_map()["a"][1])

    def test_result_map_use_labels(self):
        s = self.t2.select(use_labels=True)
        c = s.compile(dialect=self._legacy_dialect())
        assert self.t2.c.a in set(c._create_result_map()["schema_t2_a"][1])

    def test_straight_select(self):
        self._assert_sql(
            self.t2.select(),
            "SELECT t2_1.a, t2_1.b, t2_1.c FROM [schema].t2 AS t2_1",
            "SELECT [schema].t2.a, [schema].t2.b, "
            "[schema].t2.c FROM [schema].t2",
        )

    def test_straight_select_use_labels(self):
        self._assert_sql(
            self.t2.select(use_labels=True),
            "SELECT t2_1.a AS schema_t2_a, t2_1.b AS schema_t2_b, "
            "t2_1.c AS schema_t2_c FROM [schema].t2 AS t2_1",
            "SELECT [schema].t2.a AS schema_t2_a, "
            "[schema].t2.b AS schema_t2_b, "
            "[schema].t2.c AS schema_t2_c FROM [schema].t2",
        )

    def test_join_to_schema(self):
        t1, t2 = self.t1, self.t2
        self._assert_sql(
            t1.join(t2, t1.c.a == t2.c.a).select(),
            "SELECT t1.a, t1.b, t1.c, t2_1.a, t2_1.b, t2_1.c FROM t1 "
            "JOIN [schema].t2 AS t2_1 ON t2_1.a = t1.a",
            "SELECT t1.a, t1.b, t1.c, [schema].t2.a, [schema].t2.b, "
            "[schema].t2.c FROM t1 "
            "JOIN [schema].t2 ON [schema].t2.a = t1.a",
        )

    def test_union_schema_to_non(self):
        t1, t2 = self.t1, self.t2
        s = (
            select([t2.c.a, t2.c.b])
            .apply_labels()
            .union(select([t1.c.a, t1.c.b]).apply_labels())
            .alias()
            .select()
        )
        self._assert_sql(
            s,
            "SELECT anon_1.schema_t2_a, anon_1.schema_t2_b FROM "
            "(SELECT t2_1.a AS schema_t2_a, t2_1.b AS schema_t2_b "
            "FROM [schema].t2 AS t2_1 UNION SELECT t1.a AS t1_a, "
            "t1.b AS t1_b FROM t1) AS anon_1",
            "SELECT anon_1.schema_t2_a, anon_1.schema_t2_b FROM "
            "(SELECT [schema].t2.a AS schema_t2_a, [schema].t2.b AS "
            "schema_t2_b FROM [schema].t2 UNION SELECT t1.a AS t1_a, "
            "t1.b AS t1_b FROM t1) AS anon_1",
        )

    def test_column_subquery_to_alias(self):
        a1 = self.t2.alias("a1")
        s = select([self.t2, select([a1.c.a]).as_scalar()])
        self._assert_sql(
            s,
            "SELECT t2_1.a, t2_1.b, t2_1.c, "
            "(SELECT a1.a FROM [schema].t2 AS a1) "
            "AS anon_1 FROM [schema].t2 AS t2_1",
            "SELECT [schema].t2.a, [schema].t2.b, [schema].t2.c, "
            "(SELECT a1.a FROM [schema].t2 AS a1) AS anon_1 FROM [schema].t2",
        )
def test_autoincrement(self):
    """Verify autoincrement detection across PK layouts, both on the
    reflected flag and on actual insert behavior."""
    # NOTE(review): 'metadata' here is not self.metadata — presumably a
    # module-level fixture; confirm against the enclosing module.
    Table(
        "ai_1",
        metadata,
        Column("int_y", Integer, primary_key=True, autoincrement=True),
        Column("int_n", Integer, DefaultClause("0"), primary_key=True),
    )
    Table(
        "ai_2",
        metadata,
        Column("int_y", Integer, primary_key=True, autoincrement=True),
        Column("int_n", Integer, DefaultClause("0"), primary_key=True),
    )
    Table(
        "ai_3",
        metadata,
        Column("int_n", Integer, DefaultClause("0"), primary_key=True),
        Column("int_y", Integer, primary_key=True, autoincrement=True),
    )
    Table(
        "ai_4",
        metadata,
        Column("int_n", Integer, DefaultClause("0"), primary_key=True),
        Column("int_n2", Integer, DefaultClause("0"), primary_key=True),
    )
    Table(
        "ai_5",
        metadata,
        Column("int_y", Integer, primary_key=True, autoincrement=True),
        Column("int_n", Integer, DefaultClause("0"), primary_key=True),
    )
    Table(
        "ai_6",
        metadata,
        Column("o1", String(1), DefaultClause("x"), primary_key=True),
        Column("int_y", Integer, primary_key=True, autoincrement=True),
    )
    Table(
        "ai_7",
        metadata,
        Column("o1", String(1), DefaultClause("x"), primary_key=True),
        Column("o2", String(1), DefaultClause("x"), primary_key=True),
        Column("int_y", Integer, autoincrement=True, primary_key=True),
    )
    Table(
        "ai_8",
        metadata,
        Column("o1", String(1), DefaultClause("x"), primary_key=True),
        Column("o2", String(1), DefaultClause("x"), primary_key=True),
    )
    metadata.create_all()

    table_names = [
        "ai_1",
        "ai_2",
        "ai_3",
        "ai_4",
        "ai_5",
        "ai_6",
        "ai_7",
        "ai_8",
    ]
    mr = MetaData(testing.db)

    for name in table_names:
        tbl = Table(name, mr, autoload=True)
        tbl = metadata.tables[name]

        # test that the flag itself reflects appropriately
        for col in tbl.c:
            if "int_y" in col.name:
                is_(col.autoincrement, True)
                is_(tbl._autoincrement_column, col)
            else:
                eq_(col.autoincrement, "auto")
                is_not(tbl._autoincrement_column, col)

        # mxodbc can't handle scope_identity() with DEFAULT VALUES
        if testing.db.driver == "mxodbc":
            eng = [
                engines.testing_engine(
                    options={"implicit_returning": True}
                )
            ]
        else:
            eng = [
                engines.testing_engine(
                    options={"implicit_returning": False}
                ),
                engines.testing_engine(
                    options={"implicit_returning": True}
                ),
            ]

        for counter, engine in enumerate(eng):
            with engine.begin() as conn:
                conn.execute(tbl.insert())
                if "int_y" in tbl.c:
                    # autoincrement column produced the next value
                    eq_(
                        conn.execute(select([tbl.c.int_y])).scalar(),
                        counter + 1,
                    )
                    assert (
                        list(
                            conn.execute(tbl.select()).first()
                        ).count(counter + 1)
                        == 1
                    )
                else:
                    # no autoincrement: generated value never appears
                    assert 1 not in list(
                        conn.execute(tbl.select()).first()
                    )
                conn.execute(tbl.delete())
def test_update(self):
    """
    Tests sending functions and SQL expressions to the VALUES and SET
    clauses of INSERT/UPDATE instances, and that column-level defaults
    get overridden.
    """

    meta = self.metadata
    t = Table(
        "t1",
        meta,
        Column(
            "id",
            Integer,
            Sequence("t1idseq", optional=True),
            primary_key=True,
        ),
        Column("value", Integer),
    )
    t2 = Table(
        "t2",
        meta,
        Column(
            "id",
            Integer,
            Sequence("t2idseq", optional=True),
            primary_key=True,
        ),
        Column("value", Integer, default=7),
        Column("stuff", String(20), onupdate="thisisstuff"),
    )
    meta.create_all()

    # SQL function in INSERT ... VALUES / UPDATE ... SET
    t.insert(values=dict(value=func.length("one"))).execute()
    assert t.select().execute().first()["value"] == 3
    t.update(values=dict(value=func.length("asfda"))).execute()
    assert t.select().execute().first()["value"] == 5

    r = t.insert(values=dict(value=func.length("sfsaafsda"))).execute()
    id_ = r.inserted_primary_key[0]
    assert t.select(t.c.id == id_).execute().first()["value"] == 9
    t.update(values={t.c.value: func.length("asdf")}).execute()
    assert t.select().execute().first()["value"] == 4

    # column default / onupdate interaction with expressions
    t2.insert().execute()
    t2.insert(values=dict(value=func.length("one"))).execute()
    t2.insert(values=dict(value=func.length("asfda") + -19)).execute(
        stuff="hi"
    )

    res = exec_sorted(select([t2.c.value, t2.c.stuff]))
    eq_(res, [(-14, "hi"), (3, None), (7, None)])

    t2.update(values=dict(value=func.length("asdsafasd"))).execute(
        stuff="some stuff"
    )
    assert select([t2.c.value, t2.c.stuff]).execute().fetchall() == [
        (9, "some stuff"),
        (9, "some stuff"),
        (9, "some stuff"),
    ]

    t2.delete().execute()
    t2.insert(values=dict(value=func.length("one") + 8)).execute()
    assert t2.select().execute().first()["value"] == 11

    # the "stuff" onupdate fires when no explicit value is given
    t2.update(values=dict(value=func.length("asfda"))).execute()
    eq_(
        select([t2.c.value, t2.c.stuff]).execute().first(),
        (5, "thisisstuff"),
    )

    # explicit value overrides the onupdate
    t2.update(
        values={t2.c.value: func.length("asfdaasdf"), t2.c.stuff: "foo"}
    ).execute()
    eq_(select([t2.c.value, t2.c.stuff]).execute().first(), (9, "foo"))
def test_with_polymorphic(self):
    """A select().alias() used as the with_polymorphic selectable
    configures without error alongside related mappers."""
    metadata = MetaData(testing.db)
    order = Table(
        "orders",
        metadata,
        Column("id", Integer, primary_key=True),
        Column(
            "employee_id",
            Integer,
            ForeignKey("employees.id"),
            nullable=False,
        ),
        Column("type", Unicode(16)),
    )
    employee = Table(
        "employees",
        metadata,
        Column("id", Integer, primary_key=True),
        Column("name", Unicode(16), unique=True, nullable=False),
    )
    product = Table(
        "products", metadata, Column("id", Integer, primary_key=True)
    )
    orderproduct = Table(
        "orderproducts",
        metadata,
        Column("id", Integer, primary_key=True),
        Column(
            "order_id", Integer, ForeignKey("orders.id"), nullable=False
        ),
        Column(
            "product_id",
            Integer,
            ForeignKey("products.id"),
            nullable=False,
        ),
    )

    class Order(object):
        pass

    class Employee(object):
        pass

    class Product(object):
        pass

    class OrderProduct(object):
        pass

    # the polymorphic selectable is an aliased SELECT of the table
    order_join = order.select().alias("pjoin")

    mapper(
        Order,
        order,
        with_polymorphic=("*", order_join),
        polymorphic_on=order_join.c.type,
        polymorphic_identity="order",
        properties={
            "orderproducts": relationship(
                OrderProduct, lazy="select", backref="order"
            )
        },
    )

    mapper(
        Product,
        product,
        properties={
            "orderproducts": relationship(
                OrderProduct, lazy="select", backref="product"
            )
        },
    )

    mapper(
        Employee,
        employee,
        properties={
            "orders": relationship(Order, lazy="select", backref="employee")
        },
    )

    mapper(OrderProduct, orderproduct)

    # this requires that the compilation of order_mapper's "surrogate
    # mapper" occur after the initial setup of MapperProperty objects on
    # the mapper.
    configure_mappers()
def test_conflicting_backref_one(self):
    """test that conflicting backrefs raises an exception"""
    metadata = MetaData(testing.db)
    order = Table(
        "orders",
        metadata,
        Column("id", Integer, primary_key=True),
        Column("type", Unicode(16)),
    )
    product = Table(
        "products", metadata, Column("id", Integer, primary_key=True)
    )
    orderproduct = Table(
        "orderproducts",
        metadata,
        Column("id", Integer, primary_key=True),
        Column(
            "order_id", Integer, ForeignKey("orders.id"), nullable=False
        ),
        Column(
            "product_id",
            Integer,
            ForeignKey("products.id"),
            nullable=False,
        ),
    )

    class Order(object):
        pass

    class Product(object):
        pass

    class OrderProduct(object):
        pass

    order_join = order.select().alias("pjoin")

    # both relationships below deliberately use backref="product" to
    # provoke the conflict at configure time
    mapper(
        Order,
        order,
        with_polymorphic=("*", order_join),
        polymorphic_on=order_join.c.type,
        polymorphic_identity="order",
        properties={
            "orderproducts": relationship(
                OrderProduct, lazy="select", backref="product"
            )
        },
    )

    mapper(
        Product,
        product,
        properties={
            "orderproducts": relationship(
                OrderProduct, lazy="select", backref="product"
            )
        },
    )

    mapper(OrderProduct, orderproduct)

    assert_raises_message(
        sa_exc.ArgumentError, "Error creating backref", configure_mappers
    )
def _assert_data_noautoincrement(self, table):
    """Without implicit RETURNING, inserts that omit the id must warn
    and fail; explicit-id inserts round trip.  Repeated against a
    reflected copy of the table."""
    engine = engines.testing_engine(options={"implicit_returning": False})

    with engine.connect() as conn:
        conn.execute(table.insert(), {"id": 30, "data": "d1"})

        # no id and no default: warning plus integrity/programming error
        with expect_warnings(
            ".*has no Python-side or server-side default.*"
        ):
            assert_raises(
                (exc.IntegrityError, exc.ProgrammingError),
                conn.execute,
                table.insert(),
                {"data": "d2"},
            )
        with expect_warnings(
            ".*has no Python-side or server-side default.*"
        ):
            assert_raises(
                (exc.IntegrityError, exc.ProgrammingError),
                conn.execute,
                table.insert(),
                {"data": "d2"},
                {"data": "d3"},
            )
        with expect_warnings(
            ".*has no Python-side or server-side default.*"
        ):
            assert_raises(
                (exc.IntegrityError, exc.ProgrammingError),
                conn.execute,
                table.insert(),
                {"data": "d2"},
            )
        with expect_warnings(
            ".*has no Python-side or server-side default.*"
        ):
            assert_raises(
                (exc.IntegrityError, exc.ProgrammingError),
                conn.execute,
                table.insert(),
                {"data": "d2"},
                {"data": "d3"},
            )

        # explicit ids always work
        conn.execute(
            table.insert(),
            {"id": 31, "data": "d2"},
            {"id": 32, "data": "d3"},
        )
        conn.execute(table.insert(inline=True), {"id": 33, "data": "d4"})
        eq_(
            conn.execute(table.select()).fetchall(),
            [(30, "d1"), (31, "d2"), (32, "d3"), (33, "d4")],
        )
        conn.execute(table.delete())

    # test the same series of events using a reflected version of
    # the table
    m2 = MetaData(engine)
    table = Table(table.name, m2, autoload=True)
    with engine.connect() as conn:
        conn.execute(table.insert(), {"id": 30, "data": "d1"})
        with expect_warnings(
            ".*has no Python-side or server-side default.*"
        ):
            assert_raises(
                (exc.IntegrityError, exc.ProgrammingError),
                conn.execute,
                table.insert(),
                {"data": "d2"},
            )
        with expect_warnings(
            ".*has no Python-side or server-side default.*"
        ):
            assert_raises(
                (exc.IntegrityError, exc.ProgrammingError),
                conn.execute,
                table.insert(),
                {"data": "d2"},
                {"data": "d3"},
            )
        conn.execute(
            table.insert(),
            {"id": 31, "data": "d2"},
            {"id": 32, "data": "d3"},
        )
        conn.execute(table.insert(inline=True), {"id": 33, "data": "d4"})
        eq_(
            conn.execute(table.select()).fetchall(),
            [(30, "d1"), (31, "d2"), (32, "d3"), (33, "d4")],
        )