def test_rowcount_flag(self):
     """Verify the ``enable_rowcount`` engine option and the per-statement
     ``enable_rowcount`` execution option control ``ResultProxy.rowcount``.
     """
     metadata = self.metadata
     # engine with rowcount collection enabled
     engine = engines.testing_engine(options={"enable_rowcount": True})
     assert engine.dialect.supports_sane_rowcount
     metadata.bind = engine
     t = Table("t1", metadata, Column("data", String(10)))
     metadata.create_all()
     r = t.insert().execute({"data": "d1"}, {"data": "d2"}, {"data": "d3"})
     r = t.update().where(t.c.data == "d2").values(data="d3").execute()
     # exactly one row matched "d2"
     eq_(r.rowcount, 1)
     r = t.delete().where(t.c.data == "d3").execute()
     # two rows now hold "d3": the originally inserted one plus the updated one
     eq_(r.rowcount, 2)
     # per-statement execution option overrides the engine-wide flag;
     # -1 means rowcount was not collected
     r = t.delete().execution_options(enable_rowcount=False).execute()
     eq_(r.rowcount, -1)
     engine.dispose()
     # engine with rowcount collection disabled: -1 is now the default
     engine = engines.testing_engine(options={"enable_rowcount": False})
     assert not engine.dialect.supports_sane_rowcount
     metadata.bind = engine
     r = t.insert().execute({"data": "d1"}, {"data": "d2"}, {"data": "d3"})
     r = t.update().where(t.c.data == "d2").values(data="d3").execute()
     eq_(r.rowcount, -1)
     r = t.delete().where(t.c.data == "d3").execute()
     eq_(r.rowcount, -1)
     # opting back in per-statement restores the real count (only "d1" is left)
     r = t.delete().execution_options(enable_rowcount=True).execute()
     eq_(r.rowcount, 1)
     r.close()
     engine.dispose()
    def test_boolean_roundtrip_reflected(self, boolean_table, store, expected):
        """Round-trip boolean values through reflected copies of
        ``mysql_bool``.

        First reflects the table blindly (columns come back as MySQL
        TINYINT variants), then reflects with explicit BOOLEAN/Boolean
        column overrides and verifies Python-side bool round-tripping.
        """
        # plain reflection: b3/b4 are reported as TINYINT types
        meta2 = MetaData(testing.db)
        table = Table("mysql_bool", meta2, autoload=True)
        eq_(colspec(table.c.b3), "b3 TINYINT(1)")
        eq_regex(colspec(table.c.b4), r"b4 TINYINT(?:\(1\))? UNSIGNED")

        # reflection with explicit Column overrides: the declared
        # BOOLEAN types take precedence over the reflected ones
        meta2 = MetaData(testing.db)
        table = Table(
            "mysql_bool",
            meta2,
            Column("b1", BOOLEAN),
            Column("b2", Boolean),
            Column("b3", BOOLEAN),
            Column("b4", BOOLEAN),
            autoload=True,
        )
        eq_(colspec(table.c.b3), "b3 BOOL")
        eq_(colspec(table.c.b4), "b4 BOOL")

        with testing.db.connect() as conn:
            expected = expected or store
            conn.execute(table.insert(store))
            row = conn.execute(table.select()).first()
            eq_(list(row), expected)
            # values expected as bools must come back as the actual
            # True/False singletons, not as 0/1 integers
            for i, val in enumerate(expected):
                if isinstance(val, bool):
                    self.assert_(val is row[i])
            conn.execute(table.delete())
    def test_delete_schema_legacy(self):
        """Schema-qualified CRUD round trip with ``legacy_schema_aliasing``
        enabled.

        Creates the ``paj`` schema on the fly and registers an
        ``after_drop`` hook so the schema is removed again once the
        test's tables are dropped.
        """
        meta = self.metadata
        eng = engines.testing_engine(options=dict(legacy_schema_aliasing=True))
        meta.bind = eng
        con = eng.connect()
        con.execute("create schema paj")

        # tear the schema back down after the metadata's tables are dropped
        @event.listens_for(meta, "after_drop")
        def cleanup(target, connection, **kw):
            connection.execute("drop schema paj")

        tbl = Table(
            "test", meta, Column("id", Integer, primary_key=True), schema="paj"
        )
        tbl.create()
        tbl.insert().execute({"id": 1})
        eq_(tbl.select().scalar(), 1)
        tbl.delete(tbl.c.id == 1).execute()
        # table is empty again; scalar() over an empty result is None
        eq_(tbl.select().scalar(), None)
    def test_bit_50_roundtrip_reflected(self, bit_table, store, expected):
        """Round-trip *store* through a freshly reflected copy of
        ``mysql_bits`` so the reflected (not declared) BIT types are
        exercised; compare against *expected*, defaulting to *store*.
        """
        reflected = Table("mysql_bits", MetaData(), autoload_with=testing.db)

        want = expected or store
        with testing.db.connect() as conn:
            conn.execute(reflected.insert(store))
            fetched = conn.execute(reflected.select()).first()
            eq_(list(fetched), want)
            conn.execute(reflected.delete())
 def test_delete_schema(self):
      """DELETE against a schema-qualified table renders the schema in
      both the FROM clause and the WHERE criteria.
      """
      tbl = Table(
          "test",
          MetaData(),
          Column("id", Integer, primary_key=True),
          schema="paj",
      )
      self.assert_compile(
          tbl.delete(tbl.c.id == 1),
          "DELETE FROM paj.test WHERE paj.test.id = :id_1",
      )
      # a correlated IN-subquery is schema-qualified as well
      subq = select([tbl.c.id]).where(tbl.c.id == 1)
      self.assert_compile(
          tbl.delete().where(tbl.c.id.in_(subq)),
          "DELETE FROM paj.test WHERE paj.test.id IN "
          "(SELECT paj.test.id FROM paj.test "
          "WHERE paj.test.id = :id_1)",
      )
 def test_delete_schema_multipart(self):
      """Dotted (database.owner) schema names render fully qualified in
      DELETE statements.
      """
      tbl = Table(
          "test",
          MetaData(),
          Column("id", Integer, primary_key=True),
          schema="banana.paj",
      )
      self.assert_compile(
          tbl.delete(tbl.c.id == 1),
          "DELETE FROM banana.paj.test WHERE "
          "banana.paj.test.id = :id_1",
      )
      # the nested SELECT carries the full two-part schema prefix too
      subq = select([tbl.c.id]).where(tbl.c.id == 1)
      self.assert_compile(
          tbl.delete().where(tbl.c.id.in_(subq)),
          "DELETE FROM banana.paj.test WHERE "
          "banana.paj.test.id IN (SELECT banana.paj.test.id "
          "FROM banana.paj.test WHERE "
          "banana.paj.test.id = :id_1)",
      )
# Example #7
# 0
 def test_schema_roundtrips(self):
      """Full CRUD round trip against a table living in an explicit
      ``test_schema`` schema.
      """
      meta = self.metadata
      users = Table(
          "users",
          meta,
          Column("id", Integer, primary_key=True),
          Column("name", String(50)),
          schema="test_schema",
      )
      users.create()

      # seed four rows: (1, "name1") .. (4, "name4")
      for ident in (1, 2, 3, 4):
          users.insert().execute(id=ident, name="name%d" % ident)

      eq_(
          users.select().where(users.c.name == "name2").execute().fetchall(),
          [(2, "name2")],
      )
      eq_(
          users.select(use_labels=True)
          .where(users.c.name == "name2")
          .execute()
          .fetchall(),
          [(2, "name2")],
      )

      # delete one row and confirm it is gone
      users.delete().where(users.c.id == 3).execute()
      eq_(
          users.select().where(users.c.name == "name3").execute().fetchall(),
          [],
      )

      # update another row and read it back
      users.update().where(users.c.name == "name4").execute(name="newname")
      eq_(
          users.select(use_labels=True)
          .where(users.c.id == 4)
          .execute()
          .fetchall(),
          [(4, "newname")],
      )
 def test_delete_schema_multipart_both_need_quoting(self):
      """Schema components containing spaces are bracket-quoted in every
      reference to the table within the DELETE.
      """
      tbl = Table(
          "test",
          MetaData(),
          Column("id", Integer, primary_key=True),
          schema="banana split.paj with a space",
      )
      self.assert_compile(
          tbl.delete(tbl.c.id == 1),
          "DELETE FROM [banana split].[paj with a "
          "space].test WHERE [banana split].[paj "
          "with a space].test.id = :id_1",
      )
      # the IN-subquery form quotes the schema inside the SELECT as well
      subq = select([tbl.c.id]).where(tbl.c.id == 1)
      self.assert_compile(
          tbl.delete().where(tbl.c.id.in_(subq)),
          "DELETE FROM [banana split].[paj with a space].test "
          "WHERE [banana split].[paj with a space].test.id IN "
          "(SELECT [banana split].[paj with a space].test.id "
          "FROM [banana split].[paj with a space].test "
          "WHERE [banana split].[paj with a space].test.id = :id_1)",
      )
    def _test_round_trip(self,
                         type_,
                         data,
                         deprecate_large_types=True,
                         expected=None):
        """Insert *data* into a column of *type_* and verify what comes back.

        :param type_: SQL type object for the ``data`` column.
        :param data: value to insert.
        :param deprecate_large_types: desired value of the dialect's
         ``deprecate_large_types`` flag; a dedicated engine is created
         only when the default testing engine doesn't already match.
        :param expected: value expected on SELECT; defaults to *data*.
        """
        # reuse testing.db when its flag already matches, otherwise
        # build an engine with the requested setting
        if (testing.db.dialect.deprecate_large_types
                is not deprecate_large_types):
            engine = engines.testing_engine(
                options={"deprecate_large_types": deprecate_large_types})
        else:
            engine = testing.db

        binary_table = Table(
            "binary_table",
            self.metadata,
            Column("id", Integer, primary_key=True),
            Column("data", type_),
        )
        binary_table.create(engine)

        if expected is None:
            expected = data

        with engine.connect() as conn:
            conn.execute(binary_table.insert(), data=data)

            # read back via Core select
            eq_(conn.scalar(select([binary_table.c.data])), expected)

            # and via textual SQL with the column type attached, so the
            # same result-processing applies
            eq_(
                conn.scalar(
                    text("select data from binary_table").columns(
                        binary_table.c.data)),
                expected,
            )

            conn.execute(binary_table.delete())

            # NULL round-trips as None through both paths as well
            conn.execute(binary_table.insert(), data=None)
            eq_(conn.scalar(select([binary_table.c.data])), None)

            eq_(
                conn.scalar(
                    text("select data from binary_table").columns(
                        binary_table.c.data)),
                None,
            )
    def test_native_odbc_execute(self):
        """Verify which statements the mxODBC dialect sends via
        ``executedirect`` vs. ``execute``.

        CRUD statements use ``execute`` (``direct=True`` in the mock's
        call kwargs), SELECTs use ``executedirect``, and the
        ``native_odbc_execute`` execution option flips the choice
        per-statement.
        """
        t1 = Table("t1", MetaData(), Column("c1", Integer))
        dbapi = mock_dbapi()

        # _initialize=False keeps the engine from touching a real database
        engine = engines.testing_engine(
            "mssql+mxodbc://localhost",
            options={"module": dbapi, "_initialize": False},
        )
        conn = engine.connect()

        # crud: uses execute
        conn.execute(t1.insert().values(c1="foo"))
        conn.execute(t1.delete().where(t1.c.c1 == "foo"))
        conn.execute(t1.update().where(t1.c.c1 == "foo").values(c1="bar"))

        # select: uses executedirect
        conn.execute(t1.select())

        # manual flagging
        conn.execution_options(native_odbc_execute=True).execute(t1.select())
        conn.execution_options(native_odbc_execute=False).execute(
            t1.insert().values(c1="foo")
        )

        # c[2] is the kwargs of each mock cursor.execute call; compare
        # the recorded "direct" flags in invocation order
        eq_(
            # fmt: off
            [
                c[2]
                for c in dbapi.connect.return_value.cursor.
                return_value.execute.mock_calls
            ],
            # fmt: on
            [
                {"direct": True},
                {"direct": True},
                {"direct": True},
                {"direct": True},
                {"direct": False},
                {"direct": True},
            ]
        )
    def test_update(self):
        """
        Tests sending functions and SQL expressions to the VALUES and SET
        clauses of INSERT/UPDATE instances, and that column-level defaults
        get overridden.
        """

        meta = self.metadata
        # t: plain integer value column
        t = Table(
            "t1",
            meta,
            Column(
                "id",
                Integer,
                Sequence("t1idseq", optional=True),
                primary_key=True,
            ),
            Column("value", Integer),
        )
        # t2: has a column-level default (value=7) and an onupdate
        # ("thisisstuff") to be overridden/triggered below
        t2 = Table(
            "t2",
            meta,
            Column(
                "id",
                Integer,
                Sequence("t2idseq", optional=True),
                primary_key=True,
            ),
            Column("value", Integer, default=7),
            Column("stuff", String(20), onupdate="thisisstuff"),
        )
        meta.create_all()
        # SQL function in INSERT ... VALUES: length("one") == 3
        t.insert(values=dict(value=func.length("one"))).execute()
        assert t.select().execute().first()["value"] == 3
        # SQL function in UPDATE ... SET: length("asfda") == 5
        t.update(values=dict(value=func.length("asfda"))).execute()
        assert t.select().execute().first()["value"] == 5

        r = t.insert(values=dict(value=func.length("sfsaafsda"))).execute()
        id_ = r.inserted_primary_key[0]
        assert t.select(t.c.id == id_).execute().first()["value"] == 9
        # Column objects usable as keys in the values dict
        t.update(values={t.c.value: func.length("asdf")}).execute()
        assert t.select().execute().first()["value"] == 4
        # bare insert picks up the column default value=7
        t2.insert().execute()
        t2.insert(values=dict(value=func.length("one"))).execute()
        # SQL expression combining a function and a literal: 5 + -19 == -14
        t2.insert(values=dict(value=func.length("asfda") + -19)).execute(
            stuff="hi"
        )

        res = exec_sorted(select([t2.c.value, t2.c.stuff]))
        eq_(res, [(-14, "hi"), (3, None), (7, None)])

        # UPDATE with a function value plus an execute-time parameter
        t2.update(values=dict(value=func.length("asdsafasd"))).execute(
            stuff="some stuff"
        )
        assert select([t2.c.value, t2.c.stuff]).execute().fetchall() == [
            (9, "some stuff"),
            (9, "some stuff"),
            (9, "some stuff"),
        ]

        t2.delete().execute()

        t2.insert(values=dict(value=func.length("one") + 8)).execute()
        assert t2.select().execute().first()["value"] == 11

        # UPDATE without an explicit "stuff" value fires the onupdate default
        t2.update(values=dict(value=func.length("asfda"))).execute()
        eq_(
            select([t2.c.value, t2.c.stuff]).execute().first(),
            (5, "thisisstuff"),
        )

        # explicit value in the UPDATE overrides the onupdate default
        t2.update(
            values={t2.c.value: func.length("asfdaasdf"), t2.c.stuff: "foo"}
        ).execute()

        eq_(select([t2.c.value, t2.c.stuff]).execute().first(), (9, "foo"))
    def _assert_data_autoincrement_returning(self, table):
        """Run a fixed series of INSERTs against *table* under
        ``implicit_returning=True`` and assert the exact SQL emitted:
        explicit-id inserts compile as plain INSERT, server-generated ids
        use RETURNING, and ``inline=True`` suppresses RETURNING.

        The same sequence is then replayed against a reflected copy of
        the table to confirm reflection preserves this behavior.
        """
        engine = engines.testing_engine(options={"implicit_returning": True})

        with self.sql_execution_asserter(engine) as asserter:
            with engine.connect() as conn:

                # execute with explicit id

                r = conn.execute(table.insert(), {"id": 30, "data": "d1"})
                eq_(r.inserted_primary_key, [30])

                # execute with prefetch id

                r = conn.execute(table.insert(), {"data": "d2"})
                eq_(r.inserted_primary_key, [1])

                # executemany with explicit ids

                conn.execute(
                    table.insert(),
                    {
                        "id": 31,
                        "data": "d3"
                    },
                    {
                        "id": 32,
                        "data": "d4"
                    },
                )

                # executemany, uses SERIAL

                conn.execute(table.insert(), {"data": "d5"}, {"data": "d6"})

                # single execute, explicit id, inline

                conn.execute(table.insert(inline=True), {
                    "id": 33,
                    "data": "d7"
                })

                # single execute, inline, uses SERIAL

                conn.execute(table.insert(inline=True), {"data": "d8"})

        # expected SQL, in order, for the sequence above; only the
        # single-row server-generated insert carries RETURNING
        asserter.assert_(
            DialectSQL(
                "INSERT INTO testtable (id, data) VALUES (:id, :data)",
                {
                    "id": 30,
                    "data": "d1"
                },
            ),
            DialectSQL(
                "INSERT INTO testtable (data) VALUES (:data) RETURNING "
                "testtable.id",
                {"data": "d2"},
            ),
            DialectSQL(
                "INSERT INTO testtable (id, data) VALUES (:id, :data)",
                [{
                    "id": 31,
                    "data": "d3"
                }, {
                    "id": 32,
                    "data": "d4"
                }],
            ),
            DialectSQL(
                "INSERT INTO testtable (data) VALUES (:data)",
                [{
                    "data": "d5"
                }, {
                    "data": "d6"
                }],
            ),
            DialectSQL(
                "INSERT INTO testtable (id, data) VALUES (:id, :data)",
                [{
                    "id": 33,
                    "data": "d7"
                }],
            ),
            DialectSQL("INSERT INTO testtable (data) VALUES (:data)",
                       [{
                           "data": "d8"
                       }]),
        )

        # verify the data landed as expected, then clear for the next pass
        with engine.connect() as conn:
            eq_(
                conn.execute(table.select()).fetchall(),
                [
                    (30, "d1"),
                    (1, "d2"),
                    (31, "d3"),
                    (32, "d4"),
                    (2, "d5"),
                    (3, "d6"),
                    (33, "d7"),
                    (4, "d8"),
                ],
            )
            conn.execute(table.delete())

        # test the same series of events using a reflected version of
        # the table

        m2 = MetaData(engine)
        table = Table(table.name, m2, autoload=True)

        with self.sql_execution_asserter(engine) as asserter:
            with engine.connect() as conn:
                conn.execute(table.insert(), {"id": 30, "data": "d1"})
                r = conn.execute(table.insert(), {"data": "d2"})
                # the id sequence kept advancing through the first pass
                eq_(r.inserted_primary_key, [5])
                conn.execute(
                    table.insert(),
                    {
                        "id": 31,
                        "data": "d3"
                    },
                    {
                        "id": 32,
                        "data": "d4"
                    },
                )
                conn.execute(table.insert(), {"data": "d5"}, {"data": "d6"})
                conn.execute(table.insert(inline=True), {
                    "id": 33,
                    "data": "d7"
                })
                conn.execute(table.insert(inline=True), {"data": "d8"})

        # reflected table must emit the identical SQL sequence
        asserter.assert_(
            DialectSQL(
                "INSERT INTO testtable (id, data) VALUES (:id, :data)",
                {
                    "id": 30,
                    "data": "d1"
                },
            ),
            DialectSQL(
                "INSERT INTO testtable (data) VALUES (:data) RETURNING "
                "testtable.id",
                {"data": "d2"},
            ),
            DialectSQL(
                "INSERT INTO testtable (id, data) VALUES (:id, :data)",
                [{
                    "id": 31,
                    "data": "d3"
                }, {
                    "id": 32,
                    "data": "d4"
                }],
            ),
            DialectSQL(
                "INSERT INTO testtable (data) VALUES (:data)",
                [{
                    "data": "d5"
                }, {
                    "data": "d6"
                }],
            ),
            DialectSQL(
                "INSERT INTO testtable (id, data) VALUES (:id, :data)",
                [{
                    "id": 33,
                    "data": "d7"
                }],
            ),
            DialectSQL("INSERT INTO testtable (data) VALUES (:data)",
                       [{
                           "data": "d8"
                       }]),
        )

        with engine.connect() as conn:
            eq_(
                conn.execute(table.select()).fetchall(),
                [
                    (30, "d1"),
                    (5, "d2"),
                    (31, "d3"),
                    (32, "d4"),
                    (6, "d5"),
                    (7, "d6"),
                    (33, "d7"),
                    (8, "d8"),
                ],
            )
            conn.execute(table.delete())
    def test_autoincrement(self):
        """Create tables with varying primary-key/autoincrement layouts,
        reflect them, and verify both the ``autoincrement`` flag on the
        declared columns and the runtime identity behavior on INSERT.
        """
        # ai_1..ai_3: one explicit autoincrement column ("int_y") in
        # different positions relative to defaulted PK columns
        Table(
            "ai_1",
            metadata,
            Column("int_y", Integer, primary_key=True, autoincrement=True),
            Column("int_n", Integer, DefaultClause("0"), primary_key=True),
        )
        Table(
            "ai_2",
            metadata,
            Column("int_y", Integer, primary_key=True, autoincrement=True),
            Column("int_n", Integer, DefaultClause("0"), primary_key=True),
        )
        Table(
            "ai_3",
            metadata,
            Column("int_n", Integer, DefaultClause("0"), primary_key=True),
            Column("int_y", Integer, primary_key=True, autoincrement=True),
        )

        # ai_4: composite PK with no autoincrement column at all
        Table(
            "ai_4",
            metadata,
            Column("int_n", Integer, DefaultClause("0"), primary_key=True),
            Column("int_n2", Integer, DefaultClause("0"), primary_key=True),
        )
        Table(
            "ai_5",
            metadata,
            Column("int_y", Integer, primary_key=True, autoincrement=True),
            Column("int_n", Integer, DefaultClause("0"), primary_key=True),
        )
        # ai_6/ai_7: string PK columns mixed with an autoincrement int
        Table(
            "ai_6",
            metadata,
            Column("o1", String(1), DefaultClause("x"), primary_key=True),
            Column("int_y", Integer, primary_key=True, autoincrement=True),
        )
        Table(
            "ai_7",
            metadata,
            Column("o1", String(1), DefaultClause("x"), primary_key=True),
            Column("o2", String(1), DefaultClause("x"), primary_key=True),
            Column("int_y", Integer, autoincrement=True, primary_key=True),
        )
        # ai_8: string-only composite PK, no integer column
        Table(
            "ai_8",
            metadata,
            Column("o1", String(1), DefaultClause("x"), primary_key=True),
            Column("o2", String(1), DefaultClause("x"), primary_key=True),
        )
        metadata.create_all()

        table_names = [
            "ai_1",
            "ai_2",
            "ai_3",
            "ai_4",
            "ai_5",
            "ai_6",
            "ai_7",
            "ai_8",
        ]
        mr = MetaData(testing.db)

        for name in table_names:
            # reflect each table for coverage, but run the assertions
            # against the originally declared Table object
            tbl = Table(name, mr, autoload=True)
            tbl = metadata.tables[name]

            # test that the flag itself reflects appropriately
            for col in tbl.c:
                if "int_y" in col.name:
                    is_(col.autoincrement, True)
                    is_(tbl._autoincrement_column, col)
                else:
                    eq_(col.autoincrement, "auto")
                    is_not(tbl._autoincrement_column, col)

            # mxodbc can't handle scope_identity() with DEFAULT VALUES

            if testing.db.driver == "mxodbc":
                eng = [
                    engines.testing_engine(
                        options={"implicit_returning": True})
                ]
            else:
                eng = [
                    engines.testing_engine(
                        options={"implicit_returning": False}),
                    engines.testing_engine(
                        options={"implicit_returning": True}),
                ]

            for counter, engine in enumerate(eng):
                with engine.begin() as conn:
                    conn.execute(tbl.insert())
                    if "int_y" in tbl.c:
                        # the identity keeps advancing across engines even
                        # though rows are deleted at the end of each pass
                        eq_(
                            conn.execute(select([tbl.c.int_y])).scalar(),
                            counter + 1,
                        )
                        # the generated value appears exactly once in the row
                        assert (list(conn.execute(
                            tbl.select()).first()).count(counter + 1) == 1)
                    else:
                        # no autoincrement column: nothing should have
                        # produced the value 1 in the inserted row
                        assert 1 not in list(
                            conn.execute(tbl.select()).first())
                    conn.execute(tbl.delete())