def test_rudimental_round_trip(self):
        # test_suite contains many more JSON round trip tests via the
        # backend-agnostic JSON type; this is a bare-bones sanity check
        # of the MySQL-specific mysql.JSON type.

        json_table = Table(
            "mysql_json", self.metadata, Column("foo", mysql.JSON)
        )
        self.metadata.create_all()

        payload = {"json": {"foo": "bar"}, "recs": ["one", "two"]}

        with testing.db.connect() as conn:
            conn.execute(json_table.insert(), foo=payload)
            eq_(conn.scalar(select([json_table.c.foo])), payload)
    def test_broken_enum_returns_blanks(self):
        # insert invalid, blank, valid and NULL values into both the
        # generic Enum and the MySQL-specific ENUM column; per the final
        # assertion, the invalid values are expected back as "".
        t = Table(
            "enum_missing",
            self.metadata,
            Column("id", Integer, primary_key=True),
            Column("e1", sqltypes.Enum("one", "two", "three")),
            Column("e2", mysql.ENUM("one", "two", "three")),
        )
        t.create()

        with testing.db.connect() as conn:
            for val in ("nonexistent", "", "two", None):
                conn.execute(t.insert(), {"e1": val, "e2": val})

            eq_(
                conn.execute(
                    select([t.c.e1, t.c.e2]).order_by(t.c.id)
                ).fetchall(),
                [("", ""), ("", ""), ("two", "two"), (None, None)],
            )
    def test_unicode_roundtrip(self):
        # a SET whose members contain non-ASCII characters should
        # survive a full insert/select cycle intact.
        set_table = Table(
            "t",
            self.metadata,
            Column("id", Integer, primary_key=True),
            Column("data", mysql.SET(u("réveillé"), u("drôle"), u("S’il"))),
        )
        set_table.create()

        expected = set([u("réveillé"), u("drôle")])
        with testing.db.begin() as conn:
            conn.execute(set_table.insert(), {"data": expected})
            fetched = conn.execute(set_table.select()).first()
            eq_(fetched, (1, expected))
# Beispiel #4
 def test_anticipate_no_pk_composite_pk_prefetch(self):
     # with implicit RETURNING turned off, the second composite-PK
     # column has no default to prefetch, so compiling the INSERT
     # warns and omits the column from the statement.
     tbl = Table(
         "t",
         MetaData(),
         Column("x", Integer, primary_key=True),
         Column("y", Integer, primary_key=True),
     )
     dialect = postgresql.dialect()
     dialect.implicit_returning = False
     warning = ("Column 't.y' is marked as a member.*"
                "Note that as of SQLAlchemy 1.1,")
     with expect_warnings(warning):
         self.assert_compile(
             tbl.insert(),
             "INSERT INTO t (x) VALUES (%(x)s)",
             params={"x": 5},
             dialect=dialect,
         )
# Beispiel #5
 def test_anticipate_no_pk_non_composite_pk_implicit_returning(self):
     # a sole integer PK with autoincrement=False and no default cannot
     # be generated even with implicit RETURNING enabled; compiling the
     # INSERT warns that the column may not store NULL.
     tbl = Table(
         "t",
         MetaData(),
         Column("x", Integer, primary_key=True, autoincrement=False),
         Column("q", Integer),
     )
     dialect = postgresql.dialect()
     dialect.implicit_returning = True
     warning = ("Column 't.x' is marked as a member.*"
                "may not store NULL.$")
     with expect_warnings(warning):
         self.assert_compile(
             tbl.insert(),
             "INSERT INTO t (q) VALUES (%(q)s)",
             params={"q": 5},
             dialect=dialect,
         )
# Beispiel #6
    def test_insert_from_select_seq(self):
        # INSERT ... FROM SELECT should render nextval() for the
        # Sequence-backed primary key directly inside the SELECT list.
        meta = MetaData()

        t1 = Table(
            "t",
            meta,
            Column("id", Integer, Sequence("id_seq"), primary_key=True),
            Column("data", String),
        )

        self.assert_compile(
            t1.insert().from_select(("data", ), select([t1.c.data])),
            "INSERT INTO t (data, id) SELECT t.data, "
            "nextval('id_seq') AS next_value_1 FROM t",
            dialect=postgresql.dialect(),
        )
# Beispiel #7
    def setUp(self):
        """Initialize the sharding fixture.

        Creates four shard engines, a shared "ids" counter table used to
        generate primary keys, and the weather_locations /
        weather_reports tables on every shard, then seeds the counter
        and configures the session and mappers.
        """
        global db1, db2, db3, db4, weather_locations, weather_reports

        db1, db2, db3, db4 = self._init_dbs()

        meta = MetaData()
        ids = Table("ids", meta, Column("nextid", Integer, nullable=False))

        def id_generator(ctx):
            # in reality, might want to use a separate transaction for this.
            # use a context manager so the connection is returned to the
            # pool even if a statement fails (previously it was leaked).
            with db1.connect() as c:
                nextid = c.execute(ids.select().with_for_update()).scalar()
                c.execute(ids.update(values={ids.c.nextid: ids.c.nextid + 1}))
                return nextid

        weather_locations = Table(
            "weather_locations",
            meta,
            Column("id", Integer, primary_key=True, default=id_generator),
            Column("continent", String(30), nullable=False),
            Column("city", String(50), nullable=False),
            schema=self.schema,
        )

        weather_reports = Table(
            "weather_reports",
            meta,
            Column("id", Integer, primary_key=True),
            Column("location_id", Integer, ForeignKey(weather_locations.c.id)),
            Column("temperature", Float),
            Column("report_time", DateTime, default=datetime.datetime.now),
            schema=self.schema,
        )

        # create the schema on every shard, then seed the id counter
        # (stored only on db1) at 1
        for db in (db1, db2, db3, db4):
            meta.create_all(db)

        db1.execute(ids.insert(), nextid=1)

        self.setup_session()
        self.setup_mappers()
    def test_enable_scope_identity(self):
        # even with use_scope_identity on, a DEFAULT VALUES insert does
        # not embed scope_identity() into the statement; it is emitted
        # as a separate SELECT afterwards.
        engine = engines.testing_engine(options={"use_scope_identity": True})
        t1 = Table(
            "t1",
            self.metadata,
            Column("id", Integer, primary_key=True),
            implicit_returning=False,
        )
        self.metadata.create_all(engine)

        with self.sql_execution_asserter(engine) as asserter:
            with engine.begin() as conn:
                conn.execute(t1.insert())

        asserter.assert_(
            CursorSQL("INSERT INTO t1 DEFAULT VALUES"),
            CursorSQL("SELECT scope_identity() AS lastrowid"),
        )
# Beispiel #9
    def test_inline_defaults(self):
        # an inline=True INSERT renders column defaults — both a SQL
        # function and a scalar SELECT — directly in the VALUES clause.
        m = MetaData()
        foo = Table("foo", m, Column("id", Integer))

        test_table = Table(
            "test",
            m,
            Column("col1", Integer, default=func.foo(1)),
            Column(
                "col2",
                Integer,
                default=select([func.coalesce(func.max(foo.c.id))]),
            ),
        )

        self.assert_compile(
            test_table.insert(inline=True, values={}),
            "INSERT INTO test (col1, col2) VALUES (foo(:foo_1), "
            "(SELECT coalesce(max(foo.id)) AS coalesce_1 FROM "
            "foo))",
        )
    def test_embedded_scope_identity(self):
        # pyodbc-specific behavior: with use_scope_identity enabled and
        # implicit RETURNING off, "; select scope_identity()" is embedded
        # into the INSERT statement itself.
        engine = engines.testing_engine(options={"use_scope_identity": True})
        t1 = Table(
            "t1",
            self.metadata,
            Column("id", Integer, primary_key=True),
            Column("data", String(50)),
            implicit_returning=False,
        )
        self.metadata.create_all(engine)

        with self.sql_execution_asserter(engine) as asserter:
            with engine.begin() as conn:
                conn.execute(t1.insert(), {"data": "somedata"})

        asserter.assert_(
            CursorSQL(
                "INSERT INTO t1 (data) VALUES (?); select scope_identity()",
                ("somedata",),
            )
        )
    def _test_disable_scope_identity(self):
        # with use_scope_identity disabled, the dialect falls back to a
        # separate "SELECT @@identity" after the INSERT.
        engine = engines.testing_engine(options={"use_scope_identity": False})
        t1 = Table(
            "t1",
            self.metadata,
            Column("id", Integer, primary_key=True),
            Column("data", String(50)),
            implicit_returning=False,
        )
        self.metadata.create_all(engine)

        with self.sql_execution_asserter(engine) as asserter:
            with engine.begin() as conn:
                conn.execute(t1.insert(), {"data": "somedata"})

        # TODO: need a dialect SQL that acts like Cursor SQL
        asserter.assert_(
            DialectSQL(
                "INSERT INTO t1 (data) VALUES (:data)", {"data": "somedata"}
            ),
            CursorSQL("SELECT @@identity AS lastrowid"),
        )
 def test_empty_set_empty_string(self):
     # with retrieve_as_bitwise=True, a SET containing the blank element
     # "" must be distinguishable from a truly empty set on round trip.
     t = Table(
         "t",
         self.metadata,
         Column("id", Integer),
         Column("data", mysql.SET("a", "b", "", retrieve_as_bitwise=True)),
     )
     t.create()

     rows = [
         {"id": 1, "data": set()},
         {"id": 2, "data": set([""])},
         {"id": 3, "data": set(["a", ""])},
         {"id": 4, "data": set(["b"])},
     ]
     with testing.db.begin() as conn:
         conn.execute(t.insert(), *rows)
         eq_(
             conn.execute(t.select().order_by(t.c.id)).fetchall(),
             [
                 (1, set()),
                 (2, set([""])),
                 (3, set(["a", ""])),
                 (4, set(["b"])),
             ],
         )
    def _test_round_trip(self, tab, cls, convert_int):
        # insert then update a row in a rowversion-bearing table and
        # verify the "rv" column matches the server's @@DBTS after each
        # write (converted to int when convert_int is set).
        rv_table = Table(
            tab,
            MetaData(),
            Column("data", String(50)),
            Column("rv", cls(convert_int=convert_int)),
        )

        def current_dbts(conn):
            # fetch the current database timestamp in the same
            # representation the column type is configured to return
            ts = conn.scalar("SELECT @@DBTS")
            if convert_int:
                ts = int(codecs.encode(ts, "hex"), 16)
            return ts

        with testing.db.connect() as conn:
            conn.execute(rv_table.insert().values(data="foo"))
            eq_(conn.scalar(select([rv_table.c.rv])), current_dbts(conn))

            conn.execute(
                rv_table.update().values(data="bar").where(
                    rv_table.c.data == "foo"))
            eq_(conn.scalar(select([rv_table.c.rv])), current_dbts(conn))
    def test_update(self):
        """
        Tests sending functions and SQL expressions to the VALUES and SET
        clauses of INSERT/UPDATE instances, and that column-level defaults
        get overridden.
        """

        meta = self.metadata
        t = Table(
            "t1",
            meta,
            Column(
                "id",
                Integer,
                Sequence("t1idseq", optional=True),
                primary_key=True,
            ),
            Column("value", Integer),
        )
        t2 = Table(
            "t2",
            meta,
            Column(
                "id",
                Integer,
                Sequence("t2idseq", optional=True),
                primary_key=True,
            ),
            # both a Python-side default and an onupdate default, to be
            # overridden (or not) by the statements below
            Column("value", Integer, default=7),
            Column("stuff", String(20), onupdate="thisisstuff"),
        )
        meta.create_all()
        # SQL function in INSERT values: length('one') == 3
        t.insert(values=dict(value=func.length("one"))).execute()
        assert t.select().execute().first()["value"] == 3
        # SQL function in UPDATE SET: length('asfda') == 5
        t.update(values=dict(value=func.length("asfda"))).execute()
        assert t.select().execute().first()["value"] == 5

        r = t.insert(values=dict(value=func.length("sfsaafsda"))).execute()
        id_ = r.inserted_primary_key[0]
        assert t.select(t.c.id == id_).execute().first()["value"] == 9
        # Column objects may be used directly as keys in the values dict
        t.update(values={t.c.value: func.length("asdf")}).execute()
        assert t.select().execute().first()["value"] == 4
        # three rows into t2: column default (7), a function (3), and a
        # function-plus-literal expression (-14) with explicit 'stuff'
        t2.insert().execute()
        t2.insert(values=dict(value=func.length("one"))).execute()
        t2.insert(values=dict(value=func.length("asfda") + -19)).execute(
            stuff="hi"
        )

        res = exec_sorted(select([t2.c.value, t2.c.stuff]))
        eq_(res, [(-14, "hi"), (3, None), (7, None)])

        # criterion-less UPDATE touches all rows; the explicit 'stuff'
        # parameter overrides the onupdate default
        t2.update(values=dict(value=func.length("asdsafasd"))).execute(
            stuff="some stuff"
        )
        assert select([t2.c.value, t2.c.stuff]).execute().fetchall() == [
            (9, "some stuff"),
            (9, "some stuff"),
            (9, "some stuff"),
        ]

        t2.delete().execute()

        t2.insert(values=dict(value=func.length("one") + 8)).execute()
        assert t2.select().execute().first()["value"] == 11

        # no explicit 'stuff' here, so the onupdate default fires
        t2.update(values=dict(value=func.length("asfda"))).execute()
        eq_(
            select([t2.c.value, t2.c.stuff]).execute().first(),
            (5, "thisisstuff"),
        )

        # an explicit value in the UPDATE's values dict again overrides
        # the onupdate default
        t2.update(
            values={t2.c.value: func.length("asfdaasdf"), t2.c.stuff: "foo"}
        ).execute()

        eq_(select([t2.c.value, t2.c.stuff]).execute().first(), (9, "foo"))
 def test_infinite_float(self):
     # float("inf") should survive a round trip through a Float column.
     t = Table("t", self.metadata, Column("data", Float))
     self.metadata.create_all()
     inf = float("inf")
     t.insert().execute(data=inf)
     eq_(t.select().execute().fetchall(), [(inf, )])
    def test_enum(self):
        """Exercise the ENUM type.

        Covers pre-quoted (deprecated) literals, strict mode, the
        generic Enum type, PEP-435 enum classes, DDL rendering, and
        round-tripping of valid, defaulted and rejected values.
        """

        # pre-quoted literals like "'a'" are deprecated; build these
        # types under the expected deprecation warning
        with testing.expect_deprecated("Manually quoting ENUM value literals"):
            e1, e2 = mysql.ENUM("'a'", "'b'"), mysql.ENUM("'a'", "'b'")
            e3 = mysql.ENUM("'a'", "'b'", strict=True)
            e4 = mysql.ENUM("'a'", "'b'", strict=True)

        enum_table = Table(
            "mysql_enum",
            self.metadata,
            Column("e1", e1),
            Column("e2", e2, nullable=False),
            Column(
                "e2generic",
                Enum("a", "b", validate_strings=True),
                nullable=False,
            ),
            Column("e3", e3),
            Column("e4", e4, nullable=False),
            Column("e5", mysql.ENUM("a", "b")),
            Column("e5generic", Enum("a", "b")),
            # mixed pre-quoted and bare literals
            Column("e6", mysql.ENUM("'a'", "b")),
            # PEP-435 enum with a values_callable controlling the
            # rendered value set
            Column(
                "e7",
                mysql.ENUM(
                    EnumSetTest.SomeEnum,
                    values_callable=EnumSetTest.get_enum_string_values,
                ),
            ),
            Column("e8", mysql.ENUM(EnumSetTest.SomeEnum)),
        )

        # DDL checks: quotes inside pre-quoted literals are escaped
        # ('''a''' for e6), and the enum-class columns render the value
        # lists shown below
        eq_(colspec(enum_table.c.e1), "e1 ENUM('a','b')")
        eq_(colspec(enum_table.c.e2), "e2 ENUM('a','b') NOT NULL")
        eq_(colspec(enum_table.c.e2generic),
            "e2generic ENUM('a','b') NOT NULL")
        eq_(colspec(enum_table.c.e3), "e3 ENUM('a','b')")
        eq_(colspec(enum_table.c.e4), "e4 ENUM('a','b') NOT NULL")
        eq_(colspec(enum_table.c.e5), "e5 ENUM('a','b')")
        eq_(colspec(enum_table.c.e5generic), "e5generic ENUM('a','b')")
        eq_(colspec(enum_table.c.e6), "e6 ENUM('''a''','b')")
        eq_(colspec(enum_table.c.e7), "e7 ENUM('1','2','3','a','b')")
        eq_(
            colspec(enum_table.c.e8),
            "e8 ENUM('one','two','three','AMember','BMember')",
        )
        enum_table.create()

        # NULLs into the NOT NULL columns are rejected at the DB level
        assert_raises(
            exc.DBAPIError,
            enum_table.insert().execute,
            e1=None,
            e2=None,
            e3=None,
            e4=None,
        )

        # a value outside the enum raises client-side (StatementError),
        # before any SQL is emitted
        assert enum_table.c.e2generic.type.validate_strings
        assert_raises(
            exc.StatementError,
            enum_table.insert().execute,
            e1="c",
            e2="c",
            e2generic="c",
            e3="c",
            e4="c",
            e5="c",
            e5generic="c",
            e6="c",
            e7="c",
            e8="c",
        )

        # an all-defaults row, then one row per valid value; note e6
        # takes the quoted literal "'a'" as its value
        enum_table.insert().execute()
        enum_table.insert().execute(
            e1="a",
            e2="a",
            e2generic="a",
            e3="a",
            e4="a",
            e5="a",
            e5generic="a",
            e6="'a'",
            e7="a",
            e8="AMember",
        )
        enum_table.insert().execute(
            e1="b",
            e2="b",
            e2generic="b",
            e3="b",
            e4="b",
            e5="b",
            e5generic="b",
            e6="b",
            e7="b",
            e8="BMember",
        )

        res = enum_table.select().execute().fetchall()

        # the defaults row comes back with "a" in the NOT NULL columns
        # (value filled by the server — see expected[0]); enum-class
        # columns return actual SomeEnum members
        expected = [
            (None, "a", "a", None, "a", None, None, None, None, None),
            (
                "a",
                "a",
                "a",
                "a",
                "a",
                "a",
                "a",
                "'a'",
                EnumSetTest.SomeEnum.AMember,
                EnumSetTest.SomeEnum.AMember,
            ),
            (
                "b",
                "b",
                "b",
                "b",
                "b",
                "b",
                "b",
                "b",
                EnumSetTest.SomeEnum.BMember,
                EnumSetTest.SomeEnum.BMember,
            ),
        ]

        eq_(res, expected)
    def test_fetchid_trigger(self):
        """
        Verify identity return value on inserting to a trigger table.

        MSSQL's OUTPUT INSERTED clause does not work for the
        case of a table having an identity (autoincrement)
        primary key column, and which also has a trigger configured
        to fire upon each insert and subsequently perform an
        insert into a different table.

        SQLALchemy's MSSQL dialect by default will attempt to
        use an OUTPUT_INSERTED clause, which in this case will
        raise the following error:

        ProgrammingError: (ProgrammingError) ('42000', 334,
        "[Microsoft][SQL Server Native Client 10.0][SQL Server]The
        target table 't1' of the DML statement cannot have any enabled
        triggers if the statement contains an OUTPUT clause without
        INTO clause.", 7748) 'INSERT INTO t1 (descr) OUTPUT inserted.id
        VALUES (?)' ('hello',)

        This test verifies a workaround, which is to rely on the
        older SCOPE_IDENTITY() call, which still works for this scenario.
        To enable the workaround, the Table must be instantiated
        with the init parameter 'implicit_returning = False'.
        """

        # todo: this same test needs to be tried in a multithreaded context
        #      with multiple threads inserting to the same table.
        # todo: check whether this error also occurs with clients other
        #      than the SQL Server Native Client. Maybe an assert_raises
        #      test should be written.
        meta = MetaData(testing.db)
        t1 = Table(
            "t1",
            meta,
            Column("id", Integer, mssql_identity_start=100, primary_key=True),
            Column("descr", String(200)),
            # the following flag will prevent the
            # MSSQLCompiler.returning_clause from getting called,
            # though the ExecutionContext will still have a
            # _select_lastrowid, so the SELECT SCOPE_IDENTITY() will
            # hopefully be called instead.
            implicit_returning=False,
        )
        t2 = Table(
            "t2",
            meta,
            Column("id", Integer, mssql_identity_start=200, primary_key=True),
            Column("descr", String(200)),
        )
        meta.create_all()
        # NOTE(review): this connection is never explicitly closed;
        # consider a context manager / try-finally close
        con = testing.db.connect()
        # trigger copies every t1 insert into t2
        con.execute(
            """create trigger paj on t1 for insert as
            insert into t2 (descr) select descr from inserted"""
        )

        try:
            tr = con.begin()
            # direct insert into t2 uses its own identity seed (200)
            r = con.execute(t2.insert(), descr="hello")
            self.assert_(r.inserted_primary_key == [200])
            # insert into the triggered table still reports t1's
            # identity (100) via SCOPE_IDENTITY(), not the trigger's
            r = con.execute(t1.insert(), descr="hello")
            self.assert_(r.inserted_primary_key == [100])

        finally:
            # NOTE(review): commit runs even when the try body raised —
            # presumably intentional so the drop/drop_all cleanup can
            # proceed; confirm
            tr.commit()
            con.execute("""drop trigger paj""")
            meta.drop_all()
    def test_decimal_notation(self):
        # round-trip a wide spread of Decimal notations (plain, signed,
        # scientific, leading zeros) through a NUMERIC(38, 20) column,
        # fetching each row back by its generated primary key.
        numeric_table = Table(
            "numeric_table",
            metadata,
            Column(
                "id",
                Integer,
                Sequence("numeric_id_seq", optional=True),
                primary_key=True,
            ),
            Column(
                "numericcol",
                Numeric(precision=38, scale=20, asdecimal=True),
            ),
        )
        metadata.create_all()

        notations = (
            "1500000.00000000000000000000",
            "-1500000.00000000000000000000",
            "1500000",
            "0.0000000000000000002",
            "0.2",
            "-0.0000000000000000002",
            "-2E-2",
            "156666.458923543",
            "-156666.458923543",
            "1",
            "-1",
            "-1234",
            "1234",
            "2E-12",
            "4E8",
            "3E-6",
            "3E-7",
            "4.1",
            "1E-1",
            "1E-2",
            "1E-3",
            "1E-4",
            "1E-5",
            "1E-6",
            "1E-7",
            "1E-1",
            "1E-8",
            "0.2732E2",
            "-0.2432E2",
            "4.35656E2",
            "-02452E-2",
            "45125E-2",
            "1234.58965E-2",
            "1.521E+15",
            # previously, these were at -1E-25, which were inserted
            # cleanly however we only got back 20 digits of accuracy.
            # pyodbc as of 4.0.22 now disallows the silent truncation.
            "-1E-20",
            "1E-20",
            "1254E-20",
            "-1203E-20",
            "0",
            "-0.00",
            "-0",
            "4585E12",
            "000000000000000000012",
            "000000000000.32E12",
            "00000000000000.1E+12",
            # these are no longer accepted by pyodbc 4.0.22 but it seems
            # they were not actually round-tripping correctly before that
            # in any case
            # '-1E-25',
            # '1E-25',
            # '1254E-25',
            # '-1203E-25',
            # '000000000000.2E-32',
        )
        test_items = [decimal.Decimal(d) for d in notations]

        with testing.db.connect() as conn:
            for value in test_items:
                pk = conn.execute(
                    numeric_table.insert(), dict(numericcol=value)
                ).inserted_primary_key
                returned = conn.scalar(
                    select([numeric_table.c.numericcol]).where(
                        numeric_table.c.id == pk[0]))
                eq_(value, returned)
    def test_autoincrement(self):
        """Create tables with a variety of primary-key layouts, verify
        which column carries the autoincrement flag after reflection,
        then round-trip an INSERT on each table under engines with and
        without implicit RETURNING."""
        Table(
            "ai_1",
            metadata,
            Column("int_y", Integer, primary_key=True, autoincrement=True),
            Column("int_n", Integer, DefaultClause("0"), primary_key=True),
        )
        Table(
            "ai_2",
            metadata,
            Column("int_y", Integer, primary_key=True, autoincrement=True),
            Column("int_n", Integer, DefaultClause("0"), primary_key=True),
        )
        Table(
            "ai_3",
            metadata,
            Column("int_n", Integer, DefaultClause("0"), primary_key=True),
            Column("int_y", Integer, primary_key=True, autoincrement=True),
        )

        # ai_4 and ai_8 have no autoincrement column at all
        Table(
            "ai_4",
            metadata,
            Column("int_n", Integer, DefaultClause("0"), primary_key=True),
            Column("int_n2", Integer, DefaultClause("0"), primary_key=True),
        )
        Table(
            "ai_5",
            metadata,
            Column("int_y", Integer, primary_key=True, autoincrement=True),
            Column("int_n", Integer, DefaultClause("0"), primary_key=True),
        )
        Table(
            "ai_6",
            metadata,
            Column("o1", String(1), DefaultClause("x"), primary_key=True),
            Column("int_y", Integer, primary_key=True, autoincrement=True),
        )
        Table(
            "ai_7",
            metadata,
            Column("o1", String(1), DefaultClause("x"), primary_key=True),
            Column("o2", String(1), DefaultClause("x"), primary_key=True),
            Column("int_y", Integer, autoincrement=True, primary_key=True),
        )
        Table(
            "ai_8",
            metadata,
            Column("o1", String(1), DefaultClause("x"), primary_key=True),
            Column("o2", String(1), DefaultClause("x"), primary_key=True),
        )
        metadata.create_all()

        table_names = [
            "ai_1",
            "ai_2",
            "ai_3",
            "ai_4",
            "ai_5",
            "ai_6",
            "ai_7",
            "ai_8",
        ]
        mr = MetaData(testing.db)

        for name in table_names:
            # NOTE(review): the reflected Table is immediately discarded;
            # reflection appears to be exercised for its side effects only,
            # and the assertions run against the original Table object
            tbl = Table(name, mr, autoload=True)
            tbl = metadata.tables[name]

            # test that the flag itself reflects appropriately
            for col in tbl.c:
                if "int_y" in col.name:
                    is_(col.autoincrement, True)
                    is_(tbl._autoincrement_column, col)
                else:
                    eq_(col.autoincrement, "auto")
                    is_not(tbl._autoincrement_column, col)

            # mxodbc can't handle scope_identity() with DEFAULT VALUES

            if testing.db.driver == "mxodbc":
                eng = [
                    engines.testing_engine(
                        options={"implicit_returning": True})
                ]
            else:
                eng = [
                    engines.testing_engine(
                        options={"implicit_returning": False}),
                    engines.testing_engine(
                        options={"implicit_returning": True}),
                ]

            # insert a defaults-only row once per engine; the
            # autoincrement column (when present) should advance with
            # each insert, hence counter + 1
            for counter, engine in enumerate(eng):
                with engine.begin() as conn:
                    conn.execute(tbl.insert())
                    if "int_y" in tbl.c:
                        eq_(
                            conn.execute(select([tbl.c.int_y])).scalar(),
                            counter + 1,
                        )
                        # the generated value must appear exactly once
                        # in the row
                        assert (list(conn.execute(
                            tbl.select()).first()).count(counter + 1) == 1)
                    else:
                        assert 1 not in list(
                            conn.execute(tbl.select()).first())
                    conn.execute(tbl.delete())
    def _assert_data_noautoincrement(self, table):
        """Assert INSERT behavior for a table whose primary key has no
        Python-side or server-side default: explicit ids succeed, while
        omitting the id warns and then fails at the database, both for
        the given Table and for a reflected copy of it."""
        engine = engines.testing_engine(options={"implicit_returning": False})

        with engine.connect() as conn:
            conn.execute(table.insert(), {"id": 30, "data": "d1"})

            # single-row INSERT without an id: warns, then the DB rejects
            with expect_warnings(
                    ".*has no Python-side or server-side default.*"):
                assert_raises(
                    (exc.IntegrityError, exc.ProgrammingError),
                    conn.execute,
                    table.insert(),
                    {"data": "d2"},
                )
            # executemany without ids: same warning and failure
            with expect_warnings(
                    ".*has no Python-side or server-side default.*"):
                assert_raises(
                    (exc.IntegrityError, exc.ProgrammingError),
                    conn.execute,
                    table.insert(),
                    {"data": "d2"},
                    {"data": "d3"},
                )
            # both failure modes are exercised a second time (verifying
            # the state is unchanged after the prior errors)
            with expect_warnings(
                    ".*has no Python-side or server-side default.*"):
                assert_raises(
                    (exc.IntegrityError, exc.ProgrammingError),
                    conn.execute,
                    table.insert(),
                    {"data": "d2"},
                )
            with expect_warnings(
                    ".*has no Python-side or server-side default.*"):
                assert_raises(
                    (exc.IntegrityError, exc.ProgrammingError),
                    conn.execute,
                    table.insert(),
                    {"data": "d2"},
                    {"data": "d3"},
                )

            # executemany with explicit ids succeeds
            conn.execute(
                table.insert(),
                {
                    "id": 31,
                    "data": "d2"
                },
                {
                    "id": 32,
                    "data": "d3"
                },
            )
            # inline INSERT with an explicit id also succeeds
            conn.execute(table.insert(inline=True), {"id": 33, "data": "d4"})
            eq_(
                conn.execute(table.select()).fetchall(),
                [(30, "d1"), (31, "d2"), (32, "d3"), (33, "d4")],
            )
            conn.execute(table.delete())

        # test the same series of events using a reflected version of
        # the table

        m2 = MetaData(engine)
        table = Table(table.name, m2, autoload=True)
        with engine.connect() as conn:
            conn.execute(table.insert(), {"id": 30, "data": "d1"})

            with expect_warnings(
                    ".*has no Python-side or server-side default.*"):
                assert_raises(
                    (exc.IntegrityError, exc.ProgrammingError),
                    conn.execute,
                    table.insert(),
                    {"data": "d2"},
                )
            with expect_warnings(
                    ".*has no Python-side or server-side default.*"):
                assert_raises(
                    (exc.IntegrityError, exc.ProgrammingError),
                    conn.execute,
                    table.insert(),
                    {"data": "d2"},
                    {"data": "d3"},
                )
            conn.execute(
                table.insert(),
                {
                    "id": 31,
                    "data": "d2"
                },
                {
                    "id": 32,
                    "data": "d3"
                },
            )
            conn.execute(table.insert(inline=True), {"id": 33, "data": "d4"})
            eq_(
                conn.execute(table.select()).fetchall(),
                [(30, "d1"), (31, "d2"), (32, "d3"), (33, "d4")],
            )
    def _assert_data_autoincrement_returning(self, table):
        """Assert the full matrix of INSERT styles with implicit
        RETURNING enabled: explicit id, RETURNING-fetched id,
        executemany, and inline inserts — first against the given table,
        then the same sequence against a reflected copy of it, checking
        the exact SQL emitted in both cases."""
        engine = engines.testing_engine(options={"implicit_returning": True})

        with self.sql_execution_asserter(engine) as asserter:
            with engine.connect() as conn:

                # execute with explicit id

                r = conn.execute(table.insert(), {"id": 30, "data": "d1"})
                eq_(r.inserted_primary_key, [30])

                # execute with prefetch id

                r = conn.execute(table.insert(), {"data": "d2"})
                eq_(r.inserted_primary_key, [1])

                # executemany with explicit ids

                conn.execute(
                    table.insert(),
                    {
                        "id": 31,
                        "data": "d3"
                    },
                    {
                        "id": 32,
                        "data": "d4"
                    },
                )

                # executemany, uses SERIAL

                conn.execute(table.insert(), {"data": "d5"}, {"data": "d6"})

                # single execute, explicit id, inline

                conn.execute(table.insert(inline=True), {
                    "id": 33,
                    "data": "d7"
                })

                # single execute, inline, uses SERIAL

                conn.execute(table.insert(inline=True), {"data": "d8"})

        # only the second statement carries RETURNING; executemany and
        # inline inserts never do
        asserter.assert_(
            DialectSQL(
                "INSERT INTO testtable (id, data) VALUES (:id, :data)",
                {
                    "id": 30,
                    "data": "d1"
                },
            ),
            DialectSQL(
                "INSERT INTO testtable (data) VALUES (:data) RETURNING "
                "testtable.id",
                {"data": "d2"},
            ),
            DialectSQL(
                "INSERT INTO testtable (id, data) VALUES (:id, :data)",
                [{
                    "id": 31,
                    "data": "d3"
                }, {
                    "id": 32,
                    "data": "d4"
                }],
            ),
            DialectSQL(
                "INSERT INTO testtable (data) VALUES (:data)",
                [{
                    "data": "d5"
                }, {
                    "data": "d6"
                }],
            ),
            DialectSQL(
                "INSERT INTO testtable (id, data) VALUES (:id, :data)",
                [{
                    "id": 33,
                    "data": "d7"
                }],
            ),
            DialectSQL("INSERT INTO testtable (data) VALUES (:data)",
                       [{
                           "data": "d8"
                       }]),
        )

        # generated ids 1-4 interleave with the explicit ones
        with engine.connect() as conn:
            eq_(
                conn.execute(table.select()).fetchall(),
                [
                    (30, "d1"),
                    (1, "d2"),
                    (31, "d3"),
                    (32, "d4"),
                    (2, "d5"),
                    (3, "d6"),
                    (33, "d7"),
                    (4, "d8"),
                ],
            )
            conn.execute(table.delete())

        # test the same series of events using a reflected version of
        # the table

        m2 = MetaData(engine)
        table = Table(table.name, m2, autoload=True)

        with self.sql_execution_asserter(engine) as asserter:
            with engine.connect() as conn:
                conn.execute(table.insert(), {"id": 30, "data": "d1"})
                # the sequence continues from the earlier inserts,
                # hence id 5
                r = conn.execute(table.insert(), {"data": "d2"})
                eq_(r.inserted_primary_key, [5])
                conn.execute(
                    table.insert(),
                    {
                        "id": 31,
                        "data": "d3"
                    },
                    {
                        "id": 32,
                        "data": "d4"
                    },
                )
                conn.execute(table.insert(), {"data": "d5"}, {"data": "d6"})
                conn.execute(table.insert(inline=True), {
                    "id": 33,
                    "data": "d7"
                })
                conn.execute(table.insert(inline=True), {"data": "d8"})

        # the reflected table must emit the identical SQL stream
        asserter.assert_(
            DialectSQL(
                "INSERT INTO testtable (id, data) VALUES (:id, :data)",
                {
                    "id": 30,
                    "data": "d1"
                },
            ),
            DialectSQL(
                "INSERT INTO testtable (data) VALUES (:data) RETURNING "
                "testtable.id",
                {"data": "d2"},
            ),
            DialectSQL(
                "INSERT INTO testtable (id, data) VALUES (:id, :data)",
                [{
                    "id": 31,
                    "data": "d3"
                }, {
                    "id": 32,
                    "data": "d4"
                }],
            ),
            DialectSQL(
                "INSERT INTO testtable (data) VALUES (:data)",
                [{
                    "data": "d5"
                }, {
                    "data": "d6"
                }],
            ),
            DialectSQL(
                "INSERT INTO testtable (id, data) VALUES (:id, :data)",
                [{
                    "id": 33,
                    "data": "d7"
                }],
            ),
            DialectSQL("INSERT INTO testtable (data) VALUES (:data)",
                       [{
                           "data": "d8"
                       }]),
        )

        with engine.connect() as conn:
            eq_(
                conn.execute(table.select()).fetchall(),
                [
                    (30, "d1"),
                    (5, "d2"),
                    (31, "d3"),
                    (32, "d4"),
                    (6, "d5"),
                    (7, "d6"),
                    (33, "d7"),
                    (8, "d8"),
                ],
            )
            conn.execute(table.delete())