Example #1
    def test_skip_not_describable(self):
        @event.listens_for(self.metadata, "before_drop")
        def cleanup(*arg, **kw):
            with testing.db.connect() as conn:
                conn.execute("DROP TABLE IF EXISTS test_t1")
                conn.execute("DROP TABLE IF EXISTS test_t2")
                conn.execute("DROP VIEW IF EXISTS test_v")

        with testing.db.connect() as conn:
            conn.execute("CREATE TABLE test_t1 (id INTEGER)")
            conn.execute("CREATE TABLE test_t2 (id INTEGER)")
            conn.execute("CREATE VIEW test_v AS SELECT id FROM test_t1")
            conn.execute("DROP TABLE test_t1")

            m = MetaData()
            with expect_warnings(
                "Skipping .* Table or view named .?test_v.? could not be "
                "reflected: .* references invalid table"
            ):
                m.reflect(views=True, bind=conn)
            eq_(m.tables["test_t2"].name, "test_t2")

            assert_raises_message(
                exc.UnreflectableTableError,
                "references invalid table",
                Table,
                "test_v",
                MetaData(),
                autoload_with=conn,
            )
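All of the entries on this page rely on the same contract: expect_warnings from sqlalchemy.testing is a context manager that fails the test unless the enclosed block emits at least one SAWarning whose message matches one of the given regular-expression patterns (the ".*" fragments above are regex syntax). A minimal standalone sketch of that contract, using an invented warning message rather than one taken from these tests:

import warnings

from sqlalchemy import exc
from sqlalchemy.testing import expect_warnings


def demo_expect_warnings():
    # The block must emit a matching SAWarning; the context manager consumes
    # it, and a missing or non-matching warning fails the assertion.
    with expect_warnings("widget .* not calibrated"):
        warnings.warn("widget frobnicator is not calibrated", exc.SAWarning)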
Example #2
    def test_foreignkey_missing_insert(self):
        Table("t1", self.metadata, Column("id", Integer, primary_key=True))
        t2 = Table(
            "t2",
            self.metadata,
            Column("id", Integer, ForeignKey("t1.id"), primary_key=True),
        )
        self.metadata.create_all()

        # want to ensure that "null value in column "id" violates not-
        # null constraint" is raised (IntegrityError on psycoopg2, but
        # ProgrammingError on pg8000), and not "ProgrammingError:
        # (ProgrammingError) relationship "t2_id_seq" does not exist".
        # the latter corresponds to autoincrement behavior, which is not
        # the case here due to the foreign key.

        for eng in [
            engines.testing_engine(options={"implicit_returning": False}),
            engines.testing_engine(options={"implicit_returning": True}),
        ]:
            with expect_warnings(
                ".*has no Python-side or server-side default.*"
            ):
                assert_raises(
                    (exc.IntegrityError, exc.ProgrammingError),
                    eng.execute,
                    t2.insert(),
                )
Example #3
    def test_unsupported_casts(self):

        t = sql.table('t', sql.column('col'))
        m = mysql

        specs = [
            (m.MSBit, "t.col"),

            (FLOAT, "t.col"),
            (Float, "t.col"),
            (m.MSFloat, "t.col"),
            (m.MSDouble, "t.col"),
            (m.MSReal, "t.col"),

            (m.MSYear, "t.col"),
            (m.MSYear(2), "t.col"),

            (Boolean, "t.col"),
            (BOOLEAN, "t.col"),

            (m.MSEnum, "t.col"),
            (m.MSEnum("1", "2"), "t.col"),
            (m.MSSet, "t.col"),
            (m.MSSet("1", "2"), "t.col"),
        ]

        for type_, expected in specs:
            with expect_warnings(
                "Datatype .* does not support CAST on MySQL;"
            ):
                self.assert_compile(cast(t.c.col, type_), expected)
Example #4
    def test_reconnect_on_reentrant_plus_closewresult(self):
        conn = self.db.connect(close_with_result=True)

        self.dbapi.shutdown("rollback")

        # raises error
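        # py2konly=True: the warning below is only required on Python 2,
        # where SQLAlchemy emulates exception chaining by emitting this
        # warning; on Python 3 the block passes whether or not it appears.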
        with expect_warnings(
            "An exception has occurred during handling .*"
            "something broke on execute but we didn't lose the connection",
            py2konly=True,
        ):
            assert_raises_message(
                tsa.exc.DBAPIError,
                "Lost the DB connection on rollback",
                conn.execute,
                select([1]),
            )

        assert conn.closed
        assert conn.invalidated

        assert_raises_message(
            tsa.exc.StatementError,
            "This Connection is closed",
            conn.execute,
            select([1]),
        )
Example #5
    def test_report_primary_error_when_rollback_fails(self):
        User, users = self.classes.User, self.tables.users

        mapper(User, users)

        session = Session(testing.db)

        with expect_warnings(".*during handling of a previous exception.*"):
            session.begin_nested()
            savepoint = session.\
                connection()._Connection__transaction._savepoint

            # force the savepoint to disappear
            session.connection().dialect.do_release_savepoint(
                session.connection(), savepoint
            )

            # now do a broken flush
            session.add_all([User(id=1), User(id=1)])

            assert_raises_message(
                sa_exc.DBAPIError,
                "ROLLBACK TO SAVEPOINT ",
                session.flush
            )
Example #6
    def test_unsupported_cast_literal_bind(self):
        expr = cast(column("foo", Integer) + 5, Float)

        with expect_warnings("Datatype FLOAT does not support CAST on MySQL;"):
            self.assert_compile(expr, "(foo + 5)", literal_binds=True)

        dialect = mysql.MySQLDialect()
        dialect.server_version_info = (3, 9, 8)
        with expect_warnings("Current MySQL version does not support CAST"):
            eq_(
                str(
                    expr.compile(
                        dialect=dialect, compile_kwargs={"literal_binds": True}
                    )
                ),
                "(foo + 5)",
            )
Example #7
    def test_cast_grouped_expression_pre_4(self):
        dialect = mysql.dialect()
        dialect.server_version_info = (3, 2, 3)
        with expect_warnings("Current MySQL version does not support CAST;"):
            self.assert_compile(
                cast(sql.column('x') + sql.column('y'), Integer),
                "(x + y)",
                dialect=dialect
            )
Example #8
    def test_execution_options_ignored_mid_transaction(self):
        bind = mock.Mock()
        conn = mock.Mock(engine=bind)
        bind.contextual_connect = mock.Mock(return_value=conn)
        sess = Session(bind=bind)
        sess.execute("select 1")
        with expect_warnings(
                "Connection is already established for the "
                "given bind; execution_options ignored"):
            sess.connection(execution_options={'isolation_level': 'FOO'})
Example #9
    def test_not_supported(self):
        dialect, connection = self._fixture(None)

        with expect_warnings("Could not fetch transaction isolation level"):
            assert_raises_message(
                NotImplementedError,
                "Can't fetch isolation",
                dialect.get_isolation_level,
                connection,
            )
Example #10
    def test_no_cast_pre_4(self):
        self.assert_compile(
            cast(Column("foo", Integer), String), "CAST(foo AS CHAR)"
        )
        dialect = mysql.dialect()
        dialect.server_version_info = (3, 2, 3)
        with expect_warnings("Current MySQL version does not support CAST;"):
            self.assert_compile(
                cast(Column("foo", Integer), String), "foo", dialect=dialect
            )
Example #11
    def _test_warning(self, stmt, offending_clause, expected):
        with expect_warnings(
            "Can't resolve label reference %r;" % offending_clause
        ):
            self.assert_compile(stmt, expected)
        assert_raises_message(
            exc.SAWarning,
            "Can't resolve label reference %r; converting to text"
            % offending_clause,
            stmt.compile,
        )
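The second assertion above treats the same condition as a raised exc.SAWarning, which works because the surrounding test harness promotes SQLAlchemy warnings to errors. A minimal sketch of that promotion using only the standard warnings filter (the message text is illustrative, not taken from the example):

import warnings

from sqlalchemy import exc


def warning_as_error_demo():
    with warnings.catch_warnings():
        # Escalate SAWarning to an exception, mirroring what the test
        # suite's warnings filter is assumed to do for assert_raises_message.
        warnings.simplefilter("error", exc.SAWarning)
        try:
            warnings.warn("Can't resolve label reference 'q'", exc.SAWarning)
        except exc.SAWarning as err:
            assert "Can't resolve label reference" in str(err)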
Example #12
    def test_warn_on_unannotated_matched_column(self):
        User = self.classes.User

        compiler = evaluator.EvaluatorCompiler(User)

        with expect_warnings(
            r"Evaluating non-mapped column expression 'othername' "
                "onto ORM instances; this is a deprecated use case."):
            meth = compiler.process(User.name == Column('othername', String))

        u1 = User(id=5)
        meth(u1)
Example #13
    def test_anticipate_no_pk_lower_case_table(self):
        t = table(
            "t",
            Column("id", Integer, primary_key=True, autoincrement=False),
            Column("notpk", String(10), nullable=True),
        )
        with expect_warnings(
            "Column 't.id' is marked as a member.*" "may not store NULL.$"
        ):
            self.assert_compile(
                t.insert(), "INSERT INTO t () VALUES ()", params={}
            )
Example #14
    def _test(self, fn, arg, offending_clause, expected):
        with expect_warnings("Textual "):
            stmt = fn(arg)
            self.assert_compile(stmt, expected)

        assert_raises_message(
            exc.SAWarning,
            r"Textual (?:SQL|column|SQL FROM) expression %(stmt)r should be "
            r"explicitly declared (?:with|as) text\(%(stmt)r\)"
            % {"stmt": util.ellipses_string(offending_clause)},
            fn,
            arg,
        )
Example #15
    def test_anticipate_no_pk_non_composite_pk(self):
        t = Table(
            "t",
            MetaData(),
            Column("x", Integer, primary_key=True, autoincrement=False),
            Column("q", Integer),
        )
        with expect_warnings(
            "Column 't.x' is marked as a member.*" "may not store NULL.$"
        ):
            self.assert_compile(
                t.insert(), "INSERT INTO t (q) VALUES (:q)", params={"q": 5}
            )
Example #16
    def test_anticipate_no_pk_composite_pk(self):
        t = Table(
            "t",
            MetaData(),
            Column("x", Integer, primary_key=True),
            Column("y", Integer, primary_key=True),
        )

        with expect_warnings(
            "Column 't.y' is marked as a member.*"
            "Note that as of SQLAlchemy 1.1,"
        ):
            self.assert_compile(
                t.insert(), "INSERT INTO t (x) VALUES (:x)", params={"x": 5}
            )
Example #17
    def test_ensure_is_disconnect_gets_connection(self):
        def is_disconnect(e, conn, cursor):
            # connection is still present
            assert conn.connection is not None
            # the error usually occurs on connection.cursor(),
            # though with MySQLdb we get a non-working cursor.
            # assert cursor is None

        self.engine.dialect.is_disconnect = is_disconnect
        conn = self.engine.connect()
        self.engine.test_shutdown()
        with expect_warnings(
            "An exception has occurred during handling .*", py2konly=True
        ):
            assert_raises(tsa.exc.DBAPIError, conn.execute, select([1]))
Example #18
    def test_correlated_update_two(self, update_from_fixture):
        table1, t2 = update_from_fixture

        mt = table1.alias()
        with testing.expect_warnings(
                "implicitly coercing SELECT object to scalar subquery"):
            u = update(table1).values(
                {
                    table1.c.name:
                    select(mt.c.name).where(mt.c.myid == table1.c.myid)
                }, )
        self.assert_compile(
            u,
            "UPDATE mytable SET name=(SELECT mytable_1.name FROM "
            "mytable AS mytable_1 WHERE "
            "mytable_1.myid = mytable.myid)",
        )
Example #19
    def test_no_show_variables(self):
        from sqlalchemy.testing import mock

        engine = engines.testing_engine()

        def my_execute(self, statement, *args, **kw):
            if statement.startswith("SHOW VARIABLES"):
                statement = "SELECT 1 FROM DUAL WHERE 1=0"
            return real_exec(self, statement, *args, **kw)

        real_exec = engine._connection_cls._execute_text
        with mock.patch.object(engine._connection_cls, "_execute_text",
                               my_execute):
            with expect_warnings(
                    "Could not retrieve SQL_MODE; please ensure the "
                    "MySQL user has permissions to SHOW VARIABLES"):
                engine.connect()
Example #20
    def test_anticipate_no_pk_composite_pk_prefetch(self):
        t = Table(
            "t",
            MetaData(),
            Column("x", Integer, primary_key=True),
            Column("y", Integer, primary_key=True),
        )
        d = postgresql.dialect()
        d.implicit_returning = False
        with expect_warnings("Column 't.y' is marked as a member.*"
                             "Note that as of SQLAlchemy 1.1,"):
            self.assert_compile(
                t.insert(),
                "INSERT INTO t (x) VALUES (%(x)s)",
                params={"x": 5},
                dialect=d,
            )
Example #21
    def test_anticipate_no_pk_non_composite_pk_implicit_returning(self):
        t = Table(
            "t",
            MetaData(),
            Column("x", Integer, primary_key=True, autoincrement=False),
            Column("q", Integer),
        )
        d = postgresql.dialect()
        d.implicit_returning = True
        with expect_warnings("Column 't.x' is marked as a member.*"
                             "may not store NULL.$"):
            self.assert_compile(
                t.insert(),
                "INSERT INTO t (q) VALUES (%(q)s)",
                params={"q": 5},
                dialect=d,
            )
Example #22
    def test_anticipate_no_pk_composite_pk_prefetch(self):
        t = Table(
            't', MetaData(), Column('x', Integer, primary_key=True),
            Column('y', Integer, primary_key=True)
        )
        d = postgresql.dialect()
        d.implicit_returning = False
        with expect_warnings(
            "Column 't.y' is marked as a member.*"
            "Note that as of SQLAlchemy 1.1,"
        ):
            self.assert_compile(
                t.insert(),
                "INSERT INTO t (x) VALUES (%(x)s)",
                params={'x': 5},
                dialect=d
            )
Example #23
    def test_no_show_variables(self):
        from sqlalchemy.testing import mock
        engine = engines.testing_engine()

        def my_execute(self, statement, *args, **kw):
            if statement.startswith("SHOW VARIABLES"):
                statement = "SELECT 1 FROM DUAL WHERE 1=0"
            return real_exec(self, statement, *args, **kw)

        real_exec = engine._connection_cls._execute_text
        with mock.patch.object(
                engine._connection_cls, "_execute_text", my_execute):
            with expect_warnings(
                "Could not retrieve SQL_MODE; please ensure the "
                "MySQL user has permissions to SHOW VARIABLES"
            ):
                engine.connect()
Example #24
    def test_anticipate_no_pk_non_composite_pk_implicit_returning(self):
        t = Table(
            "t",
            MetaData(),
            Column("x", Integer, primary_key=True, autoincrement=False),
            Column("q", Integer),
        )
        d = postgresql.dialect()
        d.implicit_returning = True
        with expect_warnings(
            "Column 't.x' is marked as a member.*" "may not store NULL.$"
        ):
            self.assert_compile(
                t.insert(),
                "INSERT INTO t (q) VALUES (%(q)s)",
                params={"q": 5},
                dialect=d,
            )
Example #25
    def test_no_default_isolation_level(self):
        from sqlalchemy.testing import mock

        engine = engines.testing_engine()

        real_isolation_level = testing.db.dialect.get_isolation_level

        def fake_isolation_level(connection):
            connection = mock.Mock(cursor=mock.Mock(return_value=mock.Mock(
                fetchone=mock.Mock(return_value=None))))
            return real_isolation_level(connection)

        with mock.patch.object(engine.dialect, "get_isolation_level",
                               fake_isolation_level):
            with expect_warnings(
                    "Could not retrieve transaction isolation level for MySQL "
                    "connection."):
                engine.connect()
Example #26
    def test_older_cx_oracle_warning(self, cx_Oracle, cx_oracle_type):
        cx_Oracle.version = "6.3"

        ignore_dialect = cx_oracle.dialect(
            dbapi=cx_Oracle, encoding_errors="ignore"
        )
        ignore_outputhandler = (
            ignore_dialect._generate_connection_outputtype_handler()
        )

        cursor = mock.Mock()

        with testing.expect_warnings(
            r"cx_oracle version \(6, 3\) does not support encodingErrors"
        ):
            ignore_outputhandler(
                cursor, "foo", cx_oracle_type, None, None, None
            )
Example #27
    def test_computed_update_warning(self, connection):
        test = self.tables.test
        conn = connection
        conn.execute(test.insert(), {"id": 1, "foo": 5})

        if testing.db.dialect._supports_update_returning_computed_cols:
            result = conn.execute(
                test.update().values(foo=10).return_defaults())
            eq_(result.returned_defaults, (52, ))
        else:
            with testing.expect_warnings(
                    "Computed columns don't work with Oracle UPDATE"):
                result = conn.execute(
                    test.update().values(foo=10).return_defaults())

                # returns the *old* value
                eq_(result.returned_defaults, (47, ))

        eq_(conn.scalar(select(test.c.bar)), 52)
Example #28
    def test_savepoint_release_fails_warning(self):
        with testing.db.connect() as connection:
            connection.begin()

            with expect_warnings(
                "An exception has occurred during handling of a previous "
                "exception.  The previous exception "
                r"is:.*..SQL\:.*RELEASE SAVEPOINT"
            ):

                def go():
                    with connection.begin_nested() as savepoint:
                        connection.dialect.do_release_savepoint(
                            connection, savepoint._savepoint
                        )

                assert_raises_message(
                    exc.DBAPIError, r".*SQL\:.*ROLLBACK TO SAVEPOINT", go
                )
Example #30
    def test_mariadb_check_warning(self):

        for expect_, version in [
            (True, (10, 2, 7, 'MariaDB')),
            (True, (5, 6, 15, 10, 2, 7, 'MariaDB')),
            (False, (10, 2, 10, 'MariaDB')),
            (False, (5, 7, 20)),
            (False, (5, 6, 15)),
            (True, (10, 2, 6, 'MariaDB', 10, 2, '6+maria~stretch', 'log')),
        ]:
            dialect = mysql.dialect()
            dialect.server_version_info = version
            if expect_:
                with expect_warnings(
                        ".*before 10.2.9 has known issues regarding "
                        "CHECK constraints"):
                    dialect._warn_for_known_db_issues()
            else:
                dialect._warn_for_known_db_issues()
Example #31
    def test_subqueryload_external_lambda_caveats(self, plain_fixture):
        User, Address = plain_fixture

        s = Session(testing.db, future=True)

        def query(names):
            stmt = lambda_stmt(
                lambda: select(User)
                .where(User.name.in_(names))
                .options(subqueryload(User.addresses))
            ) + (lambda s: s.order_by(User.id))

            return s.execute(stmt)

        def go1():
            r1 = query(["ed"])
            eq_(
                r1.scalars().all(),
                [User(name="ed", addresses=[Address(), Address(), Address()])],
            )

        def go2():
            r1 = query(["ed", "fred"])
            eq_(
                r1.scalars().all(),
                [
                    User(
                        name="ed", addresses=[Address(), Address(), Address()]
                    ),
                    User(name="fred", addresses=[Address()]),
                ],
            )

        for i in range(5):
            fn = random.choice([go1, go2])
            with testing.expect_warnings(
                'subqueryloader for "User.addresses" must invoke lambda '
                r"callable at .*LambdaElement\(<code object <lambda> "
                r".*test_lambdas.py.* in order to produce a new query, "
                r"decreasing the efficiency of caching"
            ):
                self.assert_sql_count(testing.db, fn, 2)
Example #32
    def test_lazyload_extra_criteria_not_supported(self):
        users, addresses = (self.tables.users, self.tables.addresses)

        self.mapper_registry.map_imperatively(
            User,
            users,
            properties={"addresses": relationship(Address)},
        )
        self.mapper_registry.map_imperatively(Address, addresses)

        sess = fixture_session()
        u1 = User(
            name="ed",
            addresses=[
                Address(email_address="*****@*****.**"),
                Address(email_address="*****@*****.**"),
            ],
        )

        sess.add(u1)
        sess.commit()
        sess.close()

        u1 = (
            sess.query(User)
            .options(
                lazyload(
                    User.addresses.and_(Address.email_address == "*****@*****.**")
                )
            )
            .first()
        )
        with testing.expect_warnings(
            r"Can't reliably serialize a lazyload\(\) option"
        ):
            u2 = pickle.loads(pickle.dumps(u1))

        eq_(len(u1.addresses), 1)

        sess = fixture_session()
        sess.add(u2)
        eq_(len(u2.addresses), 2)
Example #33
    def test_reconnect_on_reentrant(self):
        conn = self.db.connect()

        conn.execute(select([1]))

        assert len(self.dbapi.connections) == 1

        self.dbapi.shutdown("rollback")

        # raises error
        with expect_warnings(
                "An exception has occurred during handling .*"
                "something broke on execute but we didn't lose the connection",
                py2konly=True):
            assert_raises_message(tsa.exc.DBAPIError,
                                  "Lost the DB connection on rollback",
                                  conn.execute, select([1]))

        assert not conn.closed
        assert conn.invalidated
Example #34
    def test_reconnect_on_reentrant_plus_closewresult(self):
        conn = self.db.connect(close_with_result=True)

        self.dbapi.shutdown("rollback")

        # raises error
        with expect_warnings(
                "An exception has occurred during handling .*"
                "something broke on execute but we didn't lose the connection",
                py2konly=True):
            assert_raises_message(tsa.exc.DBAPIError,
                                  "Lost the DB connection on rollback",
                                  conn.execute, select([1]))

        assert conn.closed
        assert conn.invalidated

        assert_raises_message(tsa.exc.StatementError,
                              "This Connection is closed", conn.execute,
                              select([1]))
Example #35
    def test_warning_in_transaction(self):
        eng = testing_engine()
        c1 = eng.connect()
        with expect_warnings(
                "Connection is already established with a Transaction; "
                "setting isolation_level may implicitly rollback or commit "
                "the existing transaction, or have no effect until next "
                "transaction"):
            with c1.begin():
                c1 = c1.execution_options(
                    isolation_level=self._non_default_isolation_level())

                eq_(
                    eng.dialect.get_isolation_level(c1.connection),
                    self._non_default_isolation_level(),
                )
        # stays outside of transaction
        eq_(
            eng.dialect.get_isolation_level(c1.connection),
            self._non_default_isolation_level(),
        )
Example #36
    def test_report_primary_error_when_rollback_fails(self):
        User, users = self.classes.User, self.tables.users

        mapper(User, users)

        session = Session(testing.db)

        with expect_warnings(".*during handling of a previous exception.*"):
            session.begin_nested()
            savepoint = session.\
                connection()._Connection__transaction._savepoint

            # force the savepoint to disappear
            session.connection().dialect.do_release_savepoint(
                session.connection(), savepoint)

            # now do a broken flush
            session.add_all([User(id=1), User(id=1)])

            assert_raises_message(sa_exc.DBAPIError, "ROLLBACK TO SAVEPOINT ",
                                  session.flush)
Example #37
    def test_reconnect_on_reentrant(self):
        conn = self.db.connect()

        conn.execute(select([1]))

        assert len(self.dbapi.connections) == 1

        self.dbapi.shutdown("rollback")

        # raises error
        with expect_warnings(
            "An exception has occurred during handling .*"
            "something broke on execute but we didn't lose the connection",
            py2konly=True
        ):
            assert_raises_message(
                tsa.exc.DBAPIError,
                "Lost the DB connection on rollback",
                conn.execute, select([1])
            )

        assert not conn.closed
        assert conn.invalidated
Example #38
    def test_warning_in_transaction(self):
        eng = testing_engine()
        c1 = eng.connect()
        with expect_warnings(
            "Connection is already established with a Transaction; "
            "setting isolation_level may implicitly rollback or commit "
            "the existing transaction, or have no effect until next "
            "transaction"
        ):
            with c1.begin():
                c1 = c1.execution_options(
                    isolation_level=self._non_default_isolation_level()
                )

                eq_(
                    eng.dialect.get_isolation_level(c1.connection),
                    self._non_default_isolation_level(),
                )
        # stays outside of transaction
        eq_(
            eng.dialect.get_isolation_level(c1.connection),
            self._non_default_isolation_level(),
        )
Example #39
    def test_warning_w_no_recursive_opt(self, loader_fn, depth,
                                        limited_cache_conn):
        connection = limited_cache_conn(27)

        Node = self.classes.Node

        for i in range(2):
            stmt = (select(Node).filter(Node.id == 1).options(
                self._stack_loaders(loader_fn, depth)))

            # note this is a magic number, it's not important that it's exact,
            # just that when someone makes a huge recursive thing,
            # it warns
            if depth > 8:
                with expect_warnings(
                        "Loader depth for query is excessively deep; "
                        "caching will be disabled for additional loaders."):
                    with Session(connection) as s:
                        result = s.scalars(stmt)
                        self._assert_depth(result.one(), depth)
            else:
                with Session(connection) as s:
                    result = s.scalars(stmt)
                    self._assert_depth(result.one(), depth)
Example #40
    def test_cast_grouped_expression_non_castable(self):
        with expect_warnings("Datatype FLOAT does not support CAST on MySQL;"):
            self.assert_compile(
                cast(sql.column('x') + sql.column('y'), Float),
                "(x + y)"
            )
Example #41
    def _assert_data_noautoincrement(self, table):
        engine = engines.testing_engine(options={"implicit_returning": False})

        with engine.connect() as conn:
            conn.execute(table.insert(), {"id": 30, "data": "d1"})

            with expect_warnings(
                ".*has no Python-side or server-side default.*"
            ):
                assert_raises(
                    (exc.IntegrityError, exc.ProgrammingError),
                    conn.execute,
                    table.insert(),
                    {"data": "d2"},
                )
            with expect_warnings(
                ".*has no Python-side or server-side default.*"
            ):
                assert_raises(
                    (exc.IntegrityError, exc.ProgrammingError),
                    conn.execute,
                    table.insert(),
                    {"data": "d2"},
                    {"data": "d3"},
                )
            with expect_warnings(
                ".*has no Python-side or server-side default.*"
            ):
                assert_raises(
                    (exc.IntegrityError, exc.ProgrammingError),
                    conn.execute,
                    table.insert(),
                    {"data": "d2"},
                )
            with expect_warnings(
                ".*has no Python-side or server-side default.*"
            ):
                assert_raises(
                    (exc.IntegrityError, exc.ProgrammingError),
                    conn.execute,
                    table.insert(),
                    {"data": "d2"},
                    {"data": "d3"},
                )

            conn.execute(
                table.insert(),
                {"id": 31, "data": "d2"},
                {"id": 32, "data": "d3"},
            )
            conn.execute(table.insert(inline=True), {"id": 33, "data": "d4"})
            eq_(
                conn.execute(table.select()).fetchall(),
                [(30, "d1"), (31, "d2"), (32, "d3"), (33, "d4")],
            )
            conn.execute(table.delete())

        # test the same series of events using a reflected version of
        # the table

        m2 = MetaData(engine)
        table = Table(table.name, m2, autoload=True)
        with engine.connect() as conn:
            conn.execute(table.insert(), {"id": 30, "data": "d1"})

            with expect_warnings(
                ".*has no Python-side or server-side default.*"
            ):
                assert_raises(
                    (exc.IntegrityError, exc.ProgrammingError),
                    conn.execute,
                    table.insert(),
                    {"data": "d2"},
                )
            with expect_warnings(
                ".*has no Python-side or server-side default.*"
            ):
                assert_raises(
                    (exc.IntegrityError, exc.ProgrammingError),
                    conn.execute,
                    table.insert(),
                    {"data": "d2"},
                    {"data": "d3"},
                )
            conn.execute(
                table.insert(),
                {"id": 31, "data": "d2"},
                {"id": 32, "data": "d3"},
            )
            conn.execute(table.insert(inline=True), {"id": 33, "data": "d4"})
            eq_(
                conn.execute(table.select()).fetchall(),
                [(30, "d1"), (31, "d2"), (32, "d3"), (33, "d4")],
            )
Example #42
    def _assert_data_noautoincrement(self, table):
        engine = \
            engines.testing_engine(options={'implicit_returning': False})

        with engine.connect() as conn:
            conn.execute(table.insert(), {'id': 30, 'data': 'd1'})

            with expect_warnings(
                    ".*has no Python-side or server-side default.*",
            ):
                assert_raises(
                    (exc.IntegrityError, exc.ProgrammingError),
                    conn.execute, table.insert(), {'data': 'd2'})
            with expect_warnings(
                    ".*has no Python-side or server-side default.*",
            ):
                assert_raises(
                    (exc.IntegrityError, exc.ProgrammingError),
                    conn.execute, table.insert(), {'data': 'd2'},
                    {'data': 'd3'})
            with expect_warnings(
                    ".*has no Python-side or server-side default.*",
            ):
                assert_raises(
                    (exc.IntegrityError, exc.ProgrammingError),
                    conn.execute, table.insert(), {'data': 'd2'})
            with expect_warnings(
                    ".*has no Python-side or server-side default.*",
            ):
                assert_raises(
                    (exc.IntegrityError, exc.ProgrammingError),
                    conn.execute, table.insert(), {'data': 'd2'},
                    {'data': 'd3'})

            conn.execute(
                table.insert(),
                {'id': 31, 'data': 'd2'}, {'id': 32, 'data': 'd3'})
            conn.execute(table.insert(inline=True), {'id': 33, 'data': 'd4'})
            eq_(conn.execute(table.select()).fetchall(), [
                (30, 'd1'),
                (31, 'd2'),
                (32, 'd3'),
                (33, 'd4')])
            conn.execute(table.delete())

        # test the same series of events using a reflected version of
        # the table

        m2 = MetaData(engine)
        table = Table(table.name, m2, autoload=True)
        with engine.connect() as conn:
            conn.execute(table.insert(), {'id': 30, 'data': 'd1'})

            with expect_warnings(
                    ".*has no Python-side or server-side default.*",
            ):
                assert_raises(
                    (exc.IntegrityError, exc.ProgrammingError),
                    conn.execute, table.insert(), {'data': 'd2'})
            with expect_warnings(
                    ".*has no Python-side or server-side default.*",
            ):
                assert_raises(
                    (exc.IntegrityError, exc.ProgrammingError),
                    conn.execute, table.insert(), {'data': 'd2'},
                    {'data': 'd3'})
            conn.execute(
                table.insert(),
                {'id': 31, 'data': 'd2'}, {'id': 32, 'data': 'd3'})
            conn.execute(table.insert(inline=True), {'id': 33, 'data': 'd4'})
            eq_(conn.execute(table.select()).fetchall(), [
                (30, 'd1'),
                (31, 'd2'),
                (32, 'd3'),
                (33, 'd4')])
Example #43
    def _assert_data_noautoincrement(self, table):
        engine = engines.testing_engine(options={"implicit_returning": False})

        # turning off the cache because we are checking for compile-time
        # warnings
        engine = engine.execution_options(compiled_cache=None)

        with engine.begin() as conn:
            conn.execute(table.insert(), {"id": 30, "data": "d1"})

        with engine.begin() as conn:
            with expect_warnings(
                    ".*has no Python-side or server-side default.*"):
                assert_raises(
                    (exc.IntegrityError, exc.ProgrammingError),
                    conn.execute,
                    table.insert(),
                    {"data": "d2"},
                )

        with engine.begin() as conn:
            with expect_warnings(
                    ".*has no Python-side or server-side default.*"):
                assert_raises(
                    (exc.IntegrityError, exc.ProgrammingError),
                    conn.execute,
                    table.insert(),
                    [{
                        "data": "d2"
                    }, {
                        "data": "d3"
                    }],
                )

        with engine.begin() as conn:
            with expect_warnings(
                    ".*has no Python-side or server-side default.*"):
                assert_raises(
                    (exc.IntegrityError, exc.ProgrammingError),
                    conn.execute,
                    table.insert(),
                    {"data": "d2"},
                )

        with engine.begin() as conn:
            with expect_warnings(
                    ".*has no Python-side or server-side default.*"):
                assert_raises(
                    (exc.IntegrityError, exc.ProgrammingError),
                    conn.execute,
                    table.insert(),
                    [{
                        "data": "d2"
                    }, {
                        "data": "d3"
                    }],
                )

        with engine.begin() as conn:
            conn.execute(
                table.insert(),
                [{
                    "id": 31,
                    "data": "d2"
                }, {
                    "id": 32,
                    "data": "d3"
                }],
            )
            conn.execute(table.insert().inline(), {"id": 33, "data": "d4"})
            eq_(
                conn.execute(table.select()).fetchall(),
                [(30, "d1"), (31, "d2"), (32, "d3"), (33, "d4")],
            )
            conn.execute(table.delete())

        # test the same series of events using a reflected version of
        # the table

        m2 = MetaData()
        table = Table(table.name, m2, autoload_with=engine)
        with engine.begin() as conn:
            conn.execute(table.insert(), {"id": 30, "data": "d1"})

        with engine.begin() as conn:
            with expect_warnings(
                    ".*has no Python-side or server-side default.*"):
                assert_raises(
                    (exc.IntegrityError, exc.ProgrammingError),
                    conn.execute,
                    table.insert(),
                    {"data": "d2"},
                )

        with engine.begin() as conn:
            with expect_warnings(
                    ".*has no Python-side or server-side default.*"):
                assert_raises(
                    (exc.IntegrityError, exc.ProgrammingError),
                    conn.execute,
                    table.insert(),
                    [{
                        "data": "d2"
                    }, {
                        "data": "d3"
                    }],
                )

        with engine.begin() as conn:
            conn.execute(
                table.insert(),
                [{
                    "id": 31,
                    "data": "d2"
                }, {
                    "id": 32,
                    "data": "d3"
                }],
            )
            conn.execute(table.insert().inline(), {"id": 33, "data": "d4"})
            eq_(
                conn.execute(table.select()).fetchall(),
                [(30, "d1"), (31, "d2"), (32, "d3"), (33, "d4")],
            )
Example #44
    def test_cast_grouped_expression_non_castable(self):
        with expect_warnings("Datatype FLOAT does not support CAST on MySQL;"):
            self.assert_compile(cast(sql.column('x') + sql.column('y'), Float),
                                "(x + y)")
Example #45
    def test_unsupported_casts(self, type_, expected):

        t = sql.table("t", sql.column("col"))
        with expect_warnings("Datatype .* does not support CAST on MySQL;"):
            self.assert_compile(cast(t.c.col, type_), expected)
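This is the parametrized counterpart of Example #3: type_ and expected arrive as test parameters instead of being looped over inside the test. The decorator that supplies them is not part of the snippet; a plausible shape using SQLAlchemy's testing.combinations helper, with an illustrative (not source-accurate) list of types:

    @testing.combinations(
        (mysql.BIT(), "t.col"),
        (Boolean(), "t.col"),
        (mysql.SET("1", "2"), "t.col"),
        argnames="type_,expected",
    )
    def test_unsupported_casts(self, type_, expected):
        t = sql.table("t", sql.column("col"))
        with expect_warnings("Datatype .* does not support CAST on MySQL;"):
            self.assert_compile(cast(t.c.col, type_), expected)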
Example #46
    def test_not_supported(self):
        dialect, connection = self._fixture(None)

        with expect_warnings("Could not fetch transaction isolation level"):
            assert_raises_message(NotImplementedError, "Can't fetch isolation",
                                  dialect.get_isolation_level, connection)
Example #47
    def _expect_max_ident_warning(self):
        return testing.expect_warnings(
            "Oracle version .* is known to have a maximum "
            "identifier length of 128")
Example #48
    def test_unsupported_cast_literal_bind(self):
        expr = cast(column("foo", Integer) + 5, Float)

        with expect_warnings(
                "Datatype FLOAT does not support CAST on MySQL/MariaDb;"):
            self.assert_compile(expr, "(foo + 5)", literal_binds=True)
Example #49
    def test_manytoone_deferred_relationship_expr(self):
        """for [ticket:4359], test that updates to the columns embedded
        in an object expression are also updated."""
        users, Address, addresses, User = (
            self.tables.users,
            self.classes.Address,
            self.tables.addresses,
            self.classes.User,
        )

        mapper(User, users)
        mapper(
            Address,
            addresses,
            properties={
                "user":
                relationship(
                    User,
                    passive_updates=testing.requires.on_update_cascade.enabled,
                )
            },
        )

        s = Session()
        a1 = Address(email="jack1")
        u1 = User(username="******", fullname="jack")

        a1.user = u1

        # scenario 1.  object is still transient, we get a value.
        expr = Address.user == u1

        eq_(expr.left.callable(), "jack")

        # scenario 2.  value has been changed while we are transient.
        # we get the updated value.
        u1.username = "******"
        eq_(expr.left.callable(), "ed")

        s.add_all([u1, a1])
        s.commit()

        eq_(a1.username, "ed")

        # scenario 3.  the value is changed and flushed, we get the new value.
        u1.username = "******"
        s.flush()

        eq_(expr.left.callable(), "fred")

        # scenario 4.  the value is changed, flushed, and expired.
        # the callable goes out to get that value.
        u1.username = "******"
        s.commit()
        assert "username" not in u1.__dict__

        eq_(expr.left.callable(), "wendy")

        # scenario 5.  the value is changed flushed, expired,
        # and then when we hit the callable, we are detached.
        u1.username = "******"
        s.commit()
        assert "username" not in u1.__dict__

        s.expunge(u1)

        # InstanceState has a "last known values" feature we use
        # to pick up on this
        eq_(expr.left.callable(), "jack")

        # doesn't unexpire the attribute
        assert "username" not in u1.__dict__

        # once we are persistent again, we check the DB
        s.add(u1)
        eq_(expr.left.callable(), "jack")
        assert "username" in u1.__dict__

        # scenario 6.  we are using del
        u2 = User(username="******", fullname="jack")
        expr = Address.user == u2

        eq_(expr.left.callable(), "jack")

        del u2.username

        assert_raises_message(
            sa.exc.InvalidRequestError,
            "Can't resolve value for column users.username",
            expr.left.callable,
        )

        u2.username = "******"
        eq_(expr.left.callable(), "ed")

        s.add(u2)
        s.commit()

        eq_(expr.left.callable(), "ed")

        del u2.username

        # object is persistent, so since we deleted, we get None
        with expect_warnings("Got None for value of column "):
            eq_(expr.left.callable(), None)

        s.expunge(u2)

        # however that None isn't in the dict, that's just the default
        # attribute value, so after expunge it's gone
        assert "username" not in u2.__dict__

        # detached, we don't have it
        assert_raises_message(
            sa.exc.InvalidRequestError,
            "Can't resolve value for column users.username",
            expr.left.callable,
        )
Example #50
    def test_replace_function_case_sensitive(self):
        reg = functions._registry["_default"]
        cs_reg = functions._case_sensitive_registry["_default"]

        class replaceable_func(GenericFunction):
            type = Integer
            identifier = "REPLACEABLE_FUNC"

        assert isinstance(func.REPLACEABLE_FUNC().type, Integer)
        assert isinstance(func.Replaceable_Func().type, Integer)
        assert isinstance(func.RePlAcEaBlE_fUnC().type, Integer)
        assert isinstance(func.replaceable_func().type, Integer)

        in_("replaceable_func", reg)
        not_in_("REPLACEABLE_FUNC", reg)
        not_in_("Replaceable_Func", reg)
        in_("replaceable_func", cs_reg)
        eq_(set(cs_reg["replaceable_func"].keys()), set(["REPLACEABLE_FUNC"]))

        with testing.expect_deprecated(
                "GenericFunction 'Replaceable_Func' is already registered with"
                " different letter case, so the previously registered function "
                "'REPLACEABLE_FUNC' is switched into case-sensitive mode. "
                "GenericFunction objects will be fully case-insensitive in a "
                "future release.",
                regex=False,
        ):

            class Replaceable_Func(GenericFunction):
                type = DateTime
                identifier = "Replaceable_Func"

        assert isinstance(func.REPLACEABLE_FUNC().type, Integer)
        assert isinstance(func.Replaceable_Func().type, DateTime)
        assert isinstance(func.RePlAcEaBlE_fUnC().type, NullType)
        assert isinstance(func.replaceable_func().type, NullType)

        eq_(reg["replaceable_func"], functions._CASE_SENSITIVE)
        not_in_("REPLACEABLE_FUNC", reg)
        not_in_("Replaceable_Func", reg)
        in_("replaceable_func", cs_reg)
        eq_(
            set(cs_reg["replaceable_func"].keys()),
            set(["REPLACEABLE_FUNC", "Replaceable_Func"]),
        )

        with testing.expect_warnings(
                "The GenericFunction 'REPLACEABLE_FUNC' is already registered and "
                "is going to be overriden.",
                regex=False,
        ):

            class replaceable_func_override(GenericFunction):
                type = DateTime
                identifier = "REPLACEABLE_FUNC"

        with testing.expect_deprecated(
                "GenericFunction(s) '['REPLACEABLE_FUNC', 'Replaceable_Func']' "
                "are already registered with different letter cases and might "
                "interact with 'replaceable_func'. GenericFunction objects will "
                "be fully case-insensitive in a future release.",
                regex=False,
        ):

            class replaceable_func_lowercase(GenericFunction):
                type = String
                identifier = "replaceable_func"

        with testing.expect_warnings(
                "The GenericFunction 'Replaceable_Func' is already registered and "
                "is going to be overriden.",
                regex=False,
        ):

            class Replaceable_Func_override(GenericFunction):
                type = Integer
                identifier = "Replaceable_Func"

        assert isinstance(func.REPLACEABLE_FUNC().type, DateTime)
        assert isinstance(func.Replaceable_Func().type, Integer)
        assert isinstance(func.RePlAcEaBlE_fUnC().type, NullType)
        assert isinstance(func.replaceable_func().type, String)

        eq_(reg["replaceable_func"], functions._CASE_SENSITIVE)
        not_in_("REPLACEABLE_FUNC", reg)
        not_in_("Replaceable_Func", reg)
        in_("replaceable_func", cs_reg)
        eq_(
            set(cs_reg["replaceable_func"].keys()),
            set(["REPLACEABLE_FUNC", "Replaceable_Func", "replaceable_func"]),
        )