Example #1
 def test_rowcount_flag(self):
     metadata = self.metadata
     engine = engines.testing_engine(options={"enable_rowcount": True})
     assert engine.dialect.supports_sane_rowcount
     metadata.bind = engine
     t = Table("t1", metadata, Column("data", String(10)))
     metadata.create_all()
     r = t.insert().execute({"data": "d1"}, {"data": "d2"}, {"data": "d3"})
     r = t.update().where(t.c.data == "d2").values(data="d3").execute()
     eq_(r.rowcount, 1)
     r = t.delete().where(t.c.data == "d3").execute()
     eq_(r.rowcount, 2)
     r = t.delete().execution_options(enable_rowcount=False).execute()
     eq_(r.rowcount, -1)
     engine.dispose()
     engine = engines.testing_engine(options={"enable_rowcount": False})
     assert not engine.dialect.supports_sane_rowcount
     metadata.bind = engine
     r = t.insert().execute({"data": "d1"}, {"data": "d2"}, {"data": "d3"})
     r = t.update().where(t.c.data == "d2").values(data="d3").execute()
     eq_(r.rowcount, -1)
     r = t.delete().where(t.c.data == "d3").execute()
     eq_(r.rowcount, -1)
     r = t.delete().execution_options(enable_rowcount=True).execute()
     eq_(r.rowcount, 1)
     r.close()
     engine.dispose()
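A note on the behavior asserted above: rowcount reports the number of rows
matched by the last UPDATE or DELETE, and the harness reports -1 when
rowcount support is disabled. A minimal sketch of the same semantics outside
the test suite, assuming SQLAlchemy 1.4+ and an in-memory SQLite database
(which supports sane rowcounts):

    from sqlalchemy import create_engine, MetaData, Table, Column, String

    engine = create_engine("sqlite://")
    metadata = MetaData()
    t = Table("t1", metadata, Column("data", String(10)))
    metadata.create_all(engine)

    with engine.begin() as conn:
        conn.execute(
            t.insert(), [{"data": "d1"}, {"data": "d2"}, {"data": "d3"}])
        # one row matches the UPDATE ...
        r = conn.execute(t.update().where(t.c.data == "d2").values(data="d3"))
        assert r.rowcount == 1
        # ... and two rows now match the DELETE
        r = conn.execute(t.delete().where(t.c.data == "d3"))
        assert r.rowcount == 2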
Example #2
    def test_foreignkey_missing_insert(self):
        t1 = Table('t1', metadata, Column('id', Integer,
                                          primary_key=True))
        t2 = Table(
            't2',
            metadata,
            Column(
                'id',
                Integer,
                ForeignKey('t1.id'),
                primary_key=True))
        metadata.create_all()

        # want to ensure that 'null value in column "id" violates not-
        # null constraint' is raised (IntegrityError on psycopg2, but
        # ProgrammingError on pg8000), and not 'ProgrammingError:
        # (ProgrammingError) relation "t2_id_seq" does not exist'.
        # the latter corresponds to autoincrement behavior, which is not
        # the case here due to the foreign key.

        for eng in [
            engines.testing_engine(options={'implicit_returning': False}),
            engines.testing_engine(options={'implicit_returning': True})
        ]:
            assert_raises_message(exc.DBAPIError,
                                  'violates not-null constraint',
                                  eng.execute, t2.insert())
Example #3
    def _init_dbs(self):
        db1 = testing_engine("sqlite:///shard1.db", options=dict(pool_threadlocal=True))
        db2 = testing_engine("sqlite:///shard2.db")
        db3 = testing_engine("sqlite:///shard3.db")
        db4 = testing_engine("sqlite:///shard4.db")

        return db1, db2, db3, db4
Example #4
 def test_rowcount_flag(self):
     metadata = self.metadata
     engine = engines.testing_engine(options={'enable_rowcount': True})
     assert engine.dialect.supports_sane_rowcount
     metadata.bind = engine
     t = Table('t1', metadata, Column('data', String(10)))
     metadata.create_all()
     r = t.insert().execute({'data': 'd1'}, {'data': 'd2'}, {'data': 'd3'})
     r = t.update().where(t.c.data == 'd2').values(data='d3').execute()
     eq_(r.rowcount, 1)
     r = t.delete().where(t.c.data == 'd3').execute()
     eq_(r.rowcount, 2)
     r = t.delete().execution_options(enable_rowcount=False).execute()
     eq_(r.rowcount, -1)
     engine.dispose()
     engine = engines.testing_engine(options={'enable_rowcount': False})
     assert not engine.dialect.supports_sane_rowcount
     metadata.bind = engine
     r = t.insert().execute({'data': 'd1'}, {'data': 'd2'}, {'data': 'd3'})
     r = t.update().where(t.c.data == 'd2').values(data='d3').execute()
     eq_(r.rowcount, -1)
     r = t.delete().where(t.c.data == 'd3').execute()
     eq_(r.rowcount, -1)
     r = t.delete().execution_options(enable_rowcount=True).execute()
     eq_(r.rowcount, 1)
     r.close()
     engine.dispose()
Example #5
    def test_bind_arguments(self):
        users, Address, addresses, User = (self.tables.users,
                                           self.classes.Address,
                                           self.tables.addresses,
                                           self.classes.User)

        mapper(User, users)
        mapper(Address, addresses)

        e1 = engines.testing_engine()
        e2 = engines.testing_engine()
        e3 = engines.testing_engine()

        sess = Session(e3)
        sess.bind_mapper(User, e1)
        sess.bind_mapper(Address, e2)

        assert sess.connection().engine is e3
        assert sess.connection(bind=e1).engine is e1
        assert sess.connection(mapper=Address, bind=e1).engine is e1
        assert sess.connection(mapper=Address).engine is e2
        assert sess.connection(clause=addresses.select()).engine is e2
        assert sess.connection(mapper=User,
                               clause=addresses.select()).engine is e1
        assert sess.connection(mapper=User,
                               clause=addresses.select(),
                               bind=e2).engine is e2

        sess.close()
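The bind_mapper() calls above have a declarative equivalent: the binds=
argument of Session(). A minimal sketch, reusing the User, Address, e1, e2
and e3 names from the test above:

    from sqlalchemy.orm import Session

    # binds= wins per mapped class; bind= is the session-wide fallback
    sess = Session(bind=e3, binds={User: e1, Address: e2})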
Example #6
    def test_engine_param_stays(self):

        eng = testing_engine()
        isolation_level = eng.dialect.get_isolation_level(
                                eng.connect().connection)
        level = self._non_default_isolation_level()

        ne_(isolation_level, level)

        eng = testing_engine(options=dict(isolation_level=level))
        eq_(
            eng.dialect.get_isolation_level(
                                eng.connect().connection),
            level
        )

        # check that it stays
        conn = eng.connect()
        eq_(
            eng.dialect.get_isolation_level(conn.connection),
            level
        )
        conn.close()

        conn = eng.connect()
        eq_(
            eng.dialect.get_isolation_level(conn.connection),
            level
        )
        conn.close()
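Outside the harness, the isolation_level option exercised above is an
ordinary create_engine() argument; the dialect re-applies it to every new
DBAPI connection, which is what the "check that it stays" block asserts. A
minimal sketch (URL and level are placeholders for a backend that supports
them):

    from sqlalchemy import create_engine

    eng = create_engine(
        "postgresql://scott:tiger@localhost/test",
        isolation_level="SERIALIZABLE",
    )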
Example #7
    def test_foreignkey_missing_insert(self):
        Table("t1", self.metadata, Column("id", Integer, primary_key=True))
        t2 = Table(
            "t2",
            self.metadata,
            Column("id", Integer, ForeignKey("t1.id"), primary_key=True),
        )
        self.metadata.create_all()

        # want to ensure that 'null value in column "id" violates not-
        # null constraint' is raised (IntegrityError on psycopg2, but
        # ProgrammingError on pg8000), and not 'ProgrammingError:
        # (ProgrammingError) relation "t2_id_seq" does not exist'.
        # the latter corresponds to autoincrement behavior, which is not
        # the case here due to the foreign key.

        for eng in [
            engines.testing_engine(options={"implicit_returning": False}),
            engines.testing_engine(options={"implicit_returning": True}),
        ]:
            with expect_warnings(
                ".*has no Python-side or server-side default.*"
            ):
                assert_raises(
                    (exc.IntegrityError, exc.ProgrammingError),
                    eng.execute,
                    t2.insert(),
                )
Example #8
    def _test_lastrow_accessor(self, table_, values, assertvalues):
        """Tests the inserted_primary_key and lastrow_has_defaults() functions."""

        def insert_values(engine, table_, values):
            """
            Insert a row into a table and return the full set of values
            INSERTed, including defaults that fired off on the DB side,
            detecting rows that had defaults and post-fetches.
            """

            # verify implicit_returning is working
            if engine.dialect.implicit_returning:
                ins = table_.insert()
                comp = ins.compile(engine, column_keys=list(values))
                if not set(values).issuperset(
                    c.key for c in table_.primary_key
                ):
                    is_(bool(comp.returning), True)

            result = engine.execute(table_.insert(), **values)
            ret = values.copy()

            for col, id_ in zip(
                table_.primary_key, result.inserted_primary_key
            ):
                ret[col.key] = id_

            if result.lastrow_has_defaults():
                criterion = and_(
                    *[
                        col == id_
                        for col, id_ in zip(
                            table_.primary_key, result.inserted_primary_key
                        )
                    ]
                )
                row = engine.execute(table_.select(criterion)).first()
                for c in table_.c:
                    ret[c.key] = row[c]
            return ret

        if testing.against("firebird", "postgresql", "oracle", "mssql"):
            assert testing.db.dialect.implicit_returning

        if testing.db.dialect.implicit_returning:
            test_engines = [
                engines.testing_engine(options={"implicit_returning": False}),
                engines.testing_engine(options={"implicit_returning": True}),
            ]
        else:
            test_engines = [testing.db]

        for engine in test_engines:
            try:
                table_.create(bind=engine, checkfirst=True)
                i = insert_values(engine, table_, values)
                eq_(i, assertvalues)
            finally:
                table_.drop(bind=engine)
Example #9
    def _init_dbs(self):
        db1 = testing_engine(
            'sqlite:///shard1_%s.db' % provision.FOLLOWER_IDENT,
            options=dict(pool_threadlocal=True))
        db2 = testing_engine(
            'sqlite:///shard2_%s.db' % provision.FOLLOWER_IDENT)
        db3 = testing_engine(
            'sqlite:///shard3_%s.db' % provision.FOLLOWER_IDENT)
        db4 = testing_engine(
            'sqlite:///shard4_%s.db' % provision.FOLLOWER_IDENT)

        self.dbs = [db1, db2, db3, db4]
        return self.dbs
Example #10
    def _init_dbs(self):
        self.db1 = db1 = testing_engine(
            'sqlite:///shard1_%s.db' % provision.FOLLOWER_IDENT,
            options=dict(pool_threadlocal=True))
        self.db2 = db2 = testing_engine(
            'sqlite:///shard2_%s.db' % provision.FOLLOWER_IDENT)

        for db in (db1, db2):
            self.metadata.create_all(db)

        self.dbs = [db1, db2]

        return self.dbs
Example #11
    def _init_dbs(self):
        self.db1 = db1 = testing_engine(
            "sqlite:///shard1_%s.db" % provision.FOLLOWER_IDENT
        )
        self.db2 = db2 = testing_engine(
            "sqlite:///shard2_%s.db" % provision.FOLLOWER_IDENT
        )

        for db in (db1, db2):
            self.metadata.create_all(db)

        self.dbs = [db1, db2]

        return self.dbs
Example #12
    def _assert_data_with_sequence_returning(self, table, seqname):
        engine = engines.testing_engine(options={"implicit_returning": True})

        with self.sql_execution_asserter(engine) as asserter:
            with engine.connect() as conn:
                conn.execute(table.insert(), {"id": 30, "data": "d1"})
                conn.execute(table.insert(), {"data": "d2"})
                conn.execute(table.insert(), {"id": 31, "data": "d3"}, {"id": 32, "data": "d4"})
                conn.execute(table.insert(), {"data": "d5"}, {"data": "d6"})
                conn.execute(table.insert(inline=True), {"id": 33, "data": "d7"})
                conn.execute(table.insert(inline=True), {"data": "d8"})

        asserter.assert_(
            DialectSQL("INSERT INTO testtable (id, data) VALUES (:id, :data)", {"id": 30, "data": "d1"}),
            DialectSQL(
                "INSERT INTO testtable (id, data) VALUES (nextval('my_seq'), :data) RETURNING testtable.id",
                {"data": "d2"},
            ),
            DialectSQL(
                "INSERT INTO testtable (id, data) VALUES (:id, :data)",
                [{"id": 31, "data": "d3"}, {"id": 32, "data": "d4"}],
            ),
            DialectSQL(
                "INSERT INTO testtable (id, data) VALUES (nextval('%s'), :data)" % seqname,
                [{"data": "d5"}, {"data": "d6"}],
            ),
            DialectSQL("INSERT INTO testtable (id, data) VALUES (:id, :data)", [{"id": 33, "data": "d7"}]),
            DialectSQL("INSERT INTO testtable (id, data) VALUES (nextval('%s'), :data)" % seqname, [{"data": "d8"}]),
        )

        with engine.connect() as conn:
            eq_(
                conn.execute(table.select()).fetchall(),
                [(30, "d1"), (1, "d2"), (31, "d3"), (32, "d4"), (2, "d5"), (3, "d6"), (33, "d7"), (4, "d8")],
            )
Example #13
    def test_levels(self):
        e1 = engines.testing_engine()

        eq_(e1._should_log_info(), False)
        eq_(e1._should_log_debug(), False)
        eq_(e1.logger.isEnabledFor(logging.INFO), False)
        eq_(e1.logger.getEffectiveLevel(), logging.WARN)

        e1.echo = True
        eq_(e1._should_log_info(), True)
        eq_(e1._should_log_debug(), False)
        eq_(e1.logger.isEnabledFor(logging.INFO), True)
        eq_(e1.logger.getEffectiveLevel(), logging.INFO)

        e1.echo = "debug"
        eq_(e1._should_log_info(), True)
        eq_(e1._should_log_debug(), True)
        eq_(e1.logger.isEnabledFor(logging.DEBUG), True)
        eq_(e1.logger.getEffectiveLevel(), logging.DEBUG)

        e1.echo = False
        eq_(e1._should_log_info(), False)
        eq_(e1._should_log_debug(), False)
        eq_(e1.logger.isEnabledFor(logging.INFO), False)
        eq_(e1.logger.getEffectiveLevel(), logging.WARN)
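The echo flag above is a thin layer over the stdlib logging module: False
leaves the "sqlalchemy.engine" logger at WARN, True drops it to INFO
(statements), and "debug" to DEBUG (statements plus result rows). A minimal
sketch on a plain engine:

    import logging

    from sqlalchemy import create_engine

    eng = create_engine("sqlite://")
    eng.echo = "debug"  # statements and result rows
    assert eng.logger.isEnabledFor(logging.DEBUG)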
Example #14
    def test_flag_on(self):
        t = Table(
            "t",
            self.metadata,
            Column("id", Integer, primary_key=True),
            Column("data", String(50)),
        )
        t.create()

        eng = engines.testing_engine(options={"fast_executemany": True})

        @event.listens_for(eng, "after_cursor_execute")
        def after_cursor_execute(
            conn, cursor, statement, parameters, context, executemany
        ):
            if executemany:
                assert cursor.fast_executemany

        with eng.connect() as conn:
            conn.execute(
                t.insert(),
                [{"id": i, "data": "data_%d" % i} for i in range(100)],
            )

            conn.execute(t.insert(), {"id": 200, "data": "data_200"})
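fast_executemany is a regular engine option for the mssql+pyodbc dialect
(SQLAlchemy 1.3+): it sets cursor.fast_executemany = True before
executemany() calls, which is exactly what the event listener above
verifies. A minimal sketch (the DSN is a placeholder):

    from sqlalchemy import create_engine

    eng = create_engine(
        "mssql+pyodbc://scott:tiger@mydsn",
        fast_executemany=True,
    )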
Example #15
def _engine_uri(options, file_config):
    import re

    from sqlalchemy.testing import engines, config
    from sqlalchemy import testing

    if options.dburi:
        db_urls = list(options.dburi)
    else:
        db_urls = []

    if options.db:
        for db_token in options.db:
            for db in re.split(r'[,\s]+', db_token):
                if db not in file_config.options('db'):
                    raise RuntimeError(
                        "Unknown URI specifier '%s'.  "
                        "Specify --dbs for known uris." % db)
                else:
                    db_urls.append(file_config.get('db', db))

    if not db_urls:
        db_urls.append(file_config.get('db', 'default'))

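    # NOTE: db_opts is assumed to be a module-level dict of default engine
    # options populated elsewhere in this plugin; it is not defined in this
    # excerpt.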
    for db_url in db_urls:
        eng = engines.testing_engine(db_url, db_opts)
        eng.connect().close()
        config.Config.register(eng, db_opts, options, file_config, testing)

    config.db_opts = db_opts
Example #16
    def test_ad_hoc_types(self):
        """test storage of bind processors, result processors
        in dialect-wide registry."""

        from sqlalchemy.dialects import mysql, postgresql, sqlite
        from sqlalchemy import types

        eng = engines.testing_engine()
        for args in (
            (types.Integer,),
            (types.String,),
            (types.PickleType,),
            (types.Enum, "a", "b", "c"),
            (sqlite.DATETIME,),
            (postgresql.ENUM, "a", "b", "c"),
            (types.Interval,),
            (postgresql.INTERVAL,),
            (mysql.VARCHAR,),
        ):

            @profile_memory()
            def go():
                type_ = args[0](*args[1:])
                bp = type_._cached_bind_processor(eng.dialect)
                rp = type_._cached_result_processor(eng.dialect, 0)
                bp, rp  # strong reference

            go()

        assert not eng.dialect._type_memos
Example #17
        def go():
            engine = engines.testing_engine(
                options={'logging_name': 'FOO',
                         'pool_logging_name': 'BAR',
                         'use_reaper': False}
            )
            sess = create_session(bind=engine)

            a1 = A(col2="a1")
            a2 = A(col2="a2")
            a3 = A(col2="a3")
            a1.bs.append(B(col2="b1"))
            a1.bs.append(B(col2="b2"))
            a3.bs.append(B(col2="b3"))
            for x in [a1, a2, a3]:
                sess.add(x)
            sess.flush()
            sess.expunge_all()

            alist = sess.query(A).all()
            eq_(
                [
                    A(col2="a1", bs=[B(col2="b1"), B(col2="b2")]),
                    A(col2="a2", bs=[]),
                    A(col2="a3", bs=[B(col2="b3")])
                ],
                alist)

            for a in alist:
                sess.delete(a)
            sess.flush()
            sess.close()
            engine.dispose()
Example #18
    def test_reset_rollback_two_phase_no_rollback(self):
        # test [ticket:2907], essentially that the
        # TwoPhaseTransaction is given the job of "reset on return"
        # so that picky backends like MySQL correctly clear out
        # their state when a connection is closed without handling
        # the transaction explicitly.

        eng = testing_engine()

        # MySQL raises if you call straight rollback() on
        # a connection with an XID present
        @event.listens_for(eng, "invalidate")
        def conn_invalidated(dbapi_con, con_record, exception):
            dbapi_con.close()
            raise exception

        with eng.connect() as conn:
            rec = conn.connection._connection_record
            raw_dbapi_con = rec.connection
            xa = conn.begin_twophase()
            conn.execute(users.insert(), user_id=1, user_name='user1')

        assert rec.connection is raw_dbapi_con

        with eng.connect() as conn:
            result = conn.execute(
                select([users.c.user_name]).order_by(users.c.user_id))
            eq_(result.fetchall(), [])
Example #19
 def test_large_stream_single_arraysize(self):
     binary_table = self.tables.binary_table
     eng = testing_engine(options={"arraysize": 1})
     result = eng.execute(
         binary_table.select().order_by(binary_table.c.id)
     ).fetchall()
     eq_(result, [(i, self.stream) for i in range(1, 11)])
Example #20
    def test_row_case_sensitive_unoptimized(self):
        ins_db = engines.testing_engine(options={"case_sensitive": True})
        row = ins_db.execute(
            select([
                literal_column("1").label("case_insensitive"),
                literal_column("2").label("CaseSensitive"),
                text("3 AS screw_up_the_cols")
            ])
        ).first()

        eq_(
            list(row.keys()),
            ["case_insensitive", "CaseSensitive", "screw_up_the_cols"])

        in_("case_insensitive", row._keymap)
        in_("CaseSensitive", row._keymap)
        not_in_("casesensitive", row._keymap)

        eq_(row["case_insensitive"], 1)
        eq_(row["CaseSensitive"], 2)
        eq_(row["screw_up_the_cols"], 3)

        assert_raises(KeyError, lambda: row["Case_insensitive"])
        assert_raises(KeyError, lambda: row["casesensitive"])
        assert_raises(KeyError, lambda: row["screw_UP_the_cols"])
Example #21
    def test_coerce_to_unicode(self):
        engine = testing_engine(options=dict(coerce_to_unicode=True))
        value = engine.scalar("SELECT 'hello' FROM DUAL")
        assert isinstance(value, util.text_type)

        value = testing.db.scalar("SELECT 'hello' FROM DUAL")
        assert isinstance(value, util.binary_type)
Example #22
    def test_per_connection(self):
        from sqlalchemy.pool import QueuePool
        eng = testing_engine(options=dict(
            poolclass=QueuePool, pool_size=2, max_overflow=0))

        c1 = eng.connect()
        c1 = c1.execution_options(
            isolation_level=self._non_default_isolation_level())
        c2 = eng.connect()
        eq_(
            eng.dialect.get_isolation_level(c1.connection),
            self._non_default_isolation_level()
        )
        eq_(
            eng.dialect.get_isolation_level(c2.connection),
            self._default_isolation_level()
        )
        c1.close()
        c2.close()
        c3 = eng.connect()
        eq_(
            eng.dialect.get_isolation_level(c3.connection),
            self._default_isolation_level()
        )
        c4 = eng.connect()
        eq_(
            eng.dialect.get_isolation_level(c4.connection),
            self._default_isolation_level()
        )

        c3.close()
        c4.close()
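The per-connection behavior tested above comes from
Connection.execution_options(); the pool's reset-on-return restores the
engine-wide default, which is why c3 and c4 see the default level again. A
minimal sketch, reusing eng from the test (the level string is a placeholder
valid for the backend in use):

    conn = eng.connect().execution_options(isolation_level="SERIALIZABLE")
    # ... statements on conn now run at the elevated level ...
    conn.close()  # returning to the pool restores the default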
Example #23
    def test_special_encodings(self):

        for enc in ['utf8mb4', 'utf8']:
            eng = engines.testing_engine(
                options={"connect_args": {'charset': enc, 'use_unicode': 0}})
            conn = eng.connect()
            eq_(conn.dialect._connection_charset, enc)
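Outside the harness, these DBAPI connect arguments are usually carried on
the URL query string, which MySQL drivers such as PyMySQL accept directly. A
minimal sketch (host and credentials are placeholders):

    from sqlalchemy import create_engine

    eng = create_engine(
        "mysql+pymysql://scott:tiger@localhost/test?charset=utf8mb4")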
Example #24
 def test_native_datetime(self):
     dbapi = testing.db.dialect.dbapi
     connect_args = {
         'detect_types': dbapi.PARSE_DECLTYPES | dbapi.PARSE_COLNAMES}
     engine = engines.testing_engine(
         options={'connect_args': connect_args, 'native_datetime': True})
     t = Table(
         'datetest', MetaData(),
         Column('id', Integer, primary_key=True),
         Column('d1', Date), Column('d2', sqltypes.TIMESTAMP))
     t.create(engine)
     try:
         engine.execute(t.insert(), {
             'd1': datetime.date(2010, 5, 10),
             'd2': datetime.datetime(2010, 5, 10, 12, 15, 25)
         })
         row = engine.execute(t.select()).first()
         eq_(
             row,
             (1, datetime.date(2010, 5, 10),
                 datetime.datetime(2010, 5, 10, 12, 15, 25)))
         r = engine.execute(func.current_date()).scalar()
         assert isinstance(r, util.string_types)
     finally:
         t.drop(engine)
         engine.dispose()
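The connect_args above come straight from the stdlib sqlite3 DBAPI:
PARSE_DECLTYPES/PARSE_COLNAMES make it return real date/datetime objects,
and native_datetime tells the SQLite dialect to pass them through untouched.
A minimal sketch on a plain engine:

    import sqlite3

    from sqlalchemy import create_engine

    eng = create_engine(
        "sqlite://",
        native_datetime=True,
        connect_args={
            "detect_types": sqlite3.PARSE_DECLTYPES | sqlite3.PARSE_COLNAMES
        },
    )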
Example #25
    def test_native_odbc_execute(self):
        t1 = Table('t1', MetaData(), Column('c1', Integer))
        dbapi = MockDBAPI()
        engine = engines.testing_engine('mssql+mxodbc://localhost',
                options={'module': dbapi, '_initialize': False})
        conn = engine.connect()

        # crud: uses execute

        conn.execute(t1.insert().values(c1='foo'))
        conn.execute(t1.delete().where(t1.c.c1 == 'foo'))
        conn.execute(t1.update().where(t1.c.c1 == 'foo').values(c1='bar'))

        # select: uses executedirect

        conn.execute(t1.select())

        # manual flagging

        conn.execution_options(native_odbc_execute=True).\
            execute(t1.select())
        conn.execution_options(native_odbc_execute=False).\
            execute(t1.insert().values(c1='foo'))
        eq_(dbapi.log, [
            'executedirect',
            'executedirect',
            'executedirect',
            'executedirect',
            'execute',
            'executedirect',
            ])
Example #26
    def test_lobs_without_convert_many_rows(self):
        engine = testing_engine(
            options=dict(auto_convert_lobs=False, arraysize=1))
        result = engine.execute(
            "select id, data, bindata from z_test order by id")
        results = result.fetchall()

        def go():
            eq_(
                [
                    dict(
                        id=row["id"],
                        data=row["data"].read(),
                        bindata=row["bindata"].read()
                    ) for row in results
                ],
                self.data)
        # this comes from cx_Oracle because these are raw
        # cx_Oracle.Variable objects
        if testing.requires.oracle5x.enabled:
            assert_raises_message(
                testing.db.dialect.dbapi.ProgrammingError,
                "LOB variable no longer valid after subsequent fetch",
                go
            )
        else:
            go()
Example #27
    def test_twophase(self):
        users, Address, addresses, User = (self.tables.users,
                                           self.classes.Address,
                                           self.tables.addresses,
                                           self.classes.User)

        # TODO: mock up a failure condition here
        # to ensure a rollback succeeds
        mapper(User, users)
        mapper(Address, addresses)

        engine2 = engines.testing_engine()
        sess = create_session(autocommit=True, autoflush=False,
                              twophase=True)
        sess.bind_mapper(User, testing.db)
        sess.bind_mapper(Address, engine2)
        sess.begin()
        u1 = User(name='u1')
        a1 = Address(email_address='u1@e')
        sess.add_all((u1, a1))
        sess.commit()
        sess.close()
        engine2.dispose()
        assert users.count().scalar() == 1
        assert addresses.count().scalar() == 1
Example #28
 def setup_bind(cls):
     if config.requirements.independent_connections.enabled:
         from sqlalchemy import pool
         return engines.testing_engine(
             options=dict(poolclass=pool.StaticPool))
     else:
         return config.db
Example #29
    def test_explode_in_initializer(self):
        engine = engines.testing_engine()

        def broken_initialize(connection):
            connection.execute("select fake_stuff from _fake_table")

        engine.dialect.initialize = broken_initialize

        # raises a DBAPIError, not an AttributeError
        assert_raises(exc.DBAPIError, engine.connect)

        # dispose connections so we get a new one on
        # next go
        engine.dispose()

        p1 = engine.pool

        def is_disconnect(e, conn, cursor):
            return True

        engine.dialect.is_disconnect = is_disconnect

        # invalidate() also doesn't screw up
        assert_raises(exc.DBAPIError, engine.connect)

        # pool was recreated
        assert engine.pool is not p1
Example #30
 def setup_class(cls):
     global metadata, cattable, matchtable
     metadata = MetaData(testing.db)
     cattable = Table('cattable', metadata, Column('id', Integer),
                      Column('description', String(50)),
                      PrimaryKeyConstraint('id', name='PK_cattable'))
     matchtable = Table(
         'matchtable',
         metadata,
         Column('id', Integer),
         Column('title', String(200)),
         Column('category_id', Integer, ForeignKey('cattable.id')),
         PrimaryKeyConstraint('id', name='PK_matchtable'),
     )
     DDL("""CREATE FULLTEXT INDEX
                    ON cattable (description)
                    KEY INDEX PK_cattable""").\
         execute_at('after-create', matchtable)
     DDL("""CREATE FULLTEXT INDEX
                    ON matchtable (title)
                    KEY INDEX PK_matchtable""").\
         execute_at('after-create', matchtable)
     metadata.create_all()
     cattable.insert().execute([{'id': 1, 'description': 'Python'},
                                {'id': 2, 'description': 'Ruby'}])
     matchtable.insert().execute([
         {'id': 1, 'title': 'Web Development with Rails', 'category_id': 2},
         {'id': 2, 'title': 'Dive Into Python', 'category_id': 1},
         {'id': 3, 'title': "Programming Matz's Ruby", 'category_id': 2},
         {'id': 4, 'title': 'Guide to Django', 'category_id': 1},
         {'id': 5, 'title': 'Python in a Nutshell', 'category_id': 1}])
     DDL("WAITFOR DELAY '00:00:05'"
         ).execute(bind=engines.testing_engine())
Example #31
    def _test_binary_reflection(self, deprecate_large_types):
        "Exercise type specification for binary types."

        columns = [
            # column type, args, kwargs, expected ddl from reflected
            (mssql.MSBinary, [], {},
             'BINARY(1)'),
            (mssql.MSBinary, [10], {},
             'BINARY(10)'),

            (types.BINARY, [], {},
             'BINARY(1)'),
            (types.BINARY, [10], {},
             'BINARY(10)'),

            (mssql.MSVarBinary, [], {},
             'VARBINARY(max)'),
            (mssql.MSVarBinary, [10], {},
             'VARBINARY(10)'),

            (types.VARBINARY, [10], {},
             'VARBINARY(10)'),
            (types.VARBINARY, [], {},
             'VARBINARY(max)'),

            (mssql.MSImage, [], {},
             'IMAGE'),

            (mssql.IMAGE, [], {},
             'IMAGE'),

            (types.LargeBinary, [], {},
             'IMAGE' if not deprecate_large_types else 'VARBINARY(max)'),
        ]

        metadata = self.metadata
        metadata.bind = engines.testing_engine(
            options={"deprecate_large_types": deprecate_large_types})
        table_args = ['test_mssql_binary', metadata]
        for index, spec in enumerate(columns):
            type_, args, kw, res = spec
            table_args.append(Column('c%s' % index, type_(*args, **kw),
                              nullable=None))
        binary_table = Table(*table_args)
        metadata.create_all()
        reflected_binary = Table('test_mssql_binary',
                                 MetaData(testing.db), autoload=True)
        for col, spec in zip(reflected_binary.c, columns):
            eq_(
                str(col.type), spec[3],
                "column %s %s != %s" % (col.key, str(col.type), spec[3])
            )
            c1 = testing.db.dialect.type_descriptor(col.type).__class__
            c2 = \
                testing.db.dialect.type_descriptor(
                    binary_table.c[col.name].type).__class__
            assert issubclass(c1, c2), \
                'column %s: %r is not a subclass of %r' \
                % (col.key, c1, c2)
            if binary_table.c[col.name].type.length:
                testing.eq_(col.type.length,
                            binary_table.c[col.name].type.length)
Example #32
def _setup_engine(cls):
    if getattr(cls, "__engine_options__", None):
        opts = dict(cls.__engine_options__)
        opts["scope"] = "class"
        eng = engines.testing_engine(options=opts)
        config._current.push_engine(eng, testing)
Example #33
 def setup_bind(cls):
     return engines.testing_engine(options={"optimize_limits": True})
Example #34
 def async_engine(self):
     return engines.testing_engine(asyncio=True, transfer_staticpool=True)
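The asyncio=True flag above wraps the engine for asyncio use; the public
equivalent is create_async_engine(). A minimal sketch (assuming SQLAlchemy
1.4+ with the aiosqlite driver installed):

    import asyncio

    from sqlalchemy import text
    from sqlalchemy.ext.asyncio import create_async_engine

    async def main():
        engine = create_async_engine("sqlite+aiosqlite://")
        async with engine.connect() as conn:
            result = await conn.execute(text("select 1"))
            print(result.scalar())
        await engine.dispose()

    asyncio.run(main())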
Example #35
 def setup(self):
     super(BatchInsertsTest, self).setup()
     self.engine = engines.testing_engine(options={"use_batch_mode": True})
Example #36
 def setup_bind(cls):
     return engines.testing_engine(options=dict(strategy='threadlocal'))
Example #37
    def test_initial_transaction_state(self):
        from psycopg2.extensions import STATUS_IN_TRANSACTION

        engine = engines.testing_engine()
        with engine.connect() as conn:
            ne_(conn.connection.status, STATUS_IN_TRANSACTION)
Example #38
    def test_insert_page_size(self):
        from psycopg2 import extras

        opts = self.options.copy()
        opts["executemany_batch_page_size"] = 500
        opts["executemany_values_page_size"] = 1000

        eng = engines.testing_engine(options=opts)

        if eng.dialect.executemany_mode & EXECUTEMANY_VALUES:
            meth = extras.execute_values
            stmt = "INSERT INTO data (x, y) VALUES %s"
            expected_kwargs = {
                "fetch": False,
                "page_size": 1000,
                "template": "(%(x)s, %(y)s)",
            }
        elif eng.dialect.executemany_mode & EXECUTEMANY_BATCH:
            meth = extras.execute_batch
            stmt = "INSERT INTO data (x, y) VALUES (%(x)s, %(y)s)"
            expected_kwargs = {"page_size": 500}
        else:
            assert False

        with mock.patch.object(extras, meth.__name__,
                               side_effect=meth) as mock_exec:
            with eng.begin() as conn:
                conn.execute(
                    self.tables.data.insert(),
                    [
                        {
                            "x": "x1",
                            "y": "y1"
                        },
                        {
                            "x": "x2",
                            "y": "y2"
                        },
                        {
                            "x": "x3",
                            "y": "y3"
                        },
                    ],
                )

        eq_(
            mock_exec.mock_calls,
            [
                mock.call(mock.ANY, stmt, (
                    {
                        "x": "x1",
                        "y": "y1"
                    },
                    {
                        "x": "x2",
                        "y": "y2"
                    },
                    {
                        "x": "x3",
                        "y": "y3"
                    },
                ), **expected_kwargs)
            ],
        )
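The page-size options above are ordinary create_engine() arguments for the
psycopg2 dialect. A minimal sketch using the 1.3-style executemany_mode
names that this test assumes (URL is a placeholder):

    from sqlalchemy import create_engine

    # "values" uses psycopg2's execute_values() with pages of 1000
    # parameter sets; execute_batch(), when it applies, pages at 500
    eng = create_engine(
        "postgresql+psycopg2://scott:tiger@localhost/test",
        executemany_mode="values",
        executemany_values_page_size=1000,
        executemany_batch_page_size=500,
    )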
Example #39
 def test_flag_turned_on(self):
     e = engines.testing_engine(options={"implicit_returning": True})
     assert e.dialect.implicit_returning is True
     c = e.connect()
     c.close()
     assert e.dialect.implicit_returning is True
Example #40
    def test_bind_through_execute(
        self, statement, expected_get_bind_args, expected_engine_name
    ):
        users, Address, addresses, User = (
            self.tables.users,
            self.classes.Address,
            self.tables.addresses,
            self.classes.User,
        )

        mapper(User, users, properties={"addresses": relationship(Address)})
        mapper(Address, addresses)

        e1 = engines.testing_engine()
        e2 = engines.testing_engine()
        e3 = engines.testing_engine()

        canary = mock.Mock()

        class GetBindSession(Session):
            def _connection_for_bind(self, bind, **kw):
                canary._connection_for_bind(bind, **kw)
                return mock.Mock()

            def get_bind(self, **kw):
                canary.get_bind(**kw)
                return Session.get_bind(self, **kw)

        sess = GetBindSession(e3, future=True)
        sess.bind_mapper(User, e1)
        sess.bind_mapper(Address, e2)

        lambda_args = dict(
            session=sess,
            User=User,
            Address=Address,
            e1=e1,
            e2=e2,
            e3=e3,
            addresses=addresses,
        )
        statement = testing.resolve_lambda(statement, **lambda_args)

        expected_get_bind_args = testing.resolve_lambda(
            expected_get_bind_args, **lambda_args
        )

        engine = {"e1": e1, "e2": e2, "e3": e3}[expected_engine_name]

        with mock.patch(
            "sqlalchemy.orm.context.ORMCompileState.orm_setup_cursor_result"
        ):
            sess.execute(statement)

        eq_(
            canary.mock_calls,
            [
                mock.call.get_bind(**expected_get_bind_args),
                mock.call._connection_for_bind(engine, close_with_result=True),
            ],
        )
        sess.close()
Example #41
 def test_large_stream_single_arraysize(self):
     binary_table = self.tables.binary_table
     eng = testing_engine(options={'arraysize': 1})
     result = eng.execute(binary_table.select().order_by(
         binary_table.c.id)).fetchall()
     eq_(result, [(i, self.stream) for i in range(1, 11)])
Example #42
    def _assert_data_with_sequence_returning(self, table, seqname):
        engine = engines.testing_engine(options={"implicit_returning": True})

        with self.sql_execution_asserter(engine) as asserter:
            with engine.begin() as conn:
                conn.execute(table.insert(), {"id": 30, "data": "d1"})
                conn.execute(table.insert(), {"data": "d2"})
                conn.execute(
                    table.insert(),
                    [
                        {
                            "id": 31,
                            "data": "d3"
                        },
                        {
                            "id": 32,
                            "data": "d4"
                        },
                    ],
                )
                conn.execute(table.insert(), [{"data": "d5"}, {"data": "d6"}])
                conn.execute(table.insert().inline(), {"id": 33, "data": "d7"})
                conn.execute(table.insert().inline(), {"data": "d8"})

        asserter.assert_(
            DialectSQL(
                "INSERT INTO testtable (id, data) VALUES (:id, :data)",
                {
                    "id": 30,
                    "data": "d1"
                },
            ),
            DialectSQL(
                "INSERT INTO testtable (id, data) VALUES "
                "(nextval('my_seq'), :data) RETURNING testtable.id",
                {"data": "d2"},
            ),
            DialectSQL(
                "INSERT INTO testtable (id, data) VALUES (:id, :data)",
                [{
                    "id": 31,
                    "data": "d3"
                }, {
                    "id": 32,
                    "data": "d4"
                }],
            ),
            DialectSQL(
                "INSERT INTO testtable (id, data) VALUES (nextval('%s'), "
                ":data)" % seqname,
                [{
                    "data": "d5"
                }, {
                    "data": "d6"
                }],
            ),
            DialectSQL(
                "INSERT INTO testtable (id, data) VALUES (:id, :data)",
                [{
                    "id": 33,
                    "data": "d7"
                }],
            ),
            DialectSQL(
                "INSERT INTO testtable (id, data) VALUES (nextval('%s'), "
                ":data)" % seqname,
                [{
                    "data": "d8"
                }],
            ),
        )

        with engine.begin() as conn:
            eq_(
                conn.execute(table.select()).fetchall(),
                [
                    (30, "d1"),
                    (1, "d2"),
                    (31, "d3"),
                    (32, "d4"),
                    (2, "d5"),
                    (3, "d6"),
                    (33, "d7"),
                    (4, "d8"),
                ],
            )
Example #43
    def _assert_data_noautoincrement(self, table):
        engine = engines.testing_engine(options={"implicit_returning": False})

        # turning off the cache because we are checking for compile-time
        # warnings
        engine = engine.execution_options(compiled_cache=None)

        with engine.begin() as conn:
            conn.execute(table.insert(), {"id": 30, "data": "d1"})

        with engine.begin() as conn:
            with expect_warnings(
                    ".*has no Python-side or server-side default.*"):
                assert_raises(
                    (exc.IntegrityError, exc.ProgrammingError),
                    conn.execute,
                    table.insert(),
                    {"data": "d2"},
                )

        with engine.begin() as conn:
            with expect_warnings(
                    ".*has no Python-side or server-side default.*"):
                assert_raises(
                    (exc.IntegrityError, exc.ProgrammingError),
                    conn.execute,
                    table.insert(),
                    [{
                        "data": "d2"
                    }, {
                        "data": "d3"
                    }],
                )

        with engine.begin() as conn:
            with expect_warnings(
                    ".*has no Python-side or server-side default.*"):
                assert_raises(
                    (exc.IntegrityError, exc.ProgrammingError),
                    conn.execute,
                    table.insert(),
                    {"data": "d2"},
                )

        with engine.begin() as conn:
            with expect_warnings(
                    ".*has no Python-side or server-side default.*"):
                assert_raises(
                    (exc.IntegrityError, exc.ProgrammingError),
                    conn.execute,
                    table.insert(),
                    [{
                        "data": "d2"
                    }, {
                        "data": "d3"
                    }],
                )

        with engine.begin() as conn:
            conn.execute(
                table.insert(),
                [{
                    "id": 31,
                    "data": "d2"
                }, {
                    "id": 32,
                    "data": "d3"
                }],
            )
            conn.execute(table.insert().inline(), {"id": 33, "data": "d4"})
            eq_(
                conn.execute(table.select()).fetchall(),
                [(30, "d1"), (31, "d2"), (32, "d3"), (33, "d4")],
            )
            conn.execute(table.delete())

        # test the same series of events using a reflected version of
        # the table

        m2 = MetaData()
        table = Table(table.name, m2, autoload_with=engine)
        with engine.begin() as conn:
            conn.execute(table.insert(), {"id": 30, "data": "d1"})

        with engine.begin() as conn:
            with expect_warnings(
                    ".*has no Python-side or server-side default.*"):
                assert_raises(
                    (exc.IntegrityError, exc.ProgrammingError),
                    conn.execute,
                    table.insert(),
                    {"data": "d2"},
                )

        with engine.begin() as conn:
            with expect_warnings(
                    ".*has no Python-side or server-side default.*"):
                assert_raises(
                    (exc.IntegrityError, exc.ProgrammingError),
                    conn.execute,
                    table.insert(),
                    [{
                        "data": "d2"
                    }, {
                        "data": "d3"
                    }],
                )

        with engine.begin() as conn:
            conn.execute(
                table.insert(),
                [{
                    "id": 31,
                    "data": "d2"
                }, {
                    "id": 32,
                    "data": "d3"
                }],
            )
            conn.execute(table.insert().inline(), {"id": 33, "data": "d4"})
            eq_(
                conn.execute(table.select()).fetchall(),
                [(30, "d1"), (31, "d2"), (32, "d3"), (33, "d4")],
            )
Example #44
    def test_autoincrement(self):
        Table(
            'ai_1', metadata,
            Column('int_y', Integer, primary_key=True, autoincrement=True),
            Column(
                'int_n', Integer, DefaultClause('0'), primary_key=True))
        Table(
            'ai_2', metadata,
            Column('int_y', Integer, primary_key=True, autoincrement=True),
            Column('int_n', Integer, DefaultClause('0'), primary_key=True))
        Table(
            'ai_3', metadata,
            Column('int_n', Integer, DefaultClause('0'), primary_key=True),
            Column('int_y', Integer, primary_key=True, autoincrement=True))

        Table(
            'ai_4', metadata,
            Column('int_n', Integer, DefaultClause('0'), primary_key=True),
            Column('int_n2', Integer, DefaultClause('0'), primary_key=True))
        Table(
            'ai_5', metadata,
            Column('int_y', Integer, primary_key=True, autoincrement=True),
            Column('int_n', Integer, DefaultClause('0'), primary_key=True))
        Table(
            'ai_6', metadata,
            Column('o1', String(1), DefaultClause('x'), primary_key=True),
            Column('int_y', Integer, primary_key=True, autoincrement=True))
        Table(
            'ai_7', metadata,
            Column('o1', String(1), DefaultClause('x'),
                   primary_key=True),
            Column('o2', String(1), DefaultClause('x'),
                   primary_key=True),
            Column('int_y', Integer, autoincrement=True, primary_key=True))
        Table(
            'ai_8', metadata,
            Column('o1', String(1), DefaultClause('x'),
                   primary_key=True),
            Column('o2', String(1), DefaultClause('x'),
                   primary_key=True))
        metadata.create_all()

        table_names = ['ai_1', 'ai_2', 'ai_3', 'ai_4',
                       'ai_5', 'ai_6', 'ai_7', 'ai_8']
        mr = MetaData(testing.db)

        for name in table_names:
            tbl = Table(name, mr, autoload=True)
            tbl = metadata.tables[name]

            # test that the flag itself reflects appropriately
            for col in tbl.c:
                if 'int_y' in col.name:
                    is_(col.autoincrement, True)
                    is_(tbl._autoincrement_column, col)
                else:
                    eq_(col.autoincrement, 'auto')
                    is_not_(tbl._autoincrement_column, col)

            # mxodbc can't handle scope_identity() with DEFAULT VALUES

            if testing.db.driver == 'mxodbc':
                eng = [engines.testing_engine(
                    options={'implicit_returning': True})]
            else:
                eng = [
                    engines.testing_engine(
                        options={'implicit_returning': False}),
                    engines.testing_engine(
                        options={'implicit_returning': True}),
                ]

            for counter, engine in enumerate(eng):
                engine.execute(tbl.insert())
                if 'int_y' in tbl.c:
                    assert engine.scalar(select([tbl.c.int_y])) \
                        == counter + 1
                    assert list(
                        engine.execute(tbl.select()).first()).\
                        count(counter + 1) == 1
                else:
                    assert 1 \
                        not in list(engine.execute(tbl.select()).first())
                engine.execute(tbl.delete())
Example #45
    def test_round_trip(
        self,
        metadata,
        type_,
        data,
        expected,
        deprecate_large_types,
        slice_,
        zeropad,
    ):
        if (
            testing.db.dialect.deprecate_large_types
            is not deprecate_large_types
        ):
            engine = engines.testing_engine(
                options={"deprecate_large_types": deprecate_large_types}
            )
        else:
            engine = testing.db

        binary_table = Table(
            "binary_table",
            metadata,
            Column("id", Integer, primary_key=True),
            Column("data", type_),
        )
        binary_table.create(engine)

        if isinstance(data, str) and (
            data == "binary_data_one.dat" or data == "binary_data_two.dat"
        ):
            data = self._load_stream(data)

        if slice_ is not None:
            data = data[0:slice_]

        if expected is None:
            if zeropad:
                expected = data[0:slice_] + b"\x00"
            else:
                expected = data

        with engine.begin() as conn:
            conn.execute(binary_table.insert(), dict(data=data))

            eq_(conn.scalar(select(binary_table.c.data)), expected)

            eq_(
                conn.scalar(
                    text("select data from binary_table").columns(
                        binary_table.c.data
                    )
                ),
                expected,
            )

            conn.execute(binary_table.delete())

            conn.execute(binary_table.insert(), dict(data=None))
            eq_(conn.scalar(select(binary_table.c.data)), None)

            eq_(
                conn.scalar(
                    text("select data from binary_table").columns(
                        binary_table.c.data
                    )
                ),
                None,
            )
Example #46
def _setup_engine(cls):
    if getattr(cls, "__engine_options__", None):
        eng = engines.testing_engine(options=cls.__engine_options__)
        config._current.push_engine(eng, testing)
Example #47
    def test_autoincrement(self, metadata, connection):
        Table(
            "ai_1",
            metadata,
            Column("int_y", Integer, primary_key=True, autoincrement=True),
            Column("int_n", Integer, DefaultClause("0"), primary_key=True),
        )
        Table(
            "ai_2",
            metadata,
            Column("int_y", Integer, primary_key=True, autoincrement=True),
            Column("int_n", Integer, DefaultClause("0"), primary_key=True),
        )
        Table(
            "ai_3",
            metadata,
            Column("int_n", Integer, DefaultClause("0"), primary_key=True),
            Column("int_y", Integer, primary_key=True, autoincrement=True),
        )

        Table(
            "ai_4",
            metadata,
            Column("int_n", Integer, DefaultClause("0"), primary_key=True),
            Column("int_n2", Integer, DefaultClause("0"), primary_key=True),
        )
        Table(
            "ai_5",
            metadata,
            Column("int_y", Integer, primary_key=True, autoincrement=True),
            Column("int_n", Integer, DefaultClause("0"), primary_key=True),
        )
        Table(
            "ai_6",
            metadata,
            Column("o1", String(1), DefaultClause("x"), primary_key=True),
            Column("int_y", Integer, primary_key=True, autoincrement=True),
        )
        Table(
            "ai_7",
            metadata,
            Column("o1", String(1), DefaultClause("x"), primary_key=True),
            Column("o2", String(1), DefaultClause("x"), primary_key=True),
            Column("int_y", Integer, autoincrement=True, primary_key=True),
        )
        Table(
            "ai_8",
            metadata,
            Column("o1", String(1), DefaultClause("x"), primary_key=True),
            Column("o2", String(1), DefaultClause("x"), primary_key=True),
        )
        metadata.create_all(connection)

        table_names = [
            "ai_1",
            "ai_2",
            "ai_3",
            "ai_4",
            "ai_5",
            "ai_6",
            "ai_7",
            "ai_8",
        ]
        mr = MetaData()

        for name in table_names:
            tbl = Table(name, mr, autoload_with=connection)
            tbl = metadata.tables[name]

            # test that the flag itself reflects appropriately
            for col in tbl.c:
                if "int_y" in col.name:
                    is_(col.autoincrement, True)
                    is_(tbl._autoincrement_column, col)
                else:
                    eq_(col.autoincrement, "auto")
                    is_not(tbl._autoincrement_column, col)

            # mxodbc can't handle scope_identity() with DEFAULT VALUES

            if testing.db.driver == "mxodbc":
                eng = [
                    engines.testing_engine(
                        options={"implicit_returning": True}
                    )
                ]
            else:
                eng = [
                    engines.testing_engine(
                        options={"implicit_returning": False}
                    ),
                    engines.testing_engine(
                        options={"implicit_returning": True}
                    ),
                ]

            for counter, engine in enumerate(eng):
                connection.execute(tbl.insert())
                if "int_y" in tbl.c:
                    eq_(
                        connection.execute(select(tbl.c.int_y)).scalar(),
                        counter + 1,
                    )
                    assert (
                        list(connection.execute(tbl.select()).first()).count(
                            counter + 1
                        )
                        == 1
                    )
                else:
                    assert 1 not in list(
                        connection.execute(tbl.select()).first()
                    )
                connection.execute(tbl.delete())
Example #48
    def _assert_data_autoincrement_returning(self, table):
        engine = engines.testing_engine(options={"implicit_returning": True})

        with self.sql_execution_asserter(engine) as asserter:
            with engine.begin() as conn:

                # execute with explicit id

                r = conn.execute(table.insert(), {"id": 30, "data": "d1"})
                eq_(r.inserted_primary_key, (30, ))

                # execute with prefetch id

                r = conn.execute(table.insert(), {"data": "d2"})
                eq_(r.inserted_primary_key, (1, ))

                # executemany with explicit ids

                conn.execute(
                    table.insert(),
                    [
                        {
                            "id": 31,
                            "data": "d3"
                        },
                        {
                            "id": 32,
                            "data": "d4"
                        },
                    ],
                )

                # executemany, uses SERIAL

                conn.execute(table.insert(), [{"data": "d5"}, {"data": "d6"}])

                # single execute, explicit id, inline

                conn.execute(table.insert().inline(), {"id": 33, "data": "d7"})

                # single execute, inline, uses SERIAL

                conn.execute(table.insert().inline(), {"data": "d8"})

        asserter.assert_(
            DialectSQL(
                "INSERT INTO testtable (id, data) VALUES (:id, :data)",
                {
                    "id": 30,
                    "data": "d1"
                },
            ),
            DialectSQL(
                "INSERT INTO testtable (data) VALUES (:data) RETURNING "
                "testtable.id",
                {"data": "d2"},
            ),
            DialectSQL(
                "INSERT INTO testtable (id, data) VALUES (:id, :data)",
                [{
                    "id": 31,
                    "data": "d3"
                }, {
                    "id": 32,
                    "data": "d4"
                }],
            ),
            DialectSQL(
                "INSERT INTO testtable (data) VALUES (:data)",
                [{
                    "data": "d5"
                }, {
                    "data": "d6"
                }],
            ),
            DialectSQL(
                "INSERT INTO testtable (id, data) VALUES (:id, :data)",
                [{
                    "id": 33,
                    "data": "d7"
                }],
            ),
            DialectSQL("INSERT INTO testtable (data) VALUES (:data)",
                       [{
                           "data": "d8"
                       }]),
        )

        with engine.begin() as conn:
            eq_(
                conn.execute(table.select()).fetchall(),
                [
                    (30, "d1"),
                    (1, "d2"),
                    (31, "d3"),
                    (32, "d4"),
                    (2, "d5"),
                    (3, "d6"),
                    (33, "d7"),
                    (4, "d8"),
                ],
            )
            conn.execute(table.delete())

        # test the same series of events using a reflected version of
        # the table

        m2 = MetaData()
        table = Table(table.name, m2, autoload_with=engine)

        with self.sql_execution_asserter(engine) as asserter:
            with engine.begin() as conn:
                conn.execute(table.insert(), {"id": 30, "data": "d1"})
                r = conn.execute(table.insert(), {"data": "d2"})
                eq_(r.inserted_primary_key, (5, ))
                conn.execute(
                    table.insert(),
                    [
                        {
                            "id": 31,
                            "data": "d3"
                        },
                        {
                            "id": 32,
                            "data": "d4"
                        },
                    ],
                )
                conn.execute(table.insert(), [{"data": "d5"}, {"data": "d6"}])
                conn.execute(table.insert().inline(), {"id": 33, "data": "d7"})
                conn.execute(table.insert().inline(), {"data": "d8"})

        asserter.assert_(
            DialectSQL(
                "INSERT INTO testtable (id, data) VALUES (:id, :data)",
                {
                    "id": 30,
                    "data": "d1"
                },
            ),
            DialectSQL(
                "INSERT INTO testtable (data) VALUES (:data) RETURNING "
                "testtable.id",
                {"data": "d2"},
            ),
            DialectSQL(
                "INSERT INTO testtable (id, data) VALUES (:id, :data)",
                [{
                    "id": 31,
                    "data": "d3"
                }, {
                    "id": 32,
                    "data": "d4"
                }],
            ),
            DialectSQL(
                "INSERT INTO testtable (data) VALUES (:data)",
                [{
                    "data": "d5"
                }, {
                    "data": "d6"
                }],
            ),
            DialectSQL(
                "INSERT INTO testtable (id, data) VALUES (:id, :data)",
                [{
                    "id": 33,
                    "data": "d7"
                }],
            ),
            DialectSQL("INSERT INTO testtable (data) VALUES (:data)",
                       [{
                           "data": "d8"
                       }]),
        )

        with engine.begin() as conn:
            eq_(
                conn.execute(table.select()).fetchall(),
                [
                    (30, "d1"),
                    (5, "d2"),
                    (31, "d3"),
                    (32, "d4"),
                    (6, "d5"),
                    (7, "d6"),
                    (33, "d7"),
                    (8, "d8"),
                ],
            )
            conn.execute(table.delete())
Example #49
    def _test_setinputsizes(
        self, datatype, value, sis_value, set_nchar_flag=False
    ):
        class TestTypeDec(TypeDecorator):
            impl = NullType()

            def load_dialect_impl(self, dialect):
                if dialect.name == "oracle":
                    return dialect.type_descriptor(datatype)
                else:
                    return self.impl

        m = self.metadata
        # Oracle can have only one column of type LONG so we make three
        # tables rather than one table w/ three columns
        t1 = Table("t1", m, Column("foo", datatype))
        t2 = Table(
            "t2", m, Column("foo", NullType().with_variant(datatype, "oracle"))
        )
        t3 = Table("t3", m, Column("foo", TestTypeDec()))
        m.create_all()

        class CursorWrapper(object):
            # cx_oracle cursor can't be modified so we have to
            # invent a whole wrapping scheme

            def __init__(self, connection_fairy):
                self.cursor = connection_fairy.connection.cursor()
                self.mock = mock.Mock()
                connection_fairy.info["mock"] = self.mock

            def setinputsizes(self, *arg, **kw):
                self.mock.setinputsizes(*arg, **kw)
                self.cursor.setinputsizes(*arg, **kw)

            def __getattr__(self, key):
                return getattr(self.cursor, key)

        if set_nchar_flag:
            engine = testing_engine(options={"use_nchar_for_unicode": True})
        else:
            engine = testing.db

        with engine.connect() as conn:
            connection_fairy = conn.connection
            for tab in [t1, t2, t3]:
                with mock.patch.object(
                    connection_fairy,
                    "cursor",
                    lambda: CursorWrapper(connection_fairy),
                ):
                    conn.execute(tab.insert(), {"foo": value})

                if sis_value:
                    eq_(
                        conn.info["mock"].mock_calls,
                        [mock.call.setinputsizes(foo=sis_value)],
                    )
                else:
                    eq_(
                        conn.info["mock"].mock_calls,
                        [mock.call.setinputsizes()],
                    )
Example #50
 def test_lobs_without_convert(self):
     engine = testing_engine(options=dict(auto_convert_lobs=False))
     t = self.tables.z_test
     row = engine.execute(t.select().where(t.c.id == 1)).first()
     eq_(row["data"].read(), "this is text 1")
     eq_(row["bindata"].read(), b("this is binary 1"))
Example #51
 def test_dispose(self):
     eng = testing_engine(options=dict(strategy='threadlocal'))
     result = eng.execute(select([1]))
     eng.dispose()
     eng.execute(select([1]))
Example #52
 def test_default_level(self):
     eng = testing_engine(options=dict())
     isolation_level = eng.dialect.get_isolation_level(
         eng.connect().connection)
     eq_(isolation_level, self._default_isolation_level())
Example #53
 def test_flag_turned_off(self):
     e = engines.testing_engine(options={'implicit_returning': False})
     assert e.dialect.implicit_returning is False
     c = e.connect()
     c.close()
     assert e.dialect.implicit_returning is False
Example #54
 def _engine_fixture(self, length=IDENT_LENGTH):
     eng = engines.testing_engine()
     eng.dialect.max_identifier_length = length
     return eng
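The fixture above pokes dialect.max_identifier_length directly; newer
SQLAlchemy versions (1.3.9+) accept the same limit as a create_engine()
argument. A minimal sketch (URL is a placeholder):

    from sqlalchemy import create_engine

    # caps the length of generated identifiers such as labels and
    # constraint names
    eng = create_engine(
        "oracle://scott:tiger@localhost/xe",
        max_identifier_length=30,
    )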