def test_rowcount_flag(self):
    """Exercise the ``enable_rowcount`` engine option and its
    per-execution override via ``execution_options``."""
    metadata = self.metadata

    # Engine with sane rowcount support turned on.
    engine = engines.testing_engine(options={"enable_rowcount": True})
    assert engine.dialect.supports_sane_rowcount
    metadata.bind = engine
    t = Table("t1", metadata, Column("data", String(10)))
    metadata.create_all()

    t.insert().execute({"data": "d1"}, {"data": "d2"}, {"data": "d3"})
    result = t.update().where(t.c.data == "d2").values(data="d3").execute()
    eq_(result.rowcount, 1)
    result = t.delete().where(t.c.data == "d3").execute()
    eq_(result.rowcount, 2)
    # statement-level override disables rowcount -> sentinel -1
    result = t.delete().execution_options(enable_rowcount=False).execute()
    eq_(result.rowcount, -1)
    engine.dispose()

    # Engine with rowcount support turned off; -1 is reported unless
    # re-enabled per statement.
    engine = engines.testing_engine(options={"enable_rowcount": False})
    assert not engine.dialect.supports_sane_rowcount
    metadata.bind = engine
    t.insert().execute({"data": "d1"}, {"data": "d2"}, {"data": "d3"})
    result = t.update().where(t.c.data == "d2").values(data="d3").execute()
    eq_(result.rowcount, -1)
    result = t.delete().where(t.c.data == "d3").execute()
    eq_(result.rowcount, -1)
    result = t.delete().execution_options(enable_rowcount=True).execute()
    eq_(result.rowcount, 1)
    result.close()
    engine.dispose()
def test_bind_arguments(self):
    """Bind resolution precedence: explicit ``bind`` beats a
    mapper-level bind, which beats the session-wide default."""
    users, Address, addresses, User = (
        self.tables.users,
        self.classes.Address,
        self.tables.addresses,
        self.classes.User,
    )
    mapper(User, users)
    mapper(Address, addresses)

    e1, e2, e3 = [engines.testing_engine() for _ in range(3)]

    sess = Session(e3)
    sess.bind_mapper(User, e1)
    sess.bind_mapper(Address, e2)

    # session default
    assert sess.connection().engine is e3
    # explicit bind always wins
    assert sess.connection(bind=e1).engine is e1
    assert sess.connection(mapper=Address, bind=e1).engine is e1
    # mapper-level binds, resolved from mapper or from clause
    assert sess.connection(mapper=Address).engine is e2
    assert sess.connection(clause=addresses.select()).engine is e2
    # mapper beats clause
    assert (
        sess.connection(mapper=User, clause=addresses.select()).engine is e1
    )
    # explicit bind beats both
    assert (
        sess.connection(
            mapper=User, clause=addresses.select(), bind=e2
        ).engine
        is e2
    )
    sess.close()
def test_foreignkey_missing_insert(self):
    Table("t1", self.metadata, Column("id", Integer, primary_key=True))
    t2 = Table(
        "t2",
        self.metadata,
        Column("id", Integer, ForeignKey("t1.id"), primary_key=True),
    )
    self.metadata.create_all()

    # want to ensure that "null value in column "id" violates not-
    # null constraint" is raised (IntegrityError on psycoopg2, but
    # ProgrammingError on pg8000), and not "ProgrammingError:
    # (ProgrammingError) relationship "t2_id_seq" does not exist".
    # the latter corresponds to autoincrement behavior, which is not
    # the case here due to the foreign key.
    for implicit_returning in (False, True):
        eng = engines.testing_engine(
            options={"implicit_returning": implicit_returning}
        )
        with expect_warnings(
            ".*has no Python-side or server-side default.*"
        ):
            assert_raises(
                (exc.IntegrityError, exc.ProgrammingError),
                eng.execute,
                t2.insert(),
            )
def _test_lastrow_accessor(self, table_, values, assertvalues):
    """Tests the inserted_primary_key and lastrow_has_id() functions."""

    def insert_values(engine, table_, values):
        """
        Inserts a row into a table, returns the full list of values
        INSERTed including defaults that fired off on the DB side and
        detects rows that had defaults and post-fetches.
        """
        # verify implicit_returning is working: when the PK is not
        # fully supplied by the caller, the compiled INSERT should
        # carry a RETURNING clause
        if engine.dialect.implicit_returning:
            ins = table_.insert()
            comp = ins.compile(engine, column_keys=list(values))
            if not set(values).issuperset(
                c.key for c in table_.primary_key
            ):
                is_(bool(comp.returning), True)

        result = engine.execute(table_.insert(), **values)
        ret = values.copy()

        # merge in the primary key values the DB generated
        for col, pk_val in zip(
            table_.primary_key, result.inserted_primary_key
        ):
            ret[col.key] = pk_val

        # if server-side defaults fired, re-SELECT the row to pick
        # them up
        if result.lastrow_has_defaults():
            criterion = and_(
                *[
                    col == pk_val
                    for col, pk_val in zip(
                        table_.primary_key, result.inserted_primary_key
                    )
                ]
            )
            row = engine.execute(table_.select(criterion)).first()
            for c in table_.c:
                ret[c.key] = row[c]
        return ret

    if testing.against("firebird", "postgresql", "oracle", "mssql"):
        assert testing.db.dialect.implicit_returning

    # run against both the RETURNING and non-RETURNING code paths
    # where the dialect supports it
    if testing.db.dialect.implicit_returning:
        test_engines = [
            engines.testing_engine(options={"implicit_returning": False}),
            engines.testing_engine(options={"implicit_returning": True}),
        ]
    else:
        test_engines = [testing.db]

    for engine in test_engines:
        try:
            table_.create(bind=engine, checkfirst=True)
            observed = insert_values(engine, table_, values)
            eq_(observed, assertvalues)
        finally:
            table_.drop(bind=engine)
def setup(self):
    """Create a normal and a parameter-hiding echo engine, plus a
    buffering log handler to capture engine log output."""
    self.eng = engines.testing_engine(options={"echo": True})
    self.no_param_engine = engines.testing_engine(
        options={"echo": True, "hide_parameters": True}
    )
    for eng in (self.eng, self.no_param_engine):
        eng.execute("create table if not exists foo (data string)")
    self.buf = logging.handlers.BufferingHandler(100)
    logging.getLogger("sqlalchemy_1_3.engine").addHandler(self.buf)
def _init_dbs(self):
    """Create the two sqlite shard engines and emit the schema on each."""
    urls = [
        "sqlite:///shard1_%s.db" % provision.FOLLOWER_IDENT,
        "sqlite:///shard2_%s.db" % provision.FOLLOWER_IDENT,
    ]
    shard_engines = [testing_engine(url) for url in urls]
    self.db1, self.db2 = shard_engines
    for db in shard_engines:
        self.metadata.create_all(db)
    self.dbs = list(shard_engines)
    return self.dbs
def test_lobs_without_convert_many_rows(self):
    """With auto_convert_lobs off and arraysize=1, LOB columns come
    back as raw cx_Oracle LOB objects that must be ``.read()``."""
    engine = testing_engine(
        options=dict(auto_convert_lobs=False, arraysize=1)
    )
    result = engine.execute(
        "select id, data, bindata from z_test order by id"
    )
    results = result.fetchall()

    def go():
        observed = [
            dict(
                id=row["id"],
                data=row["data"].read(),
                bindata=row["bindata"].read(),
            )
            for row in results
        ]
        eq_(observed, self.data)

    # this comes from cx_Oracle because these are raw
    # cx_Oracle.Variable objects
    if testing.requires.oracle5x.enabled:
        assert_raises_message(
            testing.db.dialect.dbapi.ProgrammingError,
            "LOB variable no longer valid after subsequent fetch",
            go,
        )
    else:
        go()
def test_large_stream_single_arraysize(self):
    """Binary streams round-trip intact even with arraysize=1."""
    binary_table = self.tables.binary_table
    eng = testing_engine(options={"arraysize": 1})
    rows = eng.execute(
        binary_table.select().order_by(binary_table.c.id)
    ).fetchall()
    expected = [(i, self.stream) for i in range(1, 11)]
    eq_(rows, expected)
def test_no_default_isolation_level(self):
    """A warning (not a failure) is emitted when the isolation level
    cannot be read at first connect."""
    from sqlalchemy_1_3.testing import mock

    engine = engines.testing_engine()

    real_isolation_level = testing.db.dialect.get_isolation_level

    def fake_isolation_level(connection):
        # deliberately shadow the passed-in connection with a mock
        # whose cursor fetches nothing, simulating a server that
        # returns no isolation-level row
        connection = mock.Mock(
            cursor=mock.Mock(
                return_value=mock.Mock(
                    fetchone=mock.Mock(return_value=None)
                )
            )
        )
        return real_isolation_level(connection)

    with mock.patch.object(
        engine.dialect, "get_isolation_level", fake_isolation_level
    ):
        with expect_warnings(
            "Could not retrieve transaction isolation level for MySQL "
            "connection."
        ):
            engine.connect()
def test_per_connection(self):
    """Per-connection isolation_level applies only to that connection
    and is reset when the connection is returned to the pool."""
    from sqlalchemy_1_3.pool import QueuePool

    eng = testing_engine(
        options=dict(poolclass=QueuePool, pool_size=2, max_overflow=0)
    )

    conn_a = eng.connect()
    conn_a = conn_a.execution_options(
        isolation_level=self._non_default_isolation_level()
    )
    conn_b = eng.connect()

    # only conn_a carries the overridden level
    eq_(
        eng.dialect.get_isolation_level(conn_a.connection),
        self._non_default_isolation_level(),
    )
    eq_(
        eng.dialect.get_isolation_level(conn_b.connection),
        self._default_isolation_level(),
    )
    conn_a.close()
    conn_b.close()

    # after return to the pool, fresh checkouts see the default again
    conn_c = eng.connect()
    eq_(
        eng.dialect.get_isolation_level(conn_c.connection),
        self._default_isolation_level(),
    )
    conn_d = eng.connect()
    eq_(
        eng.dialect.get_isolation_level(conn_d.connection),
        self._default_isolation_level(),
    )
    conn_c.close()
    conn_d.close()
def test_levels(self):
    """The ``echo`` flag drives both the _should_log_* shortcuts and
    the effective stdlib logging level."""
    eng = engines.testing_engine()

    # (echo value or None for "leave untouched", expect info?,
    #  expect debug?, level probed via isEnabledFor, probe result,
    #  expected effective level)
    scenarios = [
        (None, False, False, logging.INFO, False, logging.WARN),
        (True, True, False, logging.INFO, True, logging.INFO),
        ("debug", True, True, logging.DEBUG, True, logging.DEBUG),
        (False, False, False, logging.INFO, False, logging.WARN),
    ]
    for echo, info, debug, probe, enabled, effective in scenarios:
        if echo is not None:
            eng.echo = echo
        eq_(eng._should_log_info(), info)
        eq_(eng._should_log_debug(), debug)
        eq_(eng.logger.isEnabledFor(probe), enabled)
        eq_(eng.logger.getEffectiveLevel(), effective)
def test_custom_max_identifier_length(self):
    """An explicit max_identifier_length overrides the dialect's
    detected value."""
    detected = testing.db.dialect.max_identifier_length
    custom = detected + 20
    eng = engines.testing_engine(
        options={"max_identifier_length": custom}
    )
    with eng.connect() as conn:
        eq_(conn.dialect.max_identifier_length, custom)
def test_reset_rollback_two_phase_no_rollback(self):
    # test [ticket:2907], essentially that the
    # TwoPhaseTransaction is given the job of "reset on return"
    # so that picky backends like MySQL correctly clear out
    # their state when a connection is closed without handling
    # the transaction explicitly.
    eng = testing_engine()

    # MySQL raises if you call straight rollback() on
    # a connection with an XID present
    @event.listens_for(eng, "invalidate")
    def conn_invalidated(dbapi_con, con_record, exception):
        dbapi_con.close()
        raise exception

    with eng.connect() as conn:
        rec = conn.connection._connection_record
        raw_dbapi_con = rec.connection
        conn.begin_twophase()
        conn.execute(users.insert(), user_id=1, user_name="user1")

    # the DBAPI connection survived the close (it was not
    # invalidated), meaning reset-on-return succeeded
    assert rec.connection is raw_dbapi_con

    # and the uncommitted two-phase work was rolled back
    with eng.connect() as conn:
        result = conn.execute(
            select([users.c.user_name]).order_by(users.c.user_id)
        )
        eq_(result.fetchall(), [])
def test_coerce_to_unicode(self):
    """With coerce_to_unicode disabled, string results come back as
    bytes; the default engine coerces them to text."""
    engine = testing_engine(options=dict(coerce_to_unicode=False))
    raw = engine.scalar("SELECT 'hello' FROM DUAL")
    assert isinstance(raw, util.binary_type)

    coerced = testing.db.scalar("SELECT 'hello' FROM DUAL")
    assert isinstance(coerced, util.text_type)
def test_seq_nonpk(self):
    """test sequences fire off as defaults on non-pk columns"""
    sometable = self.tables.Manager
    engine = engines.testing_engine(options={"implicit_returning": False})

    with engine.connect() as conn:
        result = conn.execute(sometable.insert(), dict(name="somename"))
        # the sequence-defaulted non-pk column requires a post-fetch
        eq_(result.postfetch_cols(), [sometable.c.obj_id])

        result = conn.execute(sometable.insert(), dict(name="someother"))
        conn.execute(
            sometable.insert(), [{"name": "name3"}, {"name": "name4"}]
        )

        rows = list(
            conn.execute(sometable.select().order_by(sometable.c.id))
        )
        eq_(
            rows,
            [
                (1, "somename", 1),
                (2, "someother", 2),
                (3, "name3", 3),
                (4, "name4", 4),
            ],
        )
def go():
    """Round-trip a small parent/child object graph through a freshly
    named engine, then delete everything and dispose of the engine."""
    engine = engines.testing_engine(
        options={
            "logging_name": "FOO",
            "pool_logging_name": "BAR",
            "use_reaper": False,
        }
    )
    sess = create_session(bind=engine)

    a1 = A(col2="a1")
    a2 = A(col2="a2")
    a3 = A(col2="a3")
    a1.bs.append(B(col2="b1"))
    a1.bs.append(B(col2="b2"))
    a3.bs.append(B(col2="b3"))
    sess.add_all([a1, a2, a3])
    sess.flush()
    sess.expunge_all()

    fetched = sess.query(A).order_by(A.col1).all()
    eq_(
        [
            A(col2="a1", bs=[B(col2="b1"), B(col2="b2")]),
            A(col2="a2", bs=[]),
            A(col2="a3", bs=[B(col2="b3")]),
        ],
        fetched,
    )
    for a in fetched:
        sess.delete(a)
    sess.flush()
    sess.close()
    engine.dispose()
def test_ad_hoc_types(self):
    """test storage of bind processors, result processors
    in dialect-wide registry."""

    from sqlalchemy_1_3.dialects import mysql, postgresql, sqlite
    from sqlalchemy_1_3 import types

    eng = engines.testing_engine()

    # each entry: (type class, *constructor args)
    specs = (
        (types.Integer,),
        (types.String,),
        (types.PickleType,),
        (types.Enum, "a", "b", "c"),
        (sqlite.DATETIME,),
        (postgresql.ENUM, "a", "b", "c"),
        (types.Interval,),
        (postgresql.INTERVAL,),
        (mysql.VARCHAR,),
    )
    for args in specs:

        @profile_memory()
        def go():
            type_ = args[0](*args[1:])
            bp = type_._cached_bind_processor(eng.dialect)
            rp = type_._cached_result_processor(eng.dialect, 0)
            bp, rp  # strong reference

        go()

    # the per-dialect memo cache must not retain the ad-hoc types
    assert not eng.dialect._type_memos
def _init_dbs(self):
    """Create four sqlite shard engines; shard 1 uses a
    SingletonThreadPool, the rest use the default pool."""
    ident = provision.FOLLOWER_IDENT
    db1 = testing_engine(
        "sqlite:///shard1_%s.db" % ident,
        options=dict(poolclass=SingletonThreadPool),
    )
    db2 = testing_engine("sqlite:///shard2_%s.db" % ident)
    db3 = testing_engine("sqlite:///shard3_%s.db" % ident)
    db4 = testing_engine("sqlite:///shard4_%s.db" % ident)
    self.dbs = [db1, db2, db3, db4]
    return self.dbs
def _named_engine(self, **kw):
    """Build an echo engine with fixed engine/pool logging names;
    extra keyword options override the defaults."""
    base = {
        "logging_name": "myenginename",
        "pool_logging_name": "mypoolname",
        "echo": True,
    }
    return engines.testing_engine(options={**base, **kw})
def test_special_encodings(self):
    """Verify the dialect reports the charset configured via DBAPI
    connect_args for both utf8mb4 and utf8."""
    for enc in ["utf8mb4", "utf8"]:
        eng = engines.testing_engine(
            options={"connect_args": {"charset": enc, "use_unicode": 0}}
        )
        conn = eng.connect()
        try:
            eq_(conn.dialect._connection_charset, enc)
        finally:
            # original leaked the connection on each loop iteration;
            # close it so the pool is released before the next engine
            conn.close()
def test_table_names_w_system(self):
    """Excluding tablespace "FOO" must still leave our own tables
    visible to the inspector."""
    engine = testing_engine(options={"exclude_tablespaces": ["FOO"]})
    insp = inspect(engine)
    expected = set(["my_table", "foo_table"])
    eq_(set(insp.get_table_names()).intersection(expected), expected)
def test_trans_commit_reset_agent_broken_ensure(self):
    """Closing a connection whose reset agent (the open transaction)
    was deactivated warns rather than fails — commit-on-return pool."""
    eng = testing_engine(options={"pool_reset_on_return": "commit"})
    conn = eng.connect()
    trans = conn.begin()
    assert conn.connection._reset_agent is trans

    # simulate a transaction that was broken out from under us
    trans.is_active = False
    with expect_warnings("Reset agent is not active"):
        conn.close()
def test_trans_reset_agent_broken_ensure(self):
    """Closing a connection whose reset agent (the open transaction)
    was deactivated warns rather than fails — default pool reset."""
    eng = testing_engine()
    conn = eng.connect()
    trans = conn.begin()
    assert conn.connection._reset_agent is trans

    # simulate a transaction that was broken out from under us
    trans.is_active = False
    with expect_warnings("Reset agent is not active"):
        conn.close()
def _testing_engine(self):
    """Return a fresh engine with first-connect log noise flushed out
    of the capture buffer."""
    eng = engines.testing_engine()
    # do an initial execute to clear out 'first connect'
    # messages
    eng.execute(select([10])).close()
    self.buf.flush()
    return eng
def test_charset_caching(self):
    """The charset detected at reflection time matches what the
    dialect caches on the connection."""
    engine = engines.testing_engine()
    cx = engine.connect()
    meta = MetaData()
    detected = engine.dialect._detect_charset(cx)
    meta.reflect(cx)
    eq_(cx.dialect._connection_charset, detected)
    cx.close()
def test_explode_in_initializer(self):
    """A failure inside dialect.initialize() surfaces as a DBAPIError
    from connect(), not as some secondary AttributeError."""
    engine = engines.testing_engine()

    def broken_initialize(connection):
        connection.execute("select fake_stuff from _fake_table")

    engine.dialect.initialize = broken_initialize

    # raises a DBAPIError, not an AttributeError
    assert_raises(exc.DBAPIError, engine.connect)
def test_executemany_correct_flag_options(self):
    """Each executemany_mode string maps to its corresponding dialect
    flag constant."""
    expectations = [
        (None, EXECUTEMANY_DEFAULT),
        ("batch", EXECUTEMANY_BATCH),
        ("values", EXECUTEMANY_VALUES),
    ]
    for mode, expected_flag in expectations:
        self.engine = engines.testing_engine(
            options={"executemany_mode": mode}
        )
        is_(self.engine.dialect.executemany_mode, expected_flag)
def setup(self):
    """Build an engine whose creator sets the German NLS territory on
    every new DBAPI connection."""
    connect = testing.db.pool._creator

    def _creator():
        conn = connect()
        cursor = conn.cursor()
        # force German locale so number/date formatting differs from
        # the default
        cursor.execute("ALTER SESSION SET NLS_TERRITORY='GERMANY'")
        cursor.close()
        return conn

    self.engine = testing_engine(options={"creator": _creator})
def test_label_length_raise_too_large(self):
    """A label_length beyond the dialect maximum raises ArgumentError
    at connect time."""
    max_ident_length = testing.db.dialect.max_identifier_length
    oversized = max_ident_length + 10
    eng = engines.testing_engine(options={"label_length": oversized})
    assert_raises_message(
        exceptions.ArgumentError,
        "Label length of %d is greater than this dialect's maximum "
        "identifier length of %d" % (oversized, max_ident_length),
        eng.connect,
    )
def test_engine_param_stays(self):
    """An engine-level isolation_level option applies to every
    connection and persists across checkouts."""
    eng = testing_engine()
    # original leaked this connection via eng.connect().connection;
    # hold it so it can be closed
    conn = eng.connect()
    try:
        isolation_level = eng.dialect.get_isolation_level(conn.connection)
    finally:
        conn.close()
    level = self._non_default_isolation_level()
    ne_(isolation_level, level)

    eng = testing_engine(options=dict(isolation_level=level))
    conn = eng.connect()
    try:
        eq_(eng.dialect.get_isolation_level(conn.connection), level)
    finally:
        conn.close()

    # check that it stays
    conn = eng.connect()
    eq_(eng.dialect.get_isolation_level(conn.connection), level)
    conn.close()

    conn = eng.connect()
    eq_(eng.dialect.get_isolation_level(conn.connection), level)
    conn.close()