def test_parent_instance_child_class_apply_after(self):
    """A class-level listener on TargetElement receives all events;
    a listener added to the factory *instance* after the element was
    created receives none (see inline note on _JoinedListener)."""
    l1 = Mock()
    l2 = Mock()
    event.listen(self.TargetElement, "event_one", l2)
    factory = self.TargetFactory()
    element = factory.create()
    element.run_event(1)
    event.listen(factory, "event_one", l1)
    element.run_event(2)
    element.run_event(3)

    # c1 gets no events due to _JoinedListener
    # fixing the "parent" at construction time.
    # this can be changed to be "live" at the cost
    # of performance.
    eq_(
        l1.mock_calls, []
    )
    eq_(
        l2.mock_calls,
        [call(element, 1), call(element, 2), call(element, 3)]
    )
def test_conn_reusable(self):
    """An invalidated connection is not closed; it reconnects
    transparently on the next execute."""
    conn = self.db.connect()

    conn.execute(select([1]))

    eq_(
        self.dbapi.connect.mock_calls,
        [self.mock_connect]
    )

    self.dbapi.shutdown()

    assert_raises(
        tsa.exc.DBAPIError,
        conn.execute, select([1])
    )

    assert not conn.closed
    assert conn.invalidated

    # the underlying DBAPI connection was closed on invalidate
    eq_(
        [c.close.mock_calls for c in self.dbapi.connections],
        [[call()]]
    )

    # test reconnects
    conn.execute(select([1]))
    assert not conn.invalidated

    eq_(
        [c.close.mock_calls for c in self.dbapi.connections],
        [[call()], []]
    )
def test_deferred_map_event_subclass_post_mapping_propagate_two(self):
    """
    1. map only subclass of class
    2. mapper event listen on class, w propagate
    3. event fire should receive event
    """
    users, User = (self.tables.users, self.classes.User)

    class SubUser(User):
        pass

    class SubSubUser(SubUser):
        pass

    m = mapper(SubUser, users)

    canary = Mock()
    # propagate=True so the listener applies to mappers of subclasses,
    # including one mapped after the listen() call
    event.listen(User, "before_insert", canary, propagate=True, raw=True)

    m2 = mapper(SubSubUser, users)

    m.dispatch.before_insert(5, 6, 7)
    eq_(canary.mock_calls, [call(5, 6, 7)])

    m2.dispatch.before_insert(8, 9, 10)
    eq_(canary.mock_calls, [call(5, 6, 7), call(8, 9, 10)])
def test_scalar(self):
    """@validates on a scalar attribute can transform the value and can
    veto an assignment by raising; a vetoed set leaves the attribute
    unchanged."""
    users = self.tables.users
    canary = Mock()

    class User(fixtures.ComparableEntity):
        @validates('name')
        def validate_name(self, key, name):
            canary(key, name)
            # raises AssertionError for 'fred', vetoing the set
            ne_(name, 'fred')
            return name + ' modified'

    mapper(User, users)
    sess = Session()
    u1 = User(name='ed')
    eq_(u1.name, 'ed modified')
    assert_raises(AssertionError, setattr, u1, "name", "fred")
    eq_(u1.name, 'ed modified')
    # validator was invoked for both attempts
    eq_(canary.mock_calls, [call('name', 'ed'), call('name', 'fred')])
    sess.add(u1)
    sess.commit()
    eq_(
        sess.query(User).filter_by(name='ed modified').one(),
        User(name='ed')
    )
def test_propagate(self):
    """propagate=True listeners are carried over by dispatch._update();
    propagate=False listeners are not.  remove() on the origin target
    also removes the propagated copies."""
    Target = self._fixture()

    m1 = Mock()

    t1 = Target()
    t2 = Target()

    event.listen(t1, "event_one", m1, propagate=True)
    event.listen(t1, "event_two", m1, propagate=False)

    # copies propagate=True listeners from t1 onto t2
    t2.dispatch._update(t1.dispatch)

    t1.dispatch.event_one("t1e1x")
    t1.dispatch.event_two("t1e2x")
    t2.dispatch.event_one("t2e1x")
    t2.dispatch.event_two("t2e2x")

    event.remove(t1, "event_one", m1)
    event.remove(t1, "event_two", m1)

    # nothing fires after removal, including t2's propagated copy
    t1.dispatch.event_one("t1e1y")
    t1.dispatch.event_two("t1e2y")
    t2.dispatch.event_one("t2e1y")
    t2.dispatch.event_two("t2e2y")

    eq_(m1.mock_calls, [call('t1e1x'), call('t1e2x'), call('t2e1x')])
def test_instance(self):
    """Bound methods can be listeners; removing one bound method leaves
    other bound-method listeners in place."""
    Target = self._fixture()

    class Foo(object):
        def __init__(self):
            self.mock = Mock()

        def evt(self, arg):
            self.mock(arg)

    listener_a, listener_b = Foo(), Foo()

    for owner in (listener_a, listener_b):
        event.listen(Target, "event_one", owner.evt)

    target_obj = Target()
    target_obj.dispatch.event_one("x")

    event.remove(Target, "event_one", listener_a.evt)

    target_obj.dispatch.event_one("y")

    eq_(listener_a.mock.mock_calls, [call("x")])
    eq_(listener_b.mock.mock_calls, [call("x"), call("y")])
def test_parent_instance_child_class_apply_after(self):
    """Variant where the factory-instance listener added after element
    creation DOES receive subsequent events (live _JoinedListener
    behavior; see inline notes)."""
    l1 = Mock()
    l2 = Mock()
    event.listen(self.TargetElement, "event_one", l2)
    factory = self.TargetFactory()
    element = factory.create()
    element.run_event(1)
    event.listen(factory, "event_one", l1)
    element.run_event(2)
    element.run_event(3)

    # if _JoinedListener fixes .listeners
    # at construction time, then we don't get
    # the new listeners.
    #eq_(l1.mock_calls, [])

    # alternatively, if _JoinedListener shares the list
    # using a @property, then we get them, at the arguable
    # expense of the extra method call to access the .listeners
    # collection
    eq_(
        l1.mock_calls, [call(element, 2), call(element, 3)]
    )

    eq_(
        l2.mock_calls,
        [call(element, 1), call(element, 2), call(element, 3)]
    )
def test_invalidate_trans(self):
    """After a disconnect inside a transaction, execute/commit are
    blocked until the invalid transaction is rolled back, then the
    connection transparently reconnects."""
    conn = self.db.connect()
    trans = conn.begin()
    self.dbapi.shutdown()

    assert_raises(tsa.exc.DBAPIError, conn.execute, select([1]))

    eq_([c.close.mock_calls for c in self.dbapi.connections], [[call()]])

    assert not conn.closed
    assert conn.invalidated
    assert trans.is_active

    assert_raises_message(
        tsa.exc.StatementError,
        "Can't reconnect until invalid transaction is rolled back",
        conn.execute,
        select([1]),
    )

    assert trans.is_active

    assert_raises_message(
        tsa.exc.InvalidRequestError,
        "Can't reconnect until invalid transaction is rolled back",
        trans.commit
    )

    assert trans.is_active

    trans.rollback()
    assert not trans.is_active

    conn.execute(select([1]))
    assert not conn.invalidated

    eq_([c.close.mock_calls for c in self.dbapi.connections],
        [[call()], []])
def test_collection(self):
    """@validates on a collection attribute vetoes bad appends by
    raising; vetoed items are not added to the collection."""
    users, addresses, Address = (self.tables.users,
                                 self.tables.addresses,
                                 self.classes.Address)
    canary = Mock()

    class User(fixtures.ComparableEntity):
        @validates('addresses')
        def validate_address(self, key, ad):
            canary(key, ad)
            # vetoes any address lacking '@'
            assert '@' in ad.email_address
            return ad

    mapper(User, users, properties={
        'addresses': relationship(Address)}
    )
    mapper(Address, addresses)

    sess = Session()
    u1 = User(name='edward')
    a0 = Address(email_address='noemail')
    assert_raises(AssertionError, u1.addresses.append, a0)
    a1 = Address(id=15, email_address='*****@*****.**')
    u1.addresses.append(a1)
    # validator saw both the rejected and the accepted address
    eq_(canary.mock_calls, [call('addresses', a0), call('addresses', a1)])
    sess.add(u1)
    sess.commit()

    eq_(
        sess.query(User).filter_by(name='edward').one(),
        User(name='edward',
             addresses=[Address(email_address='*****@*****.**')])
    )
def test_reconnect(self):
    """test that an 'is_disconnect' condition will invalidate the
    connection, and additionally dispose the previous connection
    pool and recreate."""

    # NOTE: the original body assigned ``db_pool = self.db.pool`` and
    # never used it; the dead local has been removed.

    # make a connection
    conn = self.db.connect()

    # connection works
    conn.execute(select([1]))

    # create a second connection within the pool, which we'll ensure
    # also goes away
    conn2 = self.db.connect()
    conn2.close()

    # two connections opened total now
    assert len(self.dbapi.connections) == 2

    # set it to fail
    self.dbapi.shutdown()

    assert_raises(tsa.exc.DBAPIError, conn.execute, select([1]))

    # assert was invalidated
    assert not conn.closed
    assert conn.invalidated

    # close shouldn't break
    conn.close()

    # ensure one connection closed...
    eq_(
        [c.close.mock_calls for c in self.dbapi.connections],
        [[call()], []],
    )

    conn = self.db.connect()

    eq_(
        [c.close.mock_calls for c in self.dbapi.connections],
        [[call()], [call()], []],
    )

    conn.execute(select([1]))
    conn.close()

    eq_(
        [c.close.mock_calls for c in self.dbapi.connections],
        [[call()], [call()], []],
    )
def test_hanging_connect_within_overflow(self):
    """test that a single connect() call which is hanging
    does not block other connections from proceeding."""

    dbapi = Mock()
    mutex = threading.Lock()

    def hanging_dbapi():
        # simulate a slow connect while holding the mutex queue
        time.sleep(2)
        with mutex:
            return dbapi.connect()

    def fast_dbapi():
        with mutex:
            return dbapi.connect()

    # each thread selects its own connector via a thread-local
    creator = threading.local()

    def create():
        return creator.mock_connector()

    def run_test(name, pool, should_hang):
        if should_hang:
            creator.mock_connector = hanging_dbapi
        else:
            creator.mock_connector = fast_dbapi

        conn = pool.connect()
        conn.operation(name)
        time.sleep(1)
        conn.close()

    p = pool.QueuePool(creator=create, pool_size=2, max_overflow=3)

    threads = [
        threading.Thread(
            target=run_test, args=("success_one", p, False)),
        threading.Thread(
            target=run_test, args=("success_two", p, False)),
        threading.Thread(
            target=run_test, args=("overflow_one", p, True)),
        threading.Thread(
            target=run_test, args=("overflow_two", p, False)),
        threading.Thread(
            target=run_test, args=("overflow_three", p, False))
    ]
    for t in threads:
        t.start()
        time.sleep(.2)

    for t in threads:
        t.join(timeout=join_timeout)

    # the hanging connect ("overflow_one") completes last; the other
    # four proceeded without waiting on it
    eq_(
        dbapi.connect().operation.mock_calls,
        [call("success_one"), call("success_two"),
         call("overflow_two"), call("overflow_three"),
         call("overflow_one")]
    )
def test_remove_instancelevel(self):
    """An instance-level listener registered with add=True (the adapter
    combines the two event args: 5 + 7 -> 12) stops firing once
    removed."""
    listen_one = Mock()
    t1 = self.Target()
    event.listen(t1, "event_one", listen_one, add=True)
    t1.dispatch.event_one(5, 7)
    eq_(listen_one.mock_calls, [call(12)])
    event.remove(t1, "event_one", listen_one)
    t1.dispatch.event_one(10, 5)
    # unchanged after removal
    eq_(listen_one.mock_calls, [call(12)])
def test_parent_class_only(self):
    """A listener on the factory class is applied to elements it
    creates."""
    watcher = Mock()

    event.listen(self.TargetFactory, "event_one", watcher)

    element = self.TargetFactory().create()
    for arg in (1, 2, 3):
        element.run_event(arg)

    eq_(
        watcher.mock_calls,
        [call(element, 1), call(element, 2), call(element, 3)],
    )
def test_bulk_save_mappings_preserve_order(self):
    """bulk_save_objects() groups states into one _bulk_save_mappings
    call per contiguous insert/update run when preserve_order=True
    (the default), vs. one call per operation kind when False."""
    User, = self.classes("User")

    s = Session()

    # commit some object into db
    user1 = User(name="i1")
    user2 = User(name="i2")
    s.add(user1)
    s.add(user2)
    s.commit()

    # make some changes
    user1.name = "u1"
    user3 = User(name="i3")
    s.add(user3)
    user2.name = "u2"

    objects = [user1, user3, user2]

    from sqlalchemy import inspect

    def _bulk_save_mappings(
        mapper,
        mappings,
        isupdate,
        isstates,
        return_defaults,
        update_changed_only,
        render_nulls,
    ):
        # record only what matters: the states and the insert/update flag
        mock_method(list(mappings), isupdate)

    mock_method = mock.Mock()
    with mock.patch.object(s, "_bulk_save_mappings", _bulk_save_mappings):
        s.bulk_save_objects(objects)
        eq_(
            mock_method.mock_calls,
            [
                mock.call([inspect(user1)], True),
                mock.call([inspect(user3)], False),
                mock.call([inspect(user2)], True),
            ],
        )

    mock_method = mock.Mock()
    with mock.patch.object(s, "_bulk_save_mappings", _bulk_save_mappings):
        s.bulk_save_objects(objects, preserve_order=False)
        eq_(
            mock_method.mock_calls,
            [
                mock.call([inspect(user3)], False),
                mock.call([inspect(user1), inspect(user2)], True),
            ],
        )
def test_validator_w_removes(self):
    """include_removes=True validators receive a boolean 'remove' flag
    for both scalar and collection attributes (1.2+/#3896 ordering;
    see inline notes)."""
    users, addresses, Address = (
        self.tables.users,
        self.tables.addresses,
        self.classes.Address,
    )
    canary = Mock()

    class User(fixtures.ComparableEntity):
        @validates("name", include_removes=True)
        def validate_name(self, key, item, remove):
            canary(key, item, remove)
            return item

        @validates("addresses", include_removes=True)
        def validate_address(self, key, item, remove):
            canary(key, item, remove)
            return item

    mapper(User, users, properties={"addresses": relationship(Address)})
    mapper(Address, addresses)

    u1 = User()
    u1.name = "ed"
    u1.name = "mary"
    del u1.name
    a1, a2, a3 = Address(), Address(), Address()
    u1.addresses.append(a1)
    u1.addresses.remove(a1)
    u1.addresses = [a1, a2]
    u1.addresses = [a2, a3]

    eq_(
        canary.mock_calls,
        [
            call("name", "ed", False),
            call("name", "mary", False),
            call("name", "mary", True),
            # append a1
            call("addresses", a1, False),
            # remove a1
            call("addresses", a1, True),
            # set to [a1, a2] - this is two appends
            call("addresses", a1, False),
            call("addresses", a2, False),
            # set to [a2, a3] - this is a remove of a1,
            # append of a3.  the appends are first.
            # in 1.2 due to #3896, we also get 'a2' in the
            # validates as it is part of the set
            call("addresses", a2, False),
            call("addresses", a3, False),
            call("addresses", a1, True),
        ],
    )
def test_kw_ok(self):
    """named=True delivers the event's arguments to the listener as a
    single keyword dictionary."""
    canary = Mock()

    def receiver(**kw):
        canary(kw)

    event.listen(self.TargetFactory, "event_one", receiver, named=True)

    element = self.TargetFactory().create()
    element.run_event(1)
    element.run_event(2)

    expected = [
        call({"target": element, "arg": 1}),
        call({"target": element, "arg": 2}),
    ]
    eq_(canary.mock_calls, expected)
def test_plugin_multiple_url_registration(self):
    """Two plugins named in the same URL are each constructed, each may
    pop its own URL args and mutate kwargs, and both receive the full
    lifecycle hook calls."""
    from sqlalchemy.dialects import sqlite

    global MyEnginePlugin1
    global MyEnginePlugin2

    def side_effect_1(url, kw):
        eq_(kw, {"logging_name": "foob"})
        kw["logging_name"] = "bar"
        # consume this plugin's private URL argument
        url.query.pop("myplugin1_arg", None)
        return MyEnginePlugin1

    def side_effect_2(url, kw):
        url.query.pop("myplugin2_arg", None)
        return MyEnginePlugin2

    MyEnginePlugin1 = Mock(side_effect=side_effect_1)
    MyEnginePlugin2 = Mock(side_effect=side_effect_2)

    plugins.register("engineplugin1", __name__, "MyEnginePlugin1")
    plugins.register("engineplugin2", __name__, "MyEnginePlugin2")

    e = create_engine(
        "sqlite:///?plugin=engineplugin1&foo=bar&myplugin1_arg=bat"
        "&plugin=engineplugin2&myplugin2_arg=hoho",
        logging_name="foob",
    )
    eq_(e.dialect.name, "sqlite")
    eq_(e.logging_name, "bar")

    # plugin args are removed from URL.
    eq_(e.url.query, {"foo": "bar"})

    assert isinstance(e.dialect, sqlite.dialect)

    eq_(
        MyEnginePlugin1.mock_calls,
        [
            call(url.make_url("sqlite:///?foo=bar"), {}),
            call.handle_dialect_kwargs(sqlite.dialect, mock.ANY),
            call.handle_pool_kwargs(mock.ANY, {"dialect": e.dialect}),
            call.engine_created(e),
        ],
    )

    eq_(
        MyEnginePlugin2.mock_calls,
        [
            call(url.make_url("sqlite:///?foo=bar"), {}),
            call.handle_dialect_kwargs(sqlite.dialect, mock.ANY),
            call.handle_pool_kwargs(mock.ANY, {"dialect": e.dialect}),
            call.engine_created(e),
        ],
    )
def test_parent_events_child_no_events(self):
    """A class-level listener on the element class fires for elements
    created by a factory that itself has no listeners."""
    watcher = Mock()
    factory = self.TargetFactory()

    event.listen(self.TargetElement, "event_one", watcher)

    element = factory.create()
    for arg in (1, 2, 3):
        element.run_event(arg)

    eq_(
        watcher.mock_calls,
        [call(element, 1), call(element, 2), call(element, 3)],
    )
def test_detach(self):
    """A detached connection is removed from pool accounting: the pool
    opens a fresh DBAPI connection for the next checkout, and closing
    the detached connection closes its DBAPI connection directly."""
    dbapi, p = self._queuepool_dbapi_fixture(pool_size=1, max_overflow=0)

    c1 = p.connect()
    c1.detach()

    c2 = p.connect()
    # two raw connects despite pool_size=1: detach freed the slot
    eq_(dbapi.connect.mock_calls, [call("foo.db"), call("foo.db")])

    c1_con = c1.connection
    assert c1_con is not None
    eq_(c1_con.close.call_count, 0)
    c1.close()
    # detached close goes straight to the DBAPI connection
    eq_(c1_con.close.call_count, 1)
def test_remove_wrapped_named(self):
    """remove() works on a listener that is doubly adapted (wrapped
    fixture + named=True keyword conversion)."""
    Target = self._wrapped_fixture()

    listen_one = Mock()
    t1 = Target()
    event.listen(t1, "event_one", listen_one, named=True)
    t1.dispatch.event_one("t1")

    eq_(listen_one.mock_calls, [call(x="adapted t1")])

    event.remove(t1, "event_one", listen_one)
    t1.dispatch.event_one("t2")

    # unchanged after removal
    eq_(listen_one.mock_calls, [call(x="adapted t1")])
def test_listen_override(self):
    """add=True and plain listeners coexist on the same event: the
    add=True adapter combines the two args into one (5+7 -> 12,
    10+5 -> 15) while the plain listener gets them unmodified."""
    listen_one = Mock()
    listen_two = Mock()

    event.listen(self.Target, "event_one", listen_one, add=True)
    event.listen(self.Target, "event_one", listen_two)

    t1 = self.Target()
    t1.dispatch.event_one(5, 7)
    t1.dispatch.event_one(10, 5)

    eq_(listen_one.mock_calls, [call(12), call(15)])
    eq_(listen_two.mock_calls, [call(5, 7), call(10, 5)])
def test_singleton_behavior_within_decl(self):
    """A @declared_attr mixin callable is invoked exactly once per
    mapped class, so sibling declared_attrs on the same class see the
    same Column object (see inline note)."""
    counter = mock.Mock()

    class Mixin(object):
        @declared_attr
        def my_prop(cls):
            counter(cls)
            return Column('x', Integer)

    class A(Base, Mixin):
        __tablename__ = 'a'
        id = Column(Integer, primary_key=True)

        @declared_attr
        def my_other_prop(cls):
            return column_property(cls.my_prop + 5)

    eq_(counter.mock_calls, [mock.call(A)])

    class B(Base, Mixin):
        __tablename__ = 'b'
        id = Column(Integer, primary_key=True)

        @declared_attr
        def my_other_prop(cls):
            return column_property(cls.my_prop + 5)

    eq_(
        counter.mock_calls,
        [mock.call(A), mock.call(B)])

    # this is why we need singleton-per-class behavior.  We get
    # an un-bound "x" column otherwise here, because my_prop() generates
    # multiple columns.
    a_col = A.my_other_prop.__clause_element__().element.left
    b_col = B.my_other_prop.__clause_element__().element.left
    is_(a_col.table, A.__table__)
    is_(b_col.table, B.__table__)
    is_(a_col, A.__table__.c.x)
    is_(b_col, B.__table__.c.x)

    s = Session()
    self.assert_compile(
        s.query(A),
        "SELECT a.x AS a_x, a.x + :x_1 AS anon_1, a.id AS a_id FROM a"
    )
    self.assert_compile(
        s.query(B),
        "SELECT b.x AS b_x, b.x + :x_1 AS anon_1, b.id AS b_id FROM b"
    )
def _assert_retaining(self, engine, flag):
    """Assert that commit() and rollback() on the DBAPI connection are
    each invoked once with ``flag`` (the 'retaining' argument)."""
    conn = engine.connect()
    trans = conn.begin()
    trans.commit()
    eq_(
        engine.dialect.dbapi.connect.return_value.commit.mock_calls,
        [call(flag)]
    )

    trans = conn.begin()
    trans.rollback()
    eq_(
        engine.dialect.dbapi.connect.return_value.rollback.mock_calls,
        [call(flag)]
    )
def test_parent_class_child_instance_apply_after(self):
    """An instance-level listener added mid-stream only sees the later
    events; the class-level listener sees all of them."""
    class_level = Mock()
    instance_level = Mock()

    event.listen(self.TargetFactory, "event_one", class_level)
    element = self.TargetFactory().create()

    element.run_event(1)

    event.listen(element, "event_one", instance_level)
    element.run_event(2)
    element.run_event(3)

    eq_(
        class_level.mock_calls,
        [call(element, 1), call(element, 2), call(element, 3)],
    )
    eq_(instance_level.mock_calls, [call(element, 2), call(element, 3)])
def test_plugin_registration(self):
    """A registered plugin named in the URL is constructed with the URL
    and kwargs (which it may mutate) and receives the lifecycle hook
    calls; the plugin arg is stripped from the resulting URL."""
    from sqlalchemy.dialects import sqlite

    global MyEnginePlugin

    def side_effect(url, kw):
        eq_(kw, {"logging_name": "foob"})
        # plugin mutates the engine kwargs
        kw['logging_name'] = 'bar'
        return MyEnginePlugin

    MyEnginePlugin = Mock(side_effect=side_effect)

    plugins.register("engineplugin", __name__, "MyEnginePlugin")

    e = create_engine(
        "sqlite:///?plugin=engineplugin&foo=bar", logging_name='foob')
    eq_(e.dialect.name, "sqlite")
    eq_(e.logging_name, "bar")
    assert isinstance(e.dialect, sqlite.dialect)

    eq_(
        MyEnginePlugin.mock_calls,
        [
            call(e.url, {}),
            call.handle_dialect_kwargs(sqlite.dialect, mock.ANY),
            call.handle_pool_kwargs(mock.ANY, {"dialect": e.dialect}),
            call.engine_created(e)
        ]
    )

    # 'plugin=engineplugin' was removed from the URL
    eq_(
        str(MyEnginePlugin.mock_calls[0][1][0]),
        "sqlite:///?foo=bar"
    )
def _test_legacy_accept_kw(self, target, canary):
    """Fire event_four with an extra keyword; the legacy-signature
    listener receives the first two args plus the kw dict."""
    target.dispatch.event_four(4, 5, 6, 7, foo="bar")

    expected = [call(4, 5, {"foo": "bar"})]
    eq_(canary.mock_calls, expected)
def test_cursor_explode(self):
    """The close path logs 'Error closing cursor' through the pool
    logger rather than raising."""
    db = self._fixture(False, False)
    connection = db.connect()

    rs = connection.execute("select foo")
    rs.close()
    connection.close()

    expected = [call("Error closing cursor", exc_info=True)]
    eq_(db.pool.logger.error.mock_calls, expected)
def test_on_bulk_delete_hook(self):
    """Query.delete() emits after_begin once and after_bulk_delete once;
    the legacy four-argument listener form receives the expanded
    (session, query, context, result) arguments."""
    User, users = self.classes.User, self.tables.users

    sess = Session()
    canary = Mock()

    event.listen(sess, "after_begin", canary.after_begin)
    event.listen(sess, "after_bulk_delete", canary.after_bulk_delete)

    def legacy(ses, qry, ctx, res):
        canary.after_bulk_delete_legacy(ses, qry, ctx, res)
    event.listen(sess, "after_bulk_delete", legacy)

    mapper(User, users)

    sess.query(User).delete()

    eq_(
        canary.after_begin.call_count,
        1
    )
    eq_(
        canary.after_bulk_delete.call_count,
        1
    )

    # the modern hook receives a single state object carrying the
    # session/query/context/result
    upd = canary.after_bulk_delete.mock_calls[0][1][0]
    eq_(
        upd.session, sess
    )
    eq_(
        canary.after_bulk_delete_legacy.mock_calls,
        [call(sess, upd.query, upd.context, upd.result)]
    )
def test_reconnect(self):
    """NullPool creates a fresh DBAPI connection after an
    invalidation."""
    dbapi = MockDBAPI()
    p = pool.NullPool(creator=lambda: dbapi.connect('foo.db'))
    c1 = p.connect()
    c1.close()
    c1 = None

    c1 = p.connect()
    c1.invalidate()
    c1 = None

    c1 = p.connect()
    # at least two raw connects occurred across the invalidation
    dbapi.connect.assert_has_calls([
        call('foo.db'),
        call('foo.db')], any_order=True)
def test_standard_accept_has_legacies(self):
    """A listener registered plainly on event_three (which also has
    legacy forms) receives the two fired arguments as-is."""
    watcher = Mock()

    event.listen(self.TargetOne, "event_three", watcher)

    self.TargetOne().dispatch.event_three(4, 5)

    eq_(watcher.mock_calls, [call(4, 5)])
def test_insert(self, connection):
    """executemany routes through psycopg2 extras.execute_values or
    extras.execute_batch depending on executemany_mode, with the
    dialect's configured page sizes."""
    from psycopg2 import extras

    values_page_size = connection.dialect.executemany_values_page_size
    batch_page_size = connection.dialect.executemany_batch_page_size

    if connection.dialect.executemany_mode & EXECUTEMANY_VALUES:
        meth = extras.execute_values
        stmt = "INSERT INTO data (x, y) VALUES %s"
        expected_kwargs = {
            "template": "(%(x)s, %(y)s)",
            "page_size": values_page_size,
            "fetch": False,
        }
    elif connection.dialect.executemany_mode & EXECUTEMANY_BATCH:
        meth = extras.execute_batch
        stmt = "INSERT INTO data (x, y) VALUES (%(x)s, %(y)s)"
        expected_kwargs = {"page_size": batch_page_size}
    else:
        assert False

    with mock.patch.object(
        extras, meth.__name__, side_effect=meth
    ) as mock_exec:
        connection.execute(
            self.tables.data.insert(),
            [
                {"x": "x1", "y": "y1"},
                {"x": "x2", "y": "y2"},
                {"x": "x3", "y": "y3"},
            ],
        )

        eq_(
            connection.execute(select(self.tables.data)).fetchall(),
            [
                (1, "x1", "y1", 5),
                (2, "x2", "y2", 5),
                (3, "x3", "y3", 5),
            ],
        )
        eq_(
            mock_exec.mock_calls,
            [
                mock.call(
                    mock.ANY,
                    stmt,
                    (
                        {"x": "x1", "y": "y1"},
                        {"x": "x2", "y": "y2"},
                        {"x": "x3", "y": "y3"},
                    ),
                    **expected_kwargs
                )
            ],
        )
def test_validator_w_removes(self):
    """include_removes=True validators receive a boolean 'remove' flag
    for both scalar and collection events (pre-#3896 ordering; see
    inline notes)."""
    users, addresses, Address = (self.tables.users,
                                 self.tables.addresses,
                                 self.classes.Address)
    canary = Mock()

    class User(fixtures.ComparableEntity):
        @validates('name', include_removes=True)
        def validate_name(self, key, item, remove):
            canary(key, item, remove)
            return item

        @validates('addresses', include_removes=True)
        def validate_address(self, key, item, remove):
            canary(key, item, remove)
            return item

    mapper(User, users, properties={'addresses': relationship(Address)})
    mapper(Address, addresses)

    u1 = User()
    u1.name = "ed"
    u1.name = "mary"
    del u1.name
    a1, a2, a3 = Address(), Address(), Address()
    u1.addresses.append(a1)
    u1.addresses.remove(a1)
    u1.addresses = [a1, a2]
    u1.addresses = [a2, a3]

    eq_(
        canary.mock_calls,
        [
            call('name', 'ed', False),
            call('name', 'mary', False),
            call('name', 'mary', True),
            # append a1
            call('addresses', a1, False),
            # remove a1
            call('addresses', a1, True),
            # set to [a1, a2] - this is two appends
            call('addresses', a1, False),
            call('addresses', a2, False),
            # set to [a2, a3] - this is a remove of a1,
            # append of a3.  the appends are first.
            call('addresses', a3, False),
            call('addresses', a1, True),
        ])
def setup(self):
    """Build a mock-DBAPI postgresql engine (no real I/O:
    _initialize=False) and patch its disconnect detection to recognize
    MockDisconnect."""
    self.dbapi = MockDBAPI()
    self.db = testing_engine(
        'postgresql://*****:*****@localhost/test',
        options=dict(module=self.dbapi, _initialize=False))
    # expected signature of the mock DBAPI connect() call
    self.mock_connect = call(host='localhost', password='******',
                             user='******', database='test')
    # monkeypatch disconnect checker
    self.db.dialect.is_disconnect = \
        lambda e, conn, cursor: isinstance(e, MockDisconnect)
def test_parent_class_child_instance_apply_after(self):
    """The class-level listener sees all events; the instance-level
    listener added after the first event sees only the later ones."""
    l1 = Mock()
    l2 = Mock()
    event.listen(self.TargetFactory, "event_one", l1)
    element = self.TargetFactory().create()

    element.run_event(1)

    event.listen(element, "event_one", l2)
    element.run_event(2)
    element.run_event(3)

    eq_(
        l1.mock_calls,
        [call(element, 1), call(element, 2), call(element, 3)],
    )
    eq_(l2.mock_calls, [call(element, 2), call(element, 3)])
def test_cursor_shutdown_in_initialize(self):
    """connect() raises the 'Exception attempting to detect' SAWarning
    and the cursor-close failure is logged via the pool logger."""
    db = self._fixture(True, True)

    assert_raises_message(
        exc.SAWarning, "Exception attempting to detect", db.connect
    )

    expected = [call('Error closing cursor', exc_info=True)]
    eq_(db.pool.logger.error.mock_calls, expected)
async def test_codec_registration(
    self, metadata, async_testing_engine, methname
):
    """test new hooks added for #7284"""
    engine = async_testing_engine()
    with mock.patch.object(engine.dialect, methname) as codec_meth:
        conn = await engine.connect()
        # the raw adapted connection is what the hook receives
        adapted_conn = (await conn.get_raw_connection()).connection
        await conn.close()

    eq_(codec_meth.mock_calls, [mock.call(adapted_conn)])
def test_double_event_wrapped(self):
    """Registering the same listener twice on a wrapped fixture results
    in a single registration; one remove() suffices."""
    # this is issue #3199
    Target = self._wrapped_fixture()

    listen_one = Mock()
    t1 = Target()

    event.listen(t1, "event_one", listen_one)
    event.listen(t1, "event_one", listen_one)

    t1.dispatch.event_one("t1")

    # doubles are eliminated
    eq_(listen_one.mock_calls, [call("adapted t1")])

    # only one remove needed
    event.remove(t1, "event_one", listen_one)
    t1.dispatch.event_one("t2")

    eq_(listen_one.mock_calls, [call("adapted t1")])
def test_modified_event(self):
    """In-place mutation of the mutable dict fires the 'modified'
    attribute event with an OP_MODIFIED Event token."""
    canary = mock.Mock()
    event.listen(Foo.data, "modified", canary)

    f1 = Foo(data={"a": "b"})
    f1.data["a"] = "c"

    eq_(
        canary.mock_calls,
        [
            mock.call(
                f1,
                attributes.Event(Foo.data.impl, attributes.OP_MODIFIED)
            )
        ]
    )
def test_kw_accept_plus_kw(self):
    """named=True merges positional event args and extra keywords into
    one keyword dict for the listener."""
    target_cls = self._fixture()
    captured = Mock()

    @event.listens_for(target_cls, "event_two", named=True)
    def receive(**kw):
        captured(kw)

    target_cls().dispatch.event_two(4, 5, z=8, q=5)

    eq_(captured.mock_calls, [call({"x": 4, "y": 5, "z": 8, "q": 5})])
def test_copy_dep_warning(self, cls):
    """The deprecated copy() method warns and forwards its arguments to
    _copy()."""
    # bare instance: bypass __init__, we only need the bound method
    obj = cls.__new__(cls)
    with mock.patch.object(cls, "_copy") as _copy:
        with testing.expect_deprecated(
            r"The %s\(\) method is deprecated" % cls.copy.__qualname__
        ):
            obj.copy(schema="s", target_table="tt", arbitrary="arb")

    eq_(
        _copy.mock_calls,
        [mock.call(target_table="tt", schema="s", arbitrary="arb")],
    )
def test_insert_w_newlines(self, connection):
    """execute_values handles a statement whose VALUES template
    contains an embedded newline (from a scalar subquery)."""
    from psycopg2 import extras

    t = self.tables.data

    ins = (
        t.insert()
        .inline()
        .values(
            id=bindparam("id"),
            x=select(literal_column("5"))
            .select_from(self.tables.data)
            .scalar_subquery(),
            y=bindparam("y"),
            z=bindparam("z"),
        )
    )
    # compiled SQL has a newline in it
    eq_(
        str(ins.compile(testing.db)),
        "INSERT INTO data (id, x, y, z) VALUES (%(id)s, "
        "(SELECT 5 \nFROM data), %(y)s, %(z)s)",
    )
    meth = extras.execute_values
    with mock.patch.object(
        extras, "execute_values", side_effect=meth
    ) as mock_exec:
        connection.execute(
            ins,
            [
                {"id": 1, "y": "y1", "z": 1},
                {"id": 2, "y": "y2", "z": 2},
                {"id": 3, "y": "y3", "z": 3},
            ],
        )

        eq_(
            mock_exec.mock_calls,
            [
                mock.call(
                    mock.ANY,
                    "INSERT INTO data (id, x, y, z) VALUES %s",
                    (
                        {"id": 1, "y": "y1", "z": 1},
                        {"id": 2, "y": "y2", "z": 2},
                        {"id": 3, "y": "y3", "z": 3},
                    ),
                    template="(%(id)s, (SELECT 5 \nFROM data), "
                    "%(y)s, %(z)s)",
                    fetch=False,
                    page_size=connection.dialect
                    .executemany_values_page_size,
                )
            ],
        )
def test_legacy_accept_from_method(self):
    """A bound method taking only two arguments is accepted as a
    listener for event_three and receives the first two fired args."""
    canary = Mock()

    class MyClass(object):
        def handler1(self, x, y):
            canary(x, y)

    event.listen(self.TargetOne, "event_three", MyClass().handler1)

    self.TargetOne().dispatch.event_three(4, 5, 6, 7)

    eq_(canary.mock_calls, [call(4, 5)])
def test_kw_ok(self):
    """named=True delivers the event's arguments as a keyword dict."""
    l1 = Mock()

    def listen(**kw):
        l1(kw)

    event.listen(self.TargetFactory, "event_one", listen, named=True)
    element = self.TargetFactory().create()
    element.run_event(1)
    element.run_event(2)
    eq_(
        l1.mock_calls,
        [
            call({
                "target": element,
                "arg": 1
            }),
            call({
                "target": element,
                "arg": 2
            }),
        ],
    )
def test_standard_accept_has_legacies(self):
    """Plain registration on event_three receives the fired arguments
    unchanged even though legacy signatures exist for the event."""
    canary = Mock()
    event.listen(self.TargetOne, "event_three", canary)

    target = self.TargetOne()
    target.dispatch.event_three(4, 5)

    eq_(canary.mock_calls, [call(4, 5)])
def test_no_cache_for_event(self):
    """With caching disabled by the fixture, the query-modifying event
    fires again for the lazy load after expire."""
    m1 = modify_query_fixture(False)

    User, Address = self._o2m_fixture()

    sess = Session()
    u1 = sess.query(User).filter(User.id == 7).first()

    # triggers the lazy load of addresses
    u1.addresses

    eq_(m1.mock_calls, [mock.call(User), mock.call(Address)])

    sess.expire(u1, ["addresses"])

    # re-load fires the event again (no cached plan reused)
    u1.addresses
    eq_(
        m1.mock_calls,
        [mock.call(User), mock.call(Address), mock.call(Address)],
    )
def test_insert_no_page_size(self):
    """Without explicit page-size options, execute_batch/execute_values
    is called without a page_size argument."""
    from psycopg2 import extras
    eng = self.engine
    if eng.dialect.executemany_mode is EXECUTEMANY_BATCH:
        meth = extras.execute_batch
        stmt = "INSERT INTO data (x, y) VALUES (%(x)s, %(y)s)"
        expected_kwargs = {}
    else:
        meth = extras.execute_values
        stmt = "INSERT INTO data (x, y) VALUES %s"
        expected_kwargs = {"template": "(%(x)s, %(y)s)"}

    with mock.patch.object(
        extras, meth.__name__, side_effect=meth
    ) as mock_exec:
        with eng.connect() as conn:
            conn.execute(
                self.tables.data.insert(),
                [
                    {
                        "x": "x1",
                        "y": "y1"
                    },
                    {
                        "x": "x2",
                        "y": "y2"
                    },
                    {
                        "x": "x3",
                        "y": "y3"
                    },
                ],
            )

            eq_(
                mock_exec.mock_calls,
                [
                    mock.call(mock.ANY, stmt, (
                        {
                            "x": "x1",
                            "y": "y1"
                        },
                        {
                            "x": "x2",
                            "y": "y2"
                        },
                        {
                            "x": "x3",
                            "y": "y3"
                        },
                    ), **expected_kwargs)
                ],
            )
def test_complex_legacy_accept(self):
    """A four-argument listener on event_six receives the two fired
    values plus the adapter-supplied values 9 and 20."""
    captured = Mock()

    @event.listens_for(self.TargetOne, "event_six")
    def handler1(x, y, z, q):
        captured(x, y, z, q)

    self.TargetOne().dispatch.event_six(4, 5)

    eq_(captured.mock_calls, [call(4, 5, 9, 20)])
def test_owner_database_pairs_dont_use_for_same_db(self):
    """_switch_db checks the current database (via 'select db_name()')
    and, when it matches the target, runs the callable without issuing
    any USE statements."""
    dialect = mssql.dialect()

    identifier = "my_db.some_schema"
    schema, owner = base._owner_plus_db(dialect, identifier)

    # connection reports the current db is already 'my_db'
    mock_connection = mock.Mock(
        dialect=dialect, scalar=mock.Mock(return_value="my_db")
    )
    mock_lambda = mock.Mock()
    base._switch_db(schema, mock_connection, mock_lambda, "x", y="bar")
    # only the db_name() probe occurred; no USE was emitted
    eq_(mock_connection.mock_calls, [mock.call.scalar("select db_name()")])
    eq_(mock_lambda.mock_calls, [mock.call("x", y="bar")])
def test_partial_kw_accept(self):
    """A named=True handler may name a subset of event arguments
    positionally and gather the remainder via **kw."""
    target_cls = self._fixture()
    captured = Mock()

    @event.listens_for(target_cls, "event_five", named=True)
    def handler1(z, y, **kw):
        captured(z, y, kw)

    target_cls().dispatch.event_five(4, 5, 6, 7)

    eq_(captured.mock_calls, [call(6, 5, {"x": 4, "q": 7})])
def test_kw_accept(self):
    """named=True delivers the positional event args as keywords."""
    target_cls = self._fixture()
    captured = Mock()

    @event.listens_for(target_cls, "event_one", named=True)
    def receive(**kw):
        captured(kw)

    target_cls().dispatch.event_one(4, 5)

    eq_(captured.mock_calls, [call({"x": 4, "y": 5})])
def test_kw_accept_wrapped(self):
    """named=True composes with the wrapped fixture: keyword values
    arrive in their adapted form."""
    target_cls = self._wrapped_fixture()
    captured = Mock()

    @event.listens_for(target_cls, "event_one", named=True)
    def receive(**kw):
        captured(kw)

    target_cls().dispatch.event_one(4, 5)

    eq_(captured.mock_calls, [call({'y': 'adapted 5', 'x': 'adapted 4'})])
async def test_sync_before_commit(self, async_session):
    """A 'before_commit' listener on the underlying sync session fires
    when the async session's begin() block commits."""
    watcher = mock.Mock()
    event.listen(async_session.sync_session, "before_commit", watcher)

    async with async_session.begin():
        pass

    eq_(watcher.mock_calls, [mock.call(async_session.sync_session)])
def test_invalidate_trans(self):
    """After a disconnect inside a transaction, execute and commit are
    refused until rollback, after which the connection reconnects."""
    conn = self.db.connect()
    trans = conn.begin()
    self.dbapi.shutdown()

    assert_raises(
        tsa.exc.DBAPIError,
        conn.execute, select([1])
    )

    eq_(
        [c.close.mock_calls for c in self.dbapi.connections],
        [[call()]]
    )

    assert not conn.closed
    assert conn.invalidated
    assert trans.is_active

    assert_raises_message(
        tsa.exc.StatementError,
        "Can't reconnect until invalid transaction is rolled back",
        conn.execute, select([1])
    )

    assert trans.is_active

    assert_raises_message(
        tsa.exc.InvalidRequestError,
        "Can't reconnect until invalid transaction is "
        "rolled back",
        trans.commit
    )

    assert trans.is_active

    trans.rollback()
    assert not trans.is_active

    # rollback cleared the invalid state; reconnect succeeds
    conn.execute(select([1]))
    assert not conn.invalidated

    eq_(
        [c.close.mock_calls for c in self.dbapi.connections],
        [[call()], []]
    )
def test_insert_page_size(self):
    """Explicit executemany_*_page_size engine options are passed
    through to execute_values / execute_batch."""
    from psycopg2 import extras

    opts = self.options.copy()
    opts["executemany_batch_page_size"] = 500
    opts["executemany_values_page_size"] = 1000

    eng = engines.testing_engine(options=opts)

    if eng.dialect.executemany_mode & EXECUTEMANY_VALUES:
        meth = extras.execute_values
        stmt = "INSERT INTO data (x, y) VALUES %s"
        expected_kwargs = {
            "fetch": False,
            "page_size": 1000,
            "template": "(%(x)s, %(y)s)",
        }
    elif eng.dialect.executemany_mode & EXECUTEMANY_BATCH:
        meth = extras.execute_batch
        stmt = "INSERT INTO data (x, y) VALUES (%(x)s, %(y)s)"
        expected_kwargs = {"page_size": 500}
    else:
        assert False

    with mock.patch.object(
        extras, meth.__name__, side_effect=meth
    ) as mock_exec:
        with eng.connect() as conn:
            conn.execute(
                self.tables.data.insert(),
                [
                    {"x": "x1", "y": "y1"},
                    {"x": "x2", "y": "y2"},
                    {"x": "x3", "y": "y3"},
                ],
            )

            eq_(
                mock_exec.mock_calls,
                [
                    mock.call(
                        mock.ANY,
                        stmt,
                        (
                            {"x": "x1", "y": "y1"},
                            {"x": "x2", "y": "y2"},
                            {"x": "x3", "y": "y3"},
                        ),
                        **expected_kwargs
                    )
                ],
            )
def test_legacy_accept(self):
    """A two-argument listener is accepted on event_three and receives
    only the first two of the four fired arguments."""
    captured = Mock()

    @event.listens_for(self.TargetOne, "event_three")
    def handler1(x, y):
        captured(x, y)

    self.TargetOne().dispatch.event_three(4, 5, 6, 7)

    eq_(captured.mock_calls, [call(4, 5)])
def test_kw_accept_has_legacies(self):
    """named=True on an event with legacy forms still delivers the full
    modern argument set as a keyword dict."""
    captured = Mock()

    @event.listens_for(self.TargetOne, "event_three", named=True)
    def receive(**kw):
        captured(kw)

    self.TargetOne().dispatch.event_three(4, 5, 6, 7)

    eq_(captured.mock_calls, [call({"x": 4, "y": 5, "z": 6, "q": 7})])
def test_kw_accept_plus_kw_has_legacies(self):
    """named=True merges positional args and extra keywords on an event
    that also has legacy signatures."""
    captured = Mock()

    @event.listens_for(self.TargetOne, "event_four", named=True)
    def receive(**kw):
        captured(kw)

    self.TargetOne().dispatch.event_four(4, 5, 6, 7, foo="bar")

    eq_(
        captured.mock_calls,
        [call({"x": 4, "y": 5, "z": 6, "q": 7, "foo": "bar"})],
    )
async def test_sync_before_cursor_execute_engine(self, async_engine):
    """A 'before_cursor_execute' listener on the sync engine fires for
    statements executed through the async facade, receiving the sync
    connection."""
    canary = mock.Mock()

    event.listen(
        async_engine.sync_engine, "before_cursor_execute", canary
    )

    async with async_engine.connect() as conn:
        sync_conn = conn.sync_connection
        await conn.execute(text("select 1"))

    eq_(
        canary.mock_calls,
        [mock.call(sync_conn, mock.ANY, "select 1", (), mock.ANY, False)],
    )
def test_conn_reusable(self):
    """An invalidated connection is not closed; it reconnects
    transparently on the next execute."""
    conn = self.db.connect()

    conn.execute(select([1]))

    eq_(self.dbapi.connect.mock_calls, [self.mock_connect])

    self.dbapi.shutdown()

    assert_raises(tsa.exc.DBAPIError, conn.execute, select([1]))

    assert not conn.closed
    assert conn.invalidated

    # underlying DBAPI connection was closed on invalidate
    eq_([c.close.mock_calls for c in self.dbapi.connections], [[call()]])

    # test reconnects
    conn.execute(select([1]))
    assert not conn.invalidated

    eq_([c.close.mock_calls for c in self.dbapi.connections],
        [[call()], []])
def test_plugin_url_registration(self):
    """A plugin with an update_url hook receives the original URL
    (including its private args) and strips them via
    difference_update_query."""
    from sqlalchemy.dialects import sqlite

    global MyEnginePlugin

    def side_effect(url, kw):
        # plugin sees the URL before its args are stripped
        eq_(
            url.query,
            {
                "plugin": "engineplugin",
                "myplugin_arg": "bat",
                "foo": "bar",
            },
        )
        eq_(kw, {"logging_name": "foob"})
        kw["logging_name"] = "bar"
        return MyEnginePlugin

    def update_url(url):
        return url.difference_update_query(["myplugin_arg"])

    MyEnginePlugin = Mock(side_effect=side_effect, update_url=update_url)

    plugins.register("engineplugin", __name__, "MyEnginePlugin")

    e = create_engine(
        "sqlite:///?plugin=engineplugin&foo=bar&myplugin_arg=bat",
        logging_name="foob",
    )
    eq_(e.dialect.name, "sqlite")
    eq_(e.logging_name, "bar")

    # plugin args are removed from URL.
    eq_(e.url.query, {"foo": "bar"})

    assert isinstance(e.dialect, sqlite.dialect)

    eq_(
        MyEnginePlugin.mock_calls,
        [
            call(
                url.make_url(
                    "sqlite:///?plugin=engineplugin"
                    "&foo=bar&myplugin_arg=bat"
                ),
                {},
            ),
            call.handle_dialect_kwargs(sqlite.dialect, mock.ANY),
            call.handle_pool_kwargs(mock.ANY, {"dialect": e.dialect}),
            call.engine_created(e),
        ],
    )