def test_metadata_drop_both(self):
    """before_drop and after_drop fire once each on metadata.drop_all(),
    receiving the metadata, bind, and the full table list."""
    metadata, bind = self.metadata, self.bind
    canary = mock.Mock()
    for evt_name in ("before_drop", "after_drop"):
        event.listen(metadata, evt_name, getattr(canary, evt_name))

    metadata.create_all(bind)
    metadata.drop_all(bind)

    # both events receive the same table list
    expected_tables = list(metadata.tables.values())
    eq_(
        canary.mock_calls,
        [
            mock.call.before_drop(
                metadata,
                self.bind,
                checkfirst=False,
                tables=expected_tables,
                _ddl_runner=mock.ANY,
            ),
            mock.call.after_drop(
                metadata,
                self.bind,
                checkfirst=False,
                tables=expected_tables,
                _ddl_runner=mock.ANY,
            ),
        ],
    )
def test_table_drop_both(self):
    """before_drop and after_drop fire once each on table.drop()."""
    table, bind = self.table, self.bind
    canary = mock.Mock()
    event.listen(table, "before_drop", canary.before_drop)
    event.listen(table, "after_drop", canary.after_drop)

    table.create(bind)
    table.drop(bind)

    # both events carry the same keyword payload
    common_kw = dict(
        checkfirst=False,
        _ddl_runner=mock.ANY,
        _is_metadata_operation=mock.ANY,
    )
    eq_(
        canary.mock_calls,
        [
            mock.call.before_drop(table, self.bind, **common_kw),
            mock.call.after_drop(table, self.bind, **common_kw),
        ],
    )
def test_metadata_create_before(self):
    """before_create fires exactly once for metadata.create_all() and is
    not re-fired by the subsequent drop_all()."""
    metadata, bind = self.metadata, self.bind
    canary = mock.Mock()
    event.listen(metadata, "before_create", canary.before_create)

    metadata.create_all(bind)
    metadata.drop_all(bind)

    # checkfirst is False because of the MockConnection
    # used in the current testing strategy.
    expected = mock.call.before_create(
        metadata,
        self.bind,
        checkfirst=False,
        tables=list(metadata.tables.values()),
        _ddl_runner=mock.ANY,
    )
    eq_(canary.mock_calls, [expected])
def test_singleton_gc(self):
    """A mixin @declared_attr is invoked exactly once per subclass, and a
    declarative class is removed from the registry once dereferenced and
    garbage collected."""
    counter = mock.Mock()

    class Mixin(object):
        @declared_attr
        def my_prop(cls):
            # record each invocation so we can assert it runs only once
            counter(cls.__name__)
            return Column('x', Integer)

    class A(Base, Mixin):
        __tablename__ = 'b'
        id = Column(Integer, primary_key=True)

        @declared_attr
        def my_other_prop(cls):
            # refers to my_prop via the class; must not cause a second
            # invocation of the mixin's declared_attr
            return column_property(cls.my_prop + 5)

    eq_(counter.mock_calls, [mock.call("A")])
    del A
    gc_collect()
    # with the class dereferenced, the declarative registry must not
    # keep it alive
    assert "A" not in Base._decl_class_registry
def test_disable_on_session(self):
    """With enable_baked_queries=False on the session, every execution
    re-invokes all query-creation callables instead of using the cache."""
    User = self.classes.User

    canary = mock.Mock()

    def fn1(s):
        canary.fn1()
        return s.query(User.id, User.name).order_by(User.id)

    def fn2(q):
        canary.fn2()
        return q.filter(User.id == bindparam("id"))

    def fn3(q):
        canary.fn3()
        return q

    for _ in range(3):
        bq = self.bakery(fn1)
        bq += fn2
        sess = fixture_session(enable_baked_queries=False)
        eq_(bq.add_criteria(fn3)(sess).params(id=7).all(), [(7, "jack")])

    # one full round of fn1/fn2/fn3 per iteration: nothing was cached
    one_round = [mock.call.fn1(), mock.call.fn2(), mock.call.fn3()]
    eq_(canary.mock_calls, one_round * 3)
def test_encoding_errors_cx_oracle_py3k(
    self, cx_Oracle, cx_oracle_type, use_read
):
    """The dialect-level ``encoding_errors`` parameter is passed through
    to ``cursor.var()`` as ``encodingErrors`` by the generated
    outputtype handler.

    ``use_read`` selects the variant where the handler additionally
    installs an outconverter (presumably for LOB-style types that are
    consumed via ``.read()`` — confirm against the fixture's
    parametrization).
    """
    ignore_dialect = cx_oracle.dialect(
        dbapi=cx_Oracle, encoding_errors="ignore"
    )
    ignore_outputhandler = (
        ignore_dialect._generate_connection_outputtype_handler()
    )

    cursor = mock.Mock()
    # invoke the handler as cx_Oracle would: (cursor, name, default_type,
    # size, precision, scale)
    ignore_outputhandler(cursor, "foo", cx_oracle_type, None, None, None)

    if use_read:
        # this variant also installs an outconverter
        eq_(
            cursor.mock_calls,
            [
                mock.call.var(
                    mock.ANY,
                    None,
                    cursor.arraysize,
                    encodingErrors="ignore",
                    outconverter=mock.ANY,
                )
            ],
        )
    else:
        eq_(
            cursor.mock_calls,
            [
                mock.call.var(
                    mock.ANY,
                    None,
                    cursor.arraysize,
                    encodingErrors="ignore",
                )
            ],
        )
def modify_query_fixture(self):
    """Generator fixture: yields a ``set_event(bake_ok)`` callable that
    installs a ``before_compile`` hook on Query and returns the Mock that
    records each query's primary entity; the hook is removed on teardown.
    """

    def set_event(bake_ok):
        event.listen(
            Query,
            "before_compile",
            _modify_query,
            retval=True,
            bake_ok=bake_ok,
        )
        return m1

    m1 = mock.Mock()

    def _modify_query(query):
        # record which entity the query targets
        m1(query.column_descriptions[0]["entity"])
        # append a harmless criterion so the query is visibly modified
        query = query.enable_assertions(False).filter(
            literal_column("1") == 1)
        return query

    yield set_event
    # teardown: remove the hook installed by set_event
    event.remove(Query, "before_compile", _modify_query)
def test_spoiled_full_w_params(self):
    """A fully-spoiled baked query re-runs every creation callable on
    each use; nothing is retrieved from the cache."""
    User = self.classes.User

    canary = mock.Mock()

    def fn1(s):
        canary.fn1()
        return s.query(User.id, User.name).order_by(User.id)

    def fn2(q):
        canary.fn2()
        return q.filter(User.id == bindparam("id"))

    def fn3(q):
        canary.fn3()
        return q

    for _ in range(3):
        bq = self.bakery(fn1)
        bq += fn2
        sess = Session(autocommit=True)
        result = bq.spoil(full=True).add_criteria(fn3)(sess).params(id=7)
        eq_(result.all(), [(7, "jack")])

    # three iterations, each invoking all three callables
    eq_(
        canary.mock_calls,
        [mock.call.fn1(), mock.call.fn2(), mock.call.fn3()] * 3,
    )
def test_dbapi_clsmethod_renamed(self):
    """The dbapi() class method is renamed to import_dbapi(), so that
    the .dbapi attribute can be exclusively an instance attribute.

    A dialect that still implements the legacy classmethod emits a
    deprecation warning when the engine resolves it, but the classmethod
    is still invoked and its return value used as .dbapi.
    """

    from sqlalchemy.dialects.sqlite import pysqlite
    from sqlalchemy.dialects import registry

    canary = mock.Mock()

    class MyDialect(pysqlite.SQLiteDialect_pysqlite):
        @classmethod
        def dbapi(cls):
            # legacy-style classmethod; should trigger the warning
            canary()
            return __import__("sqlite3")

    tokens = __name__.split(".")

    # the registry entry points at this module; the dialect class must
    # be reachable as a module-level name
    global dialect
    dialect = MyDialect
    registry.register(
        "mockdialect1.sqlite", ".".join(tokens[0:-1]), tokens[-1])

    with expect_deprecated(
            r"The dbapi\(\) classmethod on dialect classes has "
            r"been renamed to import_dbapi\(\). Implement an "
            r"import_dbapi\(\) classmethod directly on class "
            r".*MyDialect.* to remove this warning; the old "
            r".dbapi\(\) classmethod may be maintained for backwards "
            r"compatibility."):
        e = create_engine("mockdialect1+sqlite://")

    # the legacy classmethod was called exactly once and its module
    # became the instance-level .dbapi
    eq_(canary.mock_calls, [mock.call()])
    sqlite3 = __import__("sqlite3")
    is_(e.dialect.dbapi, sqlite3)
def test_parameters(self, exec_type, usemethod, connection):
    """get_current_parameters() reports each row's parameter set,
    including defaulted columns (x) as None, across single-row,
    multivalues and executemany styles."""
    collect = mock.Mock()

    @self._fixture
    def fn(context):
        collect(context.get_current_parameters())

    table = self.tables.some_table

    # two rows for the multi-row styles, one otherwise
    parameters = (
        [{"y": "h1"}, {"y": "h2"}]
        if exec_type in ("multivalues", "executemany")
        else [{"y": "hello"}]
    )

    if exec_type == "multivalues":
        stmt, params = table.insert().values(parameters), {}
    else:
        stmt, params = table.insert(), parameters

    connection.execute(stmt, params)

    expected = [
        mock.call({"y": row["y"], "x": None}) for row in parameters
    ]
    eq_(collect.mock_calls, expected)
def _test_baked_lazy_loading_relationship_flag(self, flag):
    """With bake_queries enabled on the relationship, the lazy-load
    query for Address is compiled once and then served from the cache;
    with it disabled, each lazy load compiles again."""
    baked.bake_lazy_loaders()
    try:
        User, Address = self._o2m_fixture(bake_queries=flag)

        sess = Session()
        u1 = sess.query(User).first()

        from sqlalchemy.orm import Query

        canary = mock.Mock()

        # I would think Mock can do this but apparently
        # it cannot (wrap / autospec don't work together)
        real_compile_context = Query._compile_context

        def _my_compile_context(*arg, **kw):
            # count only compilations of the Address lazy-load query
            if arg[0].column_descriptions[0]['entity'] is Address:
                canary()
            return real_compile_context(*arg, **kw)

        with mock.patch.object(
                Query, "_compile_context", _my_compile_context):
            # first lazy load, then expire and lazy-load again
            u1.addresses
            sess.expire(u1)
            u1.addresses
    finally:
        baked.unbake_lazy_loaders()

    if flag:
        # second load hit the baked-query cache: one compile
        eq_(canary.call_count, 1)
    else:
        # no caching: one compile per lazy load
        eq_(canary.call_count, 2)
def test_mixin_attr_refers_to_column_copies(self):
    """A mixin @declared_attr can reference the subclass's copied
    column (cls.id resolves to User.id) and the resulting
    column_property compiles into the expected correlated subquery."""

    # this @declared_attr can refer to User.id
    # freely because we now do the "copy column" operation
    # before the declared_attr is invoked.
    counter = mock.Mock()

    class HasAddressCount(object):
        id = Column(Integer, primary_key=True)

        @declared_attr
        def address_count(cls):
            # records which column object the declared_attr received
            counter(cls.id)
            return column_property(
                select([func.count(Address.id)
                        ]).where(Address.user_id == cls.id).as_scalar())

    class Address(Base):
        __tablename__ = 'address'
        id = Column(Integer, primary_key=True)
        user_id = Column(ForeignKey('user.id'))

    class User(Base, HasAddressCount):
        __tablename__ = 'user'

    # invoked exactly once, with User's copy of the mixin column
    eq_(counter.mock_calls, [mock.call(User.id)])

    sess = Session()
    self.assert_compile(
        sess.query(User).having(User.address_count > 5),
        'SELECT (SELECT count(address.id) AS '
        'count_1 FROM address WHERE address.user_id = "user".id) '
        'AS anon_1, "user".id AS user_id FROM "user" '
        'HAVING (SELECT count(address.id) AS '
        'count_1 FROM address WHERE address.user_id = "user".id) '
        '> :param_1')
def test_distill_single_list_strings(self):
    """A one-element tuple holding a list of strings distills to a
    single-row parameter list."""
    result = self.module._distill_params(
        mock.Mock(), (["foo", "bar"],), {}
    )
    eq_(result, [["foo", "bar"]])
def test_distill_none(self):
    """None in both positions distills to an empty parameter list."""
    result = self.module._distill_params(mock.Mock(), None, None)
    eq_(result, [])
def test_distill_no_multi_no_param(self):
    """Empty args and kwargs distill to an empty parameter list."""
    result = self.module._distill_params(mock.Mock(), (), {})
    eq_(result, [])
def op_fixture(
    dialect="default",
    as_sql=False,
    naming_convention=None,
    literal_binds=False,
    native_boolean=None,
):
    """Build a MigrationContext test fixture that captures emitted SQL.

    Returns a ``ctx`` (MigrationContext subclass) whose ``assert_`` /
    ``assert_contains`` / ``get_buf`` / ``clear_assertions`` methods
    inspect the SQL written during an operation.  ``alembic.op`` is
    proxied to an Operations bound to this context as a side effect.

    :param dialect: dialect name; ``"mariadb"`` maps to mysql with a
        MariaDB server_version_info.
    :param as_sql: offline ("--sql") mode; statements go to the buffer
        instead of a mock connection.
    :param naming_convention: optional MetaData naming convention passed
        as target_metadata.
    :param literal_binds: render bound parameters inline.
    :param native_boolean: when not None, force the dialect's
        supports_native_boolean flag.
    """
    opts = {}
    if naming_convention:
        opts["target_metadata"] = MetaData(
            naming_convention=naming_convention
        )

    class buffer_(object):
        """Collects normalized SQL statements for later assertions."""

        def __init__(self):
            self.lines = []

        def write(self, msg):
            msg = msg.strip()
            msg = re.sub(r"[\n\t]", "", msg)
            if as_sql:
                # the impl produces soft tabs,
                # so search for blocks of 4 spaces.
                # BUG FIX: the pattern must be four spaces; a single-space
                # pattern would strip every space and mangle the SQL.
                msg = re.sub(r"    ", "", msg)
                msg = re.sub(r"\;\n*$", "", msg)

            self.lines.append(msg)

        def flush(self):
            pass

    buf = buffer_()

    class ctx(MigrationContext):
        def get_buf(self):
            return buf

        def clear_assertions(self):
            buf.lines[:] = []

        def assert_(self, *sql):
            # TODO: make this more flexible about
            # whitespace and such
            eq_(buf.lines, [re.sub(r"[\n\t]", "", s) for s in sql])

        def assert_contains(self, sql):
            # pass if the normalized fragment appears in any statement
            for stmt in buf.lines:
                if re.sub(r"[\n\t]", "", sql) in stmt:
                    return
            else:
                assert False, "Could not locate fragment %r in %r" % (
                    sql,
                    buf.lines,
                )

    if as_sql:
        opts["as_sql"] = as_sql
    if literal_binds:
        opts["literal_binds"] = literal_binds

    if dialect == "mariadb":
        # MariaDB rides on the mysql dialect with a version override
        ctx_dialect = _get_dialect("mysql")
        ctx_dialect.server_version_info = (10, 0, 0, "MariaDB")
    else:
        ctx_dialect = _get_dialect(dialect)
    if native_boolean is not None:
        ctx_dialect.supports_native_boolean = native_boolean
        # this is new as of SQLAlchemy 1.2.7 and is used by SQL Server,
        # which breaks assumptions in the alembic test suite
        ctx_dialect.non_native_boolean_check_constraint = True

    if not as_sql:
        # online mode: route execution through a mock connection that
        # compiles each statement and writes it to the buffer
        def execute(stmt, *multiparam, **param):
            if isinstance(stmt, string_types):
                stmt = text(stmt)
            assert stmt.supports_execution
            sql = text_type(stmt.compile(dialect=ctx_dialect))

            buf.write(sql)

        connection = mock.Mock(dialect=ctx_dialect, execute=execute)
    else:
        opts["output_buffer"] = buf
        connection = None
    context = ctx(ctx_dialect, connection, opts)

    alembic.op._proxy = Operations(context)
    return context
def test_get_categories_failed(session):
    """When the session query raises SQLAlchemyError, the helper falls
    back to an empty category list."""

    def boom(_query_arg):
        raise SQLAlchemyError()

    session.query = mock.Mock(side_effect=boom)
    assert get_user_categories(session, 1) == []
def _connection_for_bind(self, bind, **kw):
    # record the call on the enclosing scope's canary, then return a
    # throwaway Mock in place of a real connection
    canary._connection_for_bind(bind, **kw)
    return mock.Mock()
def _stpool_logging_fixture(self):
    """SingletonThreadPool with DEBUG-level pool logging enabled."""
    pool_logger = logging.getLogger("sqlalchemy.pool")
    pool_logger.setLevel(logging.DEBUG)
    return tsa.pool.SingletonThreadPool(creator=mock.Mock())
def test_distill_single_string(self):
    """A lone string argument distills to a single one-element row."""
    result = self.module._distill_params(mock.Mock(), ("arg",), {})
    eq_(result, [["arg"]])
def test_distill_single_list_tuple(self):
    """A list containing one tuple distills to that tuple as the sole
    parameter row."""
    result = self.module._distill_params(
        mock.Mock(), ([("foo", "bar")],), {}
    )
    eq_(result, [("foo", "bar")])
def test_engine_convert_unicode(self):
    """create_engine(convert_unicode=True) emits the deprecation
    warning for that parameter."""
    expected_warning = (
        "The create_engine.convert_unicode parameter and "
        "corresponding dialect-level"
    )
    with testing.expect_deprecated(expected_warning):
        create_engine(
            "mysql://", convert_unicode=True, module=mock.Mock()
        )
def test_custom_bind(self):
    """A MapperOption whose process_query_conditionally supplies the
    bound parameter required by a custom lazy-load primaryjoin; the
    option is applied once per lazy load."""
    Address, addresses, users, User = (
        self.classes.Address,
        self.tables.addresses,
        self.tables.users,
        self.classes.User,
    )
    # the relationship's primaryjoin carries a bindparam("name") that
    # only the option below can populate
    mapper(
        User,
        users,
        properties=dict(
            addresses=relationship(
                mapper(Address, addresses),
                lazy="select",
                primaryjoin=and_(
                    users.c.id == addresses.c.user_id,
                    users.c.name == bindparam("name"),
                ),
            )
        ),
    )
    canary = mock.Mock()

    class MyOption(MapperOption):
        propagate_to_loaders = True

        def __init__(self, crit):
            self.crit = crit

        def process_query_conditionally(self, query):
            """process query during a lazyload"""
            canary()
            query._params = query._params.union(dict(name=self.crit))

    s = Session()
    ed = s.query(User).options(MyOption("ed")).filter_by(name="ed").one()
    eq_(
        ed.addresses,
        [
            Address(id=2, user_id=8),
            Address(id=3, user_id=8),
            Address(id=4, user_id=8),
        ],
    )
    # one lazy load so far -> one option invocation
    eq_(canary.mock_calls, [mock.call()])

    fred = (
        s.query(User).options(MyOption("ed")).filter_by(name="fred").one()
    )
    eq_(fred.addresses, [])  # fred is missing
    eq_(canary.mock_calls, [mock.call(), mock.call()])

    # the lazy query was not cached; the option is re-applied to the
    # Fred object due to populate_existing()
    fred = (
        s.query(User)
        .populate_existing()
        .options(MyOption("fred"))
        .filter_by(name="fred")
        .one()
    )
    eq_(fred.addresses, [Address(id=5, user_id=9)])  # fred is there
    eq_(canary.mock_calls, [mock.call(), mock.call(), mock.call()])
def test_prop_on_base(self):
    """test [ticket:2670]

    declared_attr relationships on an AbstractConcreteBase work on the
    concrete subclasses, and on the abstract base itself once its
    polymorphic-union ("pjoin") mapping has been configured.
    """

    counter = mock.Mock()

    class Something(Base):
        __tablename__ = "something"
        id = Column(Integer, primary_key=True)

    class AbstractConcreteAbstraction(AbstractConcreteBase, Base):
        id = Column(Integer, primary_key=True)
        x = Column(Integer)
        y = Column(Integer)

        @declared_attr
        def something_id(cls):
            return Column(ForeignKey(Something.id))

        @declared_attr
        def something(cls):
            # records each (class, attr-name) invocation
            counter(cls, "something")
            return relationship("Something")

        @declared_attr
        def something_else(cls):
            counter(cls, "something_else")
            return relationship("Something", viewonly=True)

    class ConcreteConcreteAbstraction(AbstractConcreteAbstraction):
        __tablename__ = "cca"
        __mapper_args__ = {
            "polymorphic_identity": "ccb", "concrete": True}

    # concrete is mapped, the abstract base is not (yet)
    assert ConcreteConcreteAbstraction.__mapper__
    assert not hasattr(AbstractConcreteAbstraction, "__mapper__")

    session = Session()
    self.assert_compile(
        session.query(ConcreteConcreteAbstraction).filter(
            ConcreteConcreteAbstraction.something.has(id=1)),
        "SELECT cca.id AS cca_id, cca.x AS cca_x, cca.y AS cca_y, "
        "cca.something_id AS cca_something_id FROM cca WHERE EXISTS "
        "(SELECT 1 FROM something WHERE something.id = cca.something_id "
        "AND something.id = :id_1)",
    )

    # now it is
    assert AbstractConcreteAbstraction.__mapper__

    self.assert_compile(
        session.query(ConcreteConcreteAbstraction).filter(
            ConcreteConcreteAbstraction.something_else.has(id=1)),
        "SELECT cca.id AS cca_id, cca.x AS cca_x, cca.y AS cca_y, "
        "cca.something_id AS cca_something_id FROM cca WHERE EXISTS "
        "(SELECT 1 FROM something WHERE something.id = cca.something_id "
        "AND something.id = :id_1)",
    )

    # queries against the abstract base select from the pjoin subquery
    self.assert_compile(
        session.query(AbstractConcreteAbstraction).filter(
            AbstractConcreteAbstraction.something.has(id=1)),
        "SELECT pjoin.id AS pjoin_id, pjoin.x AS pjoin_x, "
        "pjoin.y AS pjoin_y, pjoin.something_id AS pjoin_something_id, "
        "pjoin.type AS pjoin_type FROM "
        "(SELECT cca.id AS id, cca.x AS x, cca.y AS y, "
        "cca.something_id AS something_id, 'ccb' AS type FROM cca) "
        "AS pjoin WHERE EXISTS (SELECT 1 FROM something "
        "WHERE something.id = pjoin.something_id "
        "AND something.id = :id_1)",
    )

    self.assert_compile(
        session.query(AbstractConcreteAbstraction).filter(
            AbstractConcreteAbstraction.something_else.has(id=1)),
        "SELECT pjoin.id AS pjoin_id, pjoin.x AS pjoin_x, "
        "pjoin.y AS pjoin_y, pjoin.something_id AS pjoin_something_id, "
        "pjoin.type AS pjoin_type FROM "
        "(SELECT cca.id AS id, cca.x AS x, cca.y AS y, "
        "cca.something_id AS something_id, 'ccb' AS type FROM cca) "
        "AS pjoin WHERE EXISTS (SELECT 1 FROM something "
        "WHERE something.id = pjoin.something_id AND "
        "something.id = :id_1)",
    )
def test_distill_multi_strings(self):
    """Two positional strings distill to a single tuple row."""
    result = self.module._distill_params(mock.Mock(), ("foo", "bar"), {})
    eq_(result, [("foo", "bar")])
def _queuepool_logging_fixture(self):
    """QueuePool with DEBUG-level pool logging enabled."""
    pool_logger = logging.getLogger("sqlalchemy.pool")
    pool_logger.setLevel(logging.DEBUG)
    return tsa.pool.QueuePool(creator=mock.Mock())
def test_distill_multi_string_tuple(self):
    """A single tuple argument distills to that tuple as one row."""
    result = self.module._distill_params(
        mock.Mock(), (("arg", "arg"),), {}
    )
    eq_(result, [("arg", "arg")])
def _stpool_echo_fixture(self):
    """SingletonThreadPool with echo='debug' output enabled."""
    creator = mock.Mock()
    return tsa.pool.SingletonThreadPool(creator=creator, echo="debug")
def __init__(self, connection_fairy):
    # open a cursor on the fairy's underlying DBAPI connection
    self.cursor = connection_fairy.connection.cursor()
    # recording mock, also stashed on the fairy's .info dict so tests
    # can retrieve it later via the pooled connection
    self.mock = mock.Mock()
    connection_fairy.info["mock"] = self.mock
def _queuepool_echo_fixture(self):
    """QueuePool with echo='debug' output enabled."""
    creator = mock.Mock()
    return tsa.pool.QueuePool(creator=creator, echo="debug")