def test_outer_join_count(self):
    """test the join and outerjoin functions on Query"""
    Order = self.classes.Order
    User = self.classes.User
    Address = self.classes.Address

    session = create_session()
    # Either the order is absent (outer-joined NULL) or the address is #1.
    criterion = sa.or_(Order.id == None, Address.id == 1)  # noqa
    query = (
        session.query(User).outerjoin("orders", "addresses").filter(criterion)
    )
    eq_(query.count(), 4)
def test_date_roundtrips(self, date_fixture):
    """Insert date/datetime/time values and verify they round-trip."""
    t, (d1, t1, d2) = date_fixture
    with testing.db.begin() as conn:
        conn.execute(
            t.insert(), adate=d1, adatetime=d2, atime1=t1, atime2=d2
        )

        fetched = conn.execute(t.select()).first()
        # adatetime stored into atime2 comes back as its time() part
        eq_(
            (fetched.adate, fetched.adatetime, fetched.atime1, fetched.atime2),
            (d1, d2, t1, d2.time()),
        )
def go():
    # All Dest rows load with their polymorphic many_a collections
    # eagerly populated via joinedload, ordered by Dest.id.
    expected = [
        Dest(
            many_a=[
                A(aname="a1"),
                B(bname="b1"),
                B(bname="b2"),
                C(cname="c1"),
            ]
        ),
        Dest(many_a=[A(aname="a2"), C(cname="c2")]),
    ]
    actual = (
        sess.query(Dest)
        .options(joinedload(Dest.many_a))
        .order_by(Dest.id)
        .all()
    )
    eq_(expected, actual)
def test_get_includes_getclause(self):
    """Query.get() must include the mapper's _get_clause in its cache key."""
    # test issue #3597
    User = self.classes.User
    bq = self.bakery(lambda s: s.query(User))

    for _ in range(5):
        sess = Session()
        fetched = bq(sess).get(7)
        eq_(fetched.name, "jack")
        sess.close()

    eq_(len(bq._bakery), 2)

    # simulate race where mapper._get_clause
    # may be generated more than once
    from sqlalchemy_1_3 import inspect

    del inspect(User).__dict__["_get_clause"]

    for _ in range(5):
        sess = Session()
        fetched = bq(sess).get(7)
        eq_(fetched.name, "jack")
        sess.close()

    # the regenerated _get_clause produces two more cache entries
    eq_(len(bq._bakery), 4)
def test_func_embedded_whereclause(self, connection):
    """test can use next_value() in whereclause"""
    metadata = self.metadata
    seq_table = Table("t", metadata, Column("x", Integer))
    seq_table.create(testing.db)

    connection.execute(
        seq_table.insert(), [{"x": 1}, {"x": 300}, {"x": 301}]
    )

    seq = Sequence("my_sequence")
    # only rows whose x exceeds the sequence's next value survive
    stmt = seq_table.select().where(seq_table.c.x > seq.next_value())
    eq_(list(connection.execute(stmt)), [(300,), (301,)])
def test_subclass_lookup(self, get_dilbert):
    """Filtering on a base-class attribute locates the subclass row."""
    session = Session()
    dilbert = get_dilbert(session)

    # attribute name depends on whether the fixture redefined the column
    person_attribute_name = (
        "person_name" if self.redefine_colprop else "name"
    )

    fetched = (
        session.query(Engineer)
        .filter(getattr(Person, person_attribute_name) == "dilbert")
        .first()
    )
    eq_(fetched, dilbert)
def test_boolean_roundtrip_reflected(self, boolean_table, store, expected):
    # Reflect the table with no column overrides: MySQL reports the
    # boolean-ish columns as TINYINT(1) (b4 additionally UNSIGNED).
    meta2 = MetaData(testing.db)
    table = Table("mysql_bool", meta2, autoload=True)
    eq_(colspec(table.c.b3), "b3 TINYINT(1)")
    eq_regex(colspec(table.c.b4), r"b4 TINYINT(?:\(1\))? UNSIGNED")

    # Reflect again, this time with explicit BOOLEAN/Boolean column
    # overrides; the explicit types win over the reflected ones.
    meta2 = MetaData(testing.db)
    table = Table(
        "mysql_bool",
        meta2,
        Column("b1", BOOLEAN),
        Column("b2", Boolean),
        Column("b3", BOOLEAN),
        Column("b4", BOOLEAN),
        autoload=True,
    )
    eq_(colspec(table.c.b3), "b3 BOOL")
    eq_(colspec(table.c.b4), "b4 BOOL")

    with testing.db.connect() as conn:
        # when no distinct expected row is given, stored == fetched
        expected = expected or store
        conn.execute(table.insert(store))
        row = conn.execute(table.select()).first()
        eq_(list(row), expected)
        for i, val in enumerate(expected):
            if isinstance(val, bool):
                # boolean values must round-trip as the identical
                # True/False singletons, not 0/1
                self.assert_(val is row[i])
        conn.execute(table.delete())
def test_computed_update_warning(self):
    """UPDATE ... return_defaults() on a computed column warns on Oracle."""
    test = self.tables.test
    with testing.db.connect() as conn:
        conn.execute(test.insert(), {"id": 1, "foo": 5})

        with testing.expect_warnings(
            "Computed columns don't work with Oracle UPDATE"
        ):
            result = conn.execute(
                test.update().values(foo=10).return_defaults()
            )

            # returns the *old* value
            eq_(result.returned_defaults, (47,))

        eq_(conn.scalar(select([test.c.bar])), 52)
def test_persistence_states(self):
    """Walk one object through transient -> pending -> persistent -> detached."""
    User = self.classes.User
    user = User(name="ed")
    insp = inspect(user)

    def states():
        # snapshot of the four mutually-exclusive persistence flags
        return (insp.transient, insp.pending, insp.persistent, insp.detached)

    # freshly constructed: transient
    eq_(states(), (True, False, False, False))

    sess = Session(testing.db)
    sess.add(user)
    # added but not flushed: pending
    eq_(states(), (False, True, False, False))

    sess.flush()
    # flushed: persistent
    eq_(states(), (False, False, True, False))

    sess.expunge(user)
    # removed from the session: detached
    eq_(states(), (False, False, False, True))
def test_col_w_nonoptional_sequence_non_autoinc_no_firing(
    self, dataset_no_autoinc, connection
):
    """When the sequence is not optional and sequences are supported,
    the test fails because we didn't create the sequence.

    """
    # force the column default's sequence to be non-optional
    dataset_no_autoinc.c.set_id.default.optional = False
    connection.execute(dataset_no_autoinc.insert())

    count_stmt = select([func.count("*")]).select_from(dataset_no_autoinc)
    eq_(connection.scalar(count_stmt), 1)
def test_attrs_props_prop_added_after_configure(self):
    # Descriptors added after mapper configuration must still show up
    # in inspect(cls).attrs and inspect(cls).all_orm_descriptors.
    class Thing(InspectionAttr):
        pass

    class AnonClass(object):
        __foo__ = "bar"
        __bat__ = Thing()

    from sqlalchemy_1_3.orm import mapper, column_property
    from sqlalchemy_1_3.ext.hybrid import hybrid_property

    m = mapper(AnonClass, self.tables.users)

    # initially only the mapped columns are present
    eq_(set(inspect(AnonClass).attrs.keys()), set(["id", "name"]))
    eq_(
        set(inspect(AnonClass).all_orm_descriptors.keys()),
        set(["id", "name"]),
    )

    # add a column property after the mapper was configured
    m.add_property("q", column_property(self.tables.users.c.name))

    def desc(self):
        return self.name

    # and a hybrid property assigned directly onto the class
    AnonClass.foob = hybrid_property(desc)

    # attrs picks up the new column property; all_orm_descriptors
    # additionally sees the hybrid
    eq_(set(inspect(AnonClass).attrs.keys()), set(["id", "name", "q"]))
    eq_(
        set(inspect(AnonClass).all_orm_descriptors.keys()),
        set(["id", "name", "q", "foob"]),
    )
def test_delete_from_joined_subq_test(self):
    """DELETE correlated against a grouped subquery removes matching rows."""
    Document = self.classes.Document
    sess = Session()

    # per-user maximum title
    subq = (
        sess.query(func.max(Document.title).label("title"))
        .group_by(Document.user_id)
        .subquery()
    )

    sess.query(Document).filter(Document.title == subq.c.title).delete(
        synchronize_session=False
    )

    eq_(
        set(sess.query(Document.id, Document.flag)),
        set([(2, None), (3, None), (6, None)]),
    )
def test_fetch_before_update(self):
    """The 'fetch' synchronize strategy refreshes expired attributes."""
    User = self.classes.User
    sess = Session()
    john = sess.query(User).filter_by(name="john").one()
    # expire 'age' so the UPDATE criteria must fetch it first
    sess.expire(john, ["age"])

    values = {"name": "j2", "age": 40}
    sess.query(User).filter_by(name="john").filter_by(age=25).update(
        values, synchronize_session="fetch"
    )

    eq_(john.name, "j2")
    eq_(john.age, 40)
def test_update_with_expire_strategy(self):
    """Bulk UPDATE with 'fetch' refreshes the in-session objects."""
    User = self.classes.User
    sess = Session()
    john, jack, jill, jane = sess.query(User).order_by(User.id).all()

    sess.query(User).filter(User.age > 29).update(
        {"age": User.age - 10}, synchronize_session="fetch"
    )

    expected_ages = [25, 37, 29, 27]
    # in-memory objects were synchronized ...
    eq_([john.age, jack.age, jill.age, jane.age], expected_ages)
    # ... and the database agrees
    eq_(
        sess.query(User.age).order_by(User.id).all(),
        list(zip(expected_ages)),
    )
def test_rudimental_round_trip(self):
    """A dict value survives a round trip through mysql.JSON."""
    # note that test_suite has many more JSON round trip tests
    # using the backend-agnostic JSON type
    mysql_json = Table(
        "mysql_json", self.metadata, Column("foo", mysql.JSON)
    )
    self.metadata.create_all()

    value = {"json": {"foo": "bar"}, "recs": ["one", "two"]}
    with testing.db.connect() as conn:
        conn.execute(mysql_json.insert(), foo=value)
        eq_(conn.scalar(select([mysql_json.c.foo])), value)
def test_repr_params_large_list_of_dict(self):
    """_repr_params truncates a long executemany list to `batches` entries."""
    params = [{"data": str(i)} for i in range(100)]
    expected = (
        "[{'data': '0'}, {'data': '1'}, {'data': '2'}, {'data': '3'}, "
        "{'data': '4'}, {'data': '5'}, {'data': '6'}, {'data': '7'}"
        " ... displaying 10 of 100 total bound "
        "parameter sets ... {'data': '98'}, {'data': '99'}]"
    )
    eq_(
        repr(sql_util._repr_params(params, batches=10, ismulti=True)),
        expected,
    )
def test_log_large_multi_parameter(self):
    """Oversized bound parameters are elided in the logged SQL line."""
    import random

    lp1 = "".join(chr(random.randint(52, 85)) for _ in range(5))
    lp2 = "".join(chr(random.randint(52, 85)) for _ in range(8))
    lp3 = "".join(chr(random.randint(52, 85)) for _ in range(670))

    self.eng.execute("SELECT ?, ?, ?", (lp1, lp2, lp3))

    # only the 670-char value is truncated: first/last 149 chars kept
    eq_(
        self.buf.buffer[1].message,
        "('%s', '%s', '%s ... (372 characters truncated) ... %s')"
        % (lp1, lp2, lp3[0:149], lp3[-149:]),
    )
def test_update_from_multitable(self):
    """A bulk UPDATE may assign values across joined-inheritance tables."""
    Engineer = self.classes.Engineer
    Person = self.classes.Person
    sess = Session(testing.db)

    sess.query(Engineer).filter(Engineer.id == Person.id).filter(
        Person.name == "e2"
    ).update({Person.name: "e22", Engineer.engineer_name: "e55"})

    eq_(
        set(sess.query(Person.name, Engineer.engineer_name)),
        set([("e1", "e1"), ("e22", "e55")]),
    )
def test_clear(self):
    """list.clear() on an instrumented list empties the persisted collection."""
    if not hasattr(list, "clear"):
        # py2 list doesn't have 'clear'
        return

    sess = Session()
    foo = Foo(data=[1, 2])
    sess.add(foo)
    sess.commit()

    foo.data.clear()
    sess.commit()
    eq_(foo.data, [])
def test_scalar(self):
    # A @validates validator on a scalar attribute is invoked on every
    # assignment; its return value replaces the assigned value, and
    # raising from it rejects the assignment entirely.
    users = self.tables.users
    canary = Mock()

    class User(fixtures.ComparableEntity):
        @validates("name")
        def validate_name(self, key, name):
            canary(key, name)
            ne_(name, "fred")
            return name + " modified"

    mapper(User, users)
    sess = Session()
    u1 = User(name="ed")
    eq_(u1.name, "ed modified")

    # the validator raises for "fred"; the attribute keeps its prior value
    assert_raises(AssertionError, setattr, u1, "name", "fred")
    eq_(u1.name, "ed modified")
    # the canary saw both attempts, including the rejected one
    eq_(canary.mock_calls, [call("name", "ed"), call("name", "fred")])

    sess.add(u1)
    sess.commit()
    eq_(
        sess.query(User).filter_by(name="ed modified").one(),
        User(name="ed"),
    )
def test_meta_getattr_two(self):
    # A metaclass __getattr__ that proxies "double_x" to an
    # underscore-named hybrid property must work at the class level,
    # on an alias, and when compiled into SQL.
    class MetaPoint(type):
        def __getattr__(cls, key):
            if key == "double_x":
                return cls._impl_double_x
            raise AttributeError(key)

    class Point(compat.with_metaclass(MetaPoint)):
        @hybrid_property
        def _impl_double_x(self):
            return self.x * 2

    self._fixture(Point)
    alias = aliased(Point)

    # string form reports the underlying implementation attribute
    eq_(str(Point.double_x), "Point._impl_double_x")
    eq_(str(alias.double_x), "AliasedClass_Point._impl_double_x")
    eq_(str(Point.double_x.__clause_element__()), "point.x * :x_1")
    eq_(str(alias.double_x.__clause_element__()), "point_1.x * :x_1")

    sess = Session()
    self.assert_compile(
        sess.query(alias).filter(alias.double_x > Point.x),
        "SELECT point_1.id AS point_1_id, point_1.x AS point_1_x, "
        "point_1.y AS point_1_y FROM point AS point_1, point "
        "WHERE point_1.x * :x_1 > point.x",
    )
def test_hybrid_descriptor_two(self):
    """A plain hybrid property renders for both the class and an alias."""

    class Point(object):
        def __init__(self, x, y):
            self.x, self.y = x, y

        @hybrid_property
        def double_x(self):
            return self.x * 2

    self._fixture(Point)
    alias = aliased(Point)

    eq_(str(Point.double_x), "Point.double_x")
    eq_(str(alias.double_x), "AliasedClass_Point.double_x")
    eq_(str(Point.double_x.__clause_element__()), "point.x * :x_1")
    eq_(str(alias.double_x.__clause_element__()), "point_1.x * :x_1")

    expected_sql = (
        "SELECT point_1.id AS point_1_id, point_1.x AS point_1_x, "
        "point_1.y AS point_1_y FROM point AS point_1, point "
        "WHERE point_1.x * :x_1 > point.x"
    )
    sess = Session()
    self.assert_compile(
        sess.query(alias).filter(alias.double_x > Point.x), expected_sql
    )
def test_validator_bulk_dict_set(self):
    # A validator with include_removes=True sees both additions (raw
    # strings, converted to Address) and removals (Address instances),
    # including the implicit removals of a bulk dict replacement.
    users, addresses, Address = (
        self.tables.users,
        self.tables.addresses,
        self.classes.Address,
    )

    class User(fixtures.ComparableEntity):
        @validates("addresses", include_removes=True)
        def validate_address(self, key, item, remove):
            if not remove:
                # incoming values arrive as raw strings
                assert isinstance(item, str)
            else:
                # removals hand back the previously converted Address
                assert isinstance(item, Address)
            item = Address(email_address=item)
            return item

    mapper(
        User,
        users,
        properties={
            "addresses": relationship(
                Address,
                collection_class=collections.attribute_mapped_collection(
                    "email_address"
                ),
            )
        },
    )
    mapper(Address, addresses)

    u1 = User()
    u1.addresses["e1"] = "e1"
    u1.addresses["e2"] = "e2"
    eq_(
        u1.addresses,
        {
            "e1": Address(email_address="e1"),
            "e2": Address(email_address="e2"),
        },
    )

    # bulk replacement removes e1/e2 and adds e3/e4, all via the validator
    u1.addresses = {"e3": "e3", "e4": "e4"}
    eq_(
        u1.addresses,
        {
            "e3": Address(email_address="e3"),
            "e4": Address(email_address="e4"),
        },
    )
def test_limit_offset_for_update(self):
    """FOR UPDATE combined with limit/offset on Oracle."""
    metadata = self.metadata
    # oracle can't actually do the ROWNUM thing with FOR UPDATE
    # very well.
    t = Table(
        "t1",
        metadata,
        Column("id", Integer, primary_key=True),
        Column("data", Integer),
    )
    metadata.create_all()

    rows = [
        {"id": 1, "data": 1},
        {"id": 2, "data": 7},
        {"id": 3, "data": 12},
        {"id": 4, "data": 15},
        {"id": 5, "data": 32},
    ]
    t.insert().execute(*rows)

    # here, we can't use ORDER BY.
    eq_(
        t.select().with_for_update().limit(2).execute().fetchall(),
        [(1, 1), (2, 7)],
    )

    # here, its impossible.  But we'd prefer it to raise ORA-02014
    # instead of issuing a syntax error.
    assert_raises_message(
        exc.DatabaseError,
        "ORA-02014",
        t.select().with_for_update().limit(2).offset(3).execute,
    )
def test_pool_reset_on_return_from_config(self):
    """pool_reset_on_return config strings map to the pool reset constants."""
    dbapi = mock_dbapi
    cases = [
        ("rollback", pool.reset_rollback),
        ("commit", pool.reset_commit),
        ("none", pool.reset_none),
    ]
    for value, expected in cases:
        config = {
            "sqlalchemy.url": "postgresql://*****:*****@somehost/test",
            "sqlalchemy.pool_reset_on_return": value,
        }

        engine = engine_from_config(config, module=dbapi, _initialize=False)
        eq_(engine.pool._reset_on_return, expected)
def test_parent_class_only(self):
    """A listener on the factory class fires for elements it creates."""
    listener = Mock()
    event.listen(self.TargetFactory, "event_one", listener)

    element = self.TargetFactory().create()
    for arg in (1, 2, 3):
        element.run_event(arg)

    eq_(
        listener.mock_calls,
        [call(element, 1), call(element, 2), call(element, 3)],
    )
def test_indirect(self):
    """Listeners may target a class by registered name string."""

    def listen(x, y):
        pass

    # string target "one" resolves to the Target class
    event.listen("one", "event_one", listen)
    eq_(list(self.Target().dispatch.event_one), [listen])

    # a plain function is not a valid event target
    assert_raises(
        exc.InvalidRequestError,
        event.listen,
        listen,
        "event_one",
        self.Target,
    )
def test_exec_once(self):
    """exec_once fires once per dispatch instance, not once per call."""
    listener = Mock()
    event.listen(self.Target, "event_one", listener)

    t1 = self.Target()
    t2 = self.Target()

    t1.dispatch.event_one.for_modify(t1.dispatch).exec_once(5, 6)
    # second exec_once on the same instance is a no-op
    t1.dispatch.event_one.for_modify(t1.dispatch).exec_once(7, 8)
    # a different instance fires again
    t2.dispatch.event_one.for_modify(t2.dispatch).exec_once(9, 10)

    eq_(listener.mock_calls, [call(5, 6), call(9, 10)])
def test_clslevel(self):
    """Removing a class-level listener stops future dispatches."""
    Target = self._fixture()
    listener = Mock()
    event.listen(Target, "event_two", listener)

    target = Target()
    target.dispatch.event_two("x")

    event.remove(Target, "event_two", listener)
    # no longer invoked after removal
    target.dispatch.event_two("y")

    eq_(listener.mock_calls, [call("x")])
def test_label_length_custom_maxlen_user_set_manually(self):
    """A manually-set max_identifier_length survives the first connect."""
    eng = engines.testing_engine()
    eng.dialect.max_identifier_length = 47

    # assume the dialect has no on-connect change
    patched = mock.patch.object(
        eng.dialect,
        "_check_max_identifier_length",
        side_effect=lambda conn: None,
    )
    with patched:
        with eng.connect():
            pass

    # it was maintained
    eq_(eng.dialect.max_identifier_length, 47)