def test_scalar(self):
    """Scalar @validates: the validator rewrites the value on set, and
    an AssertionError raised inside it aborts the assignment."""
    users = self.tables.users
    watcher = Mock()

    class User(fixtures.ComparableEntity):
        @validates("name")
        def validate_name(self, key, name):
            watcher(key, name)
            ne_(name, "fred")
            return name + " modified"

    mapper(User, users)
    session = Session()

    subject = User(name="ed")
    eq_(subject.name, "ed modified")

    # the validator raises for "fred"; the attribute keeps its old value
    assert_raises(AssertionError, setattr, subject, "name", "fred")
    eq_(subject.name, "ed modified")

    # both attempts passed through the validator
    eq_(watcher.mock_calls, [call("name", "ed"), call("name", "fred")])

    session.add(subject)
    session.commit()
    eq_(
        session.query(User).filter_by(name="ed modified").one(),
        User(name="ed"),
    )
def test_pending_combines_with_flushed(self):
    """test the combination of unflushed pending + lazy loaded from DB."""
    Item, Keyword = self.classes.Item, self.classes.Keyword
    session = Session(testing.db, autoflush=False)

    kw1 = Keyword(name="k1")
    kw2 = Keyword(name="k2")
    item = Item(description="i1", keywords=[kw1])
    session.add(item)
    session.add(kw2)
    session.commit()

    # append via the backref while autoflush is off
    kw2.items.append(item)

    # the pending list is still here
    pending = attributes.instance_state(item)._pending_mutations["keywords"]
    eq_(set(pending.added_items), set([kw2]))

    # because autoflush is off, k2 is still coming in from pending
    eq_(item.keywords, [kw1, kw2])

    # prove it didn't flush
    eq_(session.scalar("select count(*) from item_keywords"), 1)

    # the pending collection was removed
    assert (
        "keywords" not in attributes.instance_state(item)._pending_mutations
    )
def test_self_referential_bidirectional_mutation(self):
    """Self-referential many-to-many with a backref: an append made on
    either side is visible from both sides after commit."""
    place, Place, place_place = (
        self.tables.place,
        self.classes.Place,
        self.tables.place_place,
    )
    mapper(
        Place,
        place,
        properties={
            "child_places": relationship(
                Place,
                secondary=place_place,
                primaryjoin=place.c.place_id == place_place.c.pl1_id,
                secondaryjoin=place.c.place_id == place_place.c.pl2_id,
                order_by=place_place.c.pl2_id,
                backref="parent_places",
            )
        },
    )

    session = Session()
    first = Place("place1")
    second = Place("place2")

    # mutate both directions of the relationship before the flush
    second.parent_places = [first]
    session.add_all([first, second])
    first.parent_places.append(second)
    session.commit()

    assert first in second.parent_places
    assert second in first.parent_places
def test_optimized_get(self):
    """The optimized load of a subclass-local attribute must not create
    reference cycles (checked by the @assert_cycles decorator).

    Fix: ``Engineer.__tablename__`` contained a stray leading space
    (" engineer"), which would create a table literally named with a
    leading blank and break the FK/table pairing by convention.
    """
    from sqlalchemy_1_3.ext.declarative import declarative_base

    Base = declarative_base(metadata=self.metadata)

    class Employee(Base):
        __tablename__ = "employee"
        id = Column(
            Integer, primary_key=True, test_needs_autoincrement=True
        )
        type = Column(String(10))
        __mapper_args__ = {"polymorphic_on": type}

    class Engineer(Employee):
        # fixed: was " engineer" with a leading space
        __tablename__ = "engineer"
        id = Column(ForeignKey("employee.id"), primary_key=True)
        engineer_name = Column(String(50))
        __mapper_args__ = {"polymorphic_identity": "engineer"}

    Base.metadata.create_all(testing.db)
    s = Session(testing.db)
    s.add(Engineer(engineer_name="wally"))
    s.commit()
    s.close()

    @assert_cycles()
    def go():
        e1 = s.query(Employee).first()
        # touching the deferred subclass attribute triggers the
        # optimized get
        e1.engineer_name

    go()
def test_11_pickle(self):
    # Restore an InstanceState pickled under the 1.1-era state format
    # and verify the identity key is expanded with a None
    # identity_token on unpickle.
    users = self.tables.users
    mapper(User, users)
    sess = Session()
    u1 = User(id=1, name="ed")
    sess.add(u1)
    sess.commit()
    sess.close()

    # build a _SerializeManager without running __init__ (we only need
    # its class_ attribute for __setstate__)
    manager = instrumentation._SerializeManager.__new__(
        instrumentation._SerializeManager
    )
    manager.class_ = User

    # 1.1-style __getstate__ payload: note the two-element identity
    # key (no identity_token slot)
    state_11 = {
        "class_": User,
        "modified": False,
        "committed_state": {},
        "instance": u1,
        "manager": manager,
        "key": (User, (1,)),
        "expired_attributes": set(),
        "expired": True,
    }

    state = sa_state.InstanceState.__new__(sa_state.InstanceState)
    state.__setstate__(state_11)

    # upgraded state gains a None identity_token and a three-element key
    eq_(state.identity_token, None)
    eq_(state.identity_key, (User, (1,), None))
def test_map_to_select(self):
    """A base mapper configured against a select() alias participates
    in an inheritance round trip."""
    Base, Child = self.classes.Base, self.classes.Child
    base, child = self.tables.base, self.tables.child

    base_select = select([base]).alias()
    mapper(
        Base,
        base_select,
        polymorphic_on=base_select.c.type,
        polymorphic_identity="base",
    )
    mapper(Child, child, inherits=Base, polymorphic_identity="child")

    session = Session()

    # 2. use an id other than "1" here so can't rely on
    # the two inserts having the same id
    c1 = Child(id=12, name="c1")
    session.add(c1)
    session.commit()
    session.close()

    reloaded = session.query(Child).one()
    eq_(reloaded.name, "c1")
def test_bulk_save_updated_include_unchanged(self):
    """bulk_save_objects(update_changed_only=False) emits an UPDATE row
    for every object, including the unmodified one."""
    (User,) = self.classes("User")
    s = Session(expire_on_commit=False)
    objects = [User(name="u1"), User(name="u2"), User(name="u3")]
    s.add_all(objects)
    s.commit()

    # mutate only the first and last objects
    objects[0].name = "u1new"
    objects[2].name = "u3new"

    s = Session()
    with self.sql_execution_asserter() as asserter:
        s.bulk_save_objects(objects, update_changed_only=False)

    # all three rows appear in the UPDATE batch, u2 unchanged
    asserter.assert_(
        CompiledSQL(
            "UPDATE users SET name=:name WHERE "
            "users.id = :users_id",
            [
                {"users_id": 1, "name": "u1new"},
                {"users_id": 2, "name": "u2"},
                {"users_id": 3, "name": "u3new"},
            ],
        )
    )
def test_collection(self):
    """Collection @validates: runs once per append and can veto an
    element via its assert."""
    users, addresses, Address = (
        self.tables.users,
        self.tables.addresses,
        self.classes.Address,
    )
    watcher = Mock()

    class User(fixtures.ComparableEntity):
        @validates("addresses")
        def validate_address(self, key, ad):
            watcher(key, ad)
            assert "@" in ad.email_address
            return ad

    mapper(User, users, properties={"addresses": relationship(Address)})
    mapper(Address, addresses)

    session = Session()
    u1 = User(name="edward")

    # no "@" in the address -> the validator's assert fires
    rejected = Address(email_address="noemail")
    assert_raises(AssertionError, u1.addresses.append, rejected)

    accepted = Address(id=15, email_address="*****@*****.**")
    u1.addresses.append(accepted)
    eq_(
        watcher.mock_calls,
        [call("addresses", rejected), call("addresses", accepted)],
    )

    session.add(u1)
    session.commit()
    eq_(
        session.query(User).filter_by(name="edward").one(),
        User(
            name="edward",
            addresses=[Address(email_address="*****@*****.**")],
        ),
    )
def test_cast_type(self):
    """The same JSON value matches through the int, text, and astext
    views of the column; a non-matching text filter returns None."""
    Json = self.classes.Json
    session = Session(testing.db)
    j = Json(json={"field": 10})
    session.add(j)
    session.commit()

    # all three cast forms locate the same row
    for criterion in (
        Json.int_field == 10,
        Json.text_field == "10",
        Json.json_field.astext == "10",
    ):
        found = session.query(Json).filter(criterion).one()
        eq_(j.id, found.id)

    is_(session.query(Json).filter(Json.text_field == "wrong").first(), None)

    # booleans render as lowercase text
    j.json = {"field": True}
    session.commit()
    found = session.query(Json).filter(Json.text_field == "true").one()
    eq_(j.id, found.id)
def test_insert_w_fetch(self):
    """bulk_save_objects handles a single new instance cleanly."""
    A = self.classes.A
    session = Session()
    pending = A(x=1)
    session.bulk_save_objects([pending])
    session.commit()
def test_set_composite_attrs_via_selectable(self):
    """Composite values assigned through a selectable-mapped entity are
    persisted into the correct underlying tables."""
    Values, CustomValues, values, Descriptions, descriptions = (
        self.classes.Values,
        self.classes.CustomValues,
        self.tables.values,
        self.classes.Descriptions,
        self.tables.descriptions,
    )
    session = Session()
    desc = Descriptions(
        custom_descriptions=CustomValues("Color", "Number"),
        values=[
            Values(custom_values=CustomValues("Red", "5")),
            Values(custom_values=CustomValues("Blue", "1")),
        ],
    )
    session.add(desc)
    session.commit()

    # verify the raw rows landed in each table
    eq_(
        testing.db.execute(descriptions.select()).fetchall(),
        [(1, "Color", "Number")],
    )
    eq_(
        testing.db.execute(values.select()).fetchall(),
        [(1, 1, "Red", "5"), (2, 1, "Blue", "1")],
    )
def test_09_pickle(self):
    # Restore an InstanceState pickled under the 0.9-era format (which
    # recorded expired attributes as entries in "callables") and verify
    # it upgrades to the modern expired_attributes set and still loads.
    users = self.tables.users
    mapper(User, users)
    sess = Session()
    sess.add(User(id=1, name="ed"))
    sess.commit()
    sess.close()

    # detach the instance from instrumentation so __setstate__ can
    # re-attach it
    inst = User(id=1, name="ed")
    del inst._sa_instance_state

    state = sa_state.InstanceState.__new__(sa_state.InstanceState)

    # 0.9-style payload: expired attributes point back at the state
    # object itself via "callables"
    state_09 = {
        "class_": User,
        "modified": False,
        "committed_state": {},
        "instance": inst,
        "callables": {"name": state, "id": state},
        "key": (User, (1,)),
        "expired": True,
    }
    manager = instrumentation._SerializeManager.__new__(
        instrumentation._SerializeManager
    )
    manager.class_ = User
    state_09["manager"] = manager

    state.__setstate__(state_09)
    # "callables" entries were converted into expired_attributes
    eq_(state.expired_attributes, {"name", "id"})

    sess = Session()
    sess.add(inst)
    # expired attribute loads from the DB on access
    eq_(inst.name, "ed")
    # test identity_token expansion
    eq_(sa.inspect(inst).key, (User, (1,), None))
def _roundtrip(self):
    """Persist a mix of Foo/Bar rows and verify ordered retrieval with
    polymorphic identity intact."""
    Foo = Base._decl_class_registry["Foo"]
    Bar = Base._decl_class_registry["Bar"]
    session = Session(testing.db)
    rows = [
        Bar(data="d1", bar_data="b1"),
        Bar(data="d2", bar_data="b2"),
        Bar(data="d3", bar_data="b3"),
        Foo(data="d4"),
    ]
    session.add_all(rows)
    session.commit()

    eq_(
        session.query(Foo).order_by(Foo.id).all(),
        [
            Bar(data="d1", bar_data="b1"),
            Bar(data="d2", bar_data="b2"),
            Bar(data="d3", bar_data="b3"),
            Foo(data="d4"),
        ],
    )
def test_noload_append(self):
    # test that a load of User.addresses is not emitted
    # when flushing an append
    User, Address = self._user_address_fixture()
    session = Session()
    u1 = User(name="jack", addresses=[Address(email_address="a1")])
    session.add(u1)
    session.commit()
    u1_id = u1.id

    session.expire_all()
    u1.addresses.append(Address(email_address="a2"))

    # the flush refreshes the expired User row and INSERTs the new
    # address -- no SELECT of the addresses collection appears
    self.assert_sql_execution(
        testing.db,
        session.flush,
        CompiledSQL(
            "SELECT users.id AS users_id, users.name AS users_name "
            "FROM users WHERE users.id = :param_1",
            lambda ctx: [{"param_1": u1_id}],
        ),
        CompiledSQL(
            "INSERT INTO addresses (user_id, email_address) "
            "VALUES (:user_id, :email_address)",
            lambda ctx: [{"email_address": "a2", "user_id": u1_id}],
        ),
    )
def test_threelevel_selectin_to_inline_mapped(self):
    # Geometry: a -> b (selectin), a -> c (selectin) with single-table
    # inline subclasses d and e.  Loading all "a" rows should emit one
    # base SELECT plus one selectin SELECT covering the c branch.
    self._fixture_from_geometry(
        {
            "a": {
                "subclasses": {
                    "b": {"polymorphic_load": "selectin"},
                    "c": {
                        "subclasses": {
                            "d": {
                                "polymorphic_load": "inline",
                                "single": True,
                            },
                            "e": {
                                "polymorphic_load": "inline",
                                "single": True,
                            },
                        },
                        "polymorphic_load": "selectin",
                    },
                }
            }
        }
    )
    a, b, c, d, e = self.classes("a", "b", "c", "d", "e")

    sess = Session()
    sess.add_all([d(d_data="d1"), e(e_data="e1")])
    sess.commit()

    q = sess.query(a)

    result = self.assert_sql_execution(
        testing.db,
        q.all,
        CompiledSQL(
            "SELECT a.type AS a_type, a.id AS a_id, "
            "a.a_data AS a_a_data FROM a",
            {},
        ),
        # the selectin statement may render c.d_data/c.e_data in either
        # order, so accept both forms
        Or(
            CompiledSQL(
                "SELECT a.type AS a_type, c.id AS c_id, a.id AS a_id, "
                "c.c_data AS c_c_data, c.e_data AS c_e_data, "
                "c.d_data AS c_d_data "
                "FROM a JOIN c ON a.id = c.id "
                "WHERE a.id IN ([EXPANDING_primary_keys]) ORDER BY a.id",
                [{"primary_keys": [1, 2]}],
            ),
            CompiledSQL(
                "SELECT a.type AS a_type, c.id AS c_id, a.id AS a_id, "
                "c.c_data AS c_c_data, "
                "c.d_data AS c_d_data, c.e_data AS c_e_data "
                "FROM a JOIN c ON a.id = c.id "
                "WHERE a.id IN ([EXPANDING_primary_keys]) ORDER BY a.id",
                [{"primary_keys": [1, 2]}],
            ),
        ),
    )
    # objects were fully populated by the two statements above; no
    # additional SQL may be emitted while comparing them
    with self.assert_statement_count(testing.db, 0):
        eq_(result, [d(d_data="d1"), e(e_data="e1")])
def _fixture(self):
    """Commit one Graph holding Version(1, 1); return the live session."""
    Graph, Version = self.classes.Graph, self.classes.Version
    session = Session()
    session.add(Graph(Version(1, 1)))
    session.commit()
    return session
def _fixture(self):
    """Seed ten identical Parent rows, then close the session."""
    Parent = self.classes.Parent
    session = Session()
    session.add_all(
        [
            Parent(data1="d1", data2="d2", data3="d3", data4="d4")
            for _ in range(10)
        ]
    )
    session.commit()
    session.close()
def test_round_trip_ok(self):
    """A plain tuple assigned to the composite attribute comes back as
    a Point after commit."""
    session = Session()
    f = Foo()
    f.data = (3, 4)
    session.add(f)
    session.commit()

    eq_(f.data, Point(3, 4))
def test_alias_pathing(self):
    # Verify that subqueryload against an aliased (of_type) subclass
    # path does not leak memory across repeated query runs; growth is
    # measured by the @profile_memory decorator.
    metadata = MetaData(self.engine)
    a = Table(
        "a",
        metadata,
        Column(
            "id", Integer, primary_key=True, test_needs_autoincrement=True
        ),
        Column("bid", Integer, ForeignKey("b.id")),
        Column("type", String(30)),
    )
    asub = Table(
        "asub",
        metadata,
        Column("id", Integer, ForeignKey("a.id"), primary_key=True),
        Column("data", String(30)),
    )
    b = Table(
        "b",
        metadata,
        Column(
            "id", Integer, primary_key=True, test_needs_autoincrement=True
        ),
    )
    mapper(A, a, polymorphic_identity="a", polymorphic_on=a.c.type)
    mapper(ASub, asub, inherits=A, polymorphic_identity="asub")
    mapper(B, b, properties={"as_": relationship(A)})

    metadata.create_all()
    sess = Session()
    a1 = ASub(data="a1")
    a2 = ASub(data="a2")
    a3 = ASub(data="a3")
    b1 = B(as_=[a1, a2, a3])
    sess.add(b1)
    sess.commit()
    # drop the seeding session so it does not hold references during
    # the memory measurement
    del sess

    # sqlite has a slow enough growth here
    # that we have to run it more times to see the
    # "dip" again
    @profile_memory(maxtimes=120)
    def go():
        sess = Session()
        sess.query(B).options(subqueryload(B.as_.of_type(ASub))).all()
        sess.close()

    try:
        go()
    finally:
        # always tear down tables and mapper registry, even on failure
        metadata.drop_all()
        clear_mappers()
def test_replace(self):
    """Wholesale reassignment of the mutable dict is persisted."""
    session = Session()
    f1 = Foo(data={"a": "b"})
    session.add(f1)
    session.flush()

    f1.data = {"b": "c"}
    session.commit()
    eq_(f1.data, {"b": "c"})
def insert_data(cls, connection):
    """Seed a single A with its nested B/C/D and E/F/G object graph."""
    A, B, C, D, E, F, G = cls.classes("A", "B", "C", "D", "E", "F", "G")
    session = Session(connection)
    root = A(
        bs=[B(cs=[C(ds=[D()])]), B(cs=[C()])],
        es=[E(fs=[F()], gs=[G()])],
    )
    session.add(root)
    session.commit()
def _test_roundtrip(self):
    """An Edge composed of two Point composites survives commit/reload."""
    Edge, Point = self.classes.Edge, self.classes.Point
    session = Session()
    session.add(Edge(start=Point(3, 4), end=Point(5, 6)))
    session.commit()

    eq_(
        session.query(Edge).one(),
        Edge(start=Point(3, 4), end=Point(5, 6)),
    )
def test_persist(self):
    """A nested composite C("b1", B(...)) round-trips intact."""
    A, C, B = self.classes.A, self.classes.C, self.classes.B
    session = Session()
    session.add(A(c=C("b1", B(data="b2"))))
    session.commit()

    loaded = session.query(A).one()
    eq_(loaded.c, C("b1", B(data="b2")))
def insert_data(cls, connection):
    """Seed 1000 A rows whose x/y/z/p/q/r columns encode the row id."""
    A = cls.classes.A
    session = Session(connection)
    session.add_all(
        [
            A(
                id=i,
                **{
                    letter: "%s%d" % (letter, i)
                    for letter in ["x", "y", "z", "p", "q", "r"]
                }
            )
            for i in range(1, 1001)
        ]
    )
    session.commit()
def test_set_none(self):
    """A None composite materializes as Point(None, None), and its
    fields remain individually settable afterwards."""
    session = Session()
    f1 = Foo(data=None)
    session.add(f1)
    session.commit()
    eq_(f1.data, Point(None, None))

    f1.data.y = 5
    session.commit()
    eq_(f1.data, Point(None, 5))
def test_basic(self):
    """Without mutation tracking, an in-place change made after the
    flush is not persisted."""
    session = Session()
    f1 = Foo(data=Point(3, 4))
    session.add(f1)
    session.flush()

    f1.data.x = 5
    session.commit()

    # we didn't get the change.
    eq_(f1.data.x, 3)
def test_replace_itself_still_ok(self):
    """Reassigning the attribute to its own current value keeps change
    tracking alive for subsequent mutations."""
    session = Session()
    f1 = Foo(data={"a": "b"})
    session.add(f1)
    session.flush()

    f1.data = f1.data
    f1.data["b"] = "c"
    session.commit()
    eq_(f1.data, {"a": "b", "b": "c"})
def test_unrelated_flush(self):
    """A flush driven by an unrelated attribute does not lose later
    in-place changes to the mutable dict."""
    session = Session()
    f1 = Foo(data={"a": "b"}, unrelated_data="unrelated")
    session.add(f1)
    session.flush()

    f1.unrelated_data = "unrelated 2"
    session.flush()

    f1.data["a"] = "c"
    session.commit()
    eq_(f1.data["a"], "c")
def test_unrelated_flush(self):
    """A flush driven by an unrelated attribute does not lose later
    in-place changes to the mutable list."""
    session = Session()
    f1 = Foo(data=[1, 2], unrelated_data="unrelated")
    session.add(f1)
    session.flush()

    f1.unrelated_data = "unrelated 2"
    session.flush()

    f1.data[0] = 3
    session.commit()
    eq_(f1.data[0], 3)
def test_unrelated_flush(self):
    """A flush driven by an unrelated attribute does not lose later
    in-place changes to the mutable set."""
    session = Session()
    f1 = Foo(data=set([1, 2]), unrelated_data="unrelated")
    session.add(f1)
    session.flush()

    f1.unrelated_data = "unrelated 2"
    session.flush()

    f1.data.add(3)
    session.commit()
    eq_(f1.data, set([1, 2, 3]))