def test_threelevel_selectin_to_inline_mapped(self):
    """Three-level hierarchy a -> (b, c) -> c -> (d, e) where d/e are
    single-table "inline" loads under a "selectin"-loaded c: querying the
    base must emit one base SELECT plus one selectin SELECT against c.
    """
    self._fixture_from_geometry(
        {
            "a": {
                "subclasses": {
                    "b": {"polymorphic_load": "selectin"},
                    "c": {
                        "subclasses": {
                            "d": {
                                "polymorphic_load": "inline",
                                "single": True,
                            },
                            "e": {
                                "polymorphic_load": "inline",
                                "single": True,
                            },
                        },
                        "polymorphic_load": "selectin",
                    },
                }
            }
        }
    )
    a, b, c, d, e = self.classes("a", "b", "c", "d", "e")
    sess = Session()
    sess.add_all([d(d_data="d1"), e(e_data="e1")])
    sess.commit()

    q = sess.query(a)

    result = self.assert_sql_execution(
        testing.db,
        q.all,
        # first statement: plain load of the base table
        CompiledSQL(
            "SELECT a.type AS a_type, a.id AS a_id, "
            "a.a_data AS a_a_data FROM a",
            {},
        ),
        # second statement: selectin load against c; the two alternatives
        # differ only in the relative order of the c.d_data / c.e_data
        # columns, which is not deterministic, so accept either ordering
        Or(
            CompiledSQL(
                "SELECT a.type AS a_type, c.id AS c_id, a.id AS a_id, "
                "c.c_data AS c_c_data, c.e_data AS c_e_data, "
                "c.d_data AS c_d_data "
                "FROM a JOIN c ON a.id = c.id "
                "WHERE a.id IN ([EXPANDING_primary_keys]) ORDER BY a.id",
                [{"primary_keys": [1, 2]}],
            ),
            CompiledSQL(
                "SELECT a.type AS a_type, c.id AS c_id, a.id AS a_id, "
                "c.c_data AS c_c_data, "
                "c.d_data AS c_d_data, c.e_data AS c_e_data "
                "FROM a JOIN c ON a.id = c.id "
                "WHERE a.id IN ([EXPANDING_primary_keys]) ORDER BY a.id",
                [{"primary_keys": [1, 2]}],
            ),
        ),
    )

    # everything needed was loaded up front; comparing the objects must
    # emit no further SQL
    with self.assert_statement_count(testing.db, 0):
        eq_(result, [d(d_data="d1"), e(e_data="e1")])
def test_self_referential_bidirectional_mutation(self):
    """Mutating either side of a self-referential many-to-many backref
    leaves both collections in sync after commit."""
    place, Place, place_place = (
        self.tables.place,
        self.classes.Place,
        self.tables.place_place,
    )

    mapper(
        Place,
        place,
        properties={
            "child_places": relationship(
                Place,
                secondary=place_place,
                primaryjoin=place.c.place_id == place_place.c.pl1_id,
                secondaryjoin=place.c.place_id == place_place.c.pl2_id,
                order_by=place_place.c.pl2_id,
                backref="parent_places",
            )
        },
    )

    session = Session()
    place_one = Place("place1")
    place_two = Place("place2")

    # mutate via assignment on one side, then append on the other
    place_two.parent_places = [place_one]
    session.add_all([place_one, place_two])
    place_one.parent_places.append(place_two)
    session.commit()

    assert place_one in place_two.parent_places
    assert place_two in place_one.parent_places
def _roundtrip(self):
    """Persist three Bar rows and one Foo row, then verify they come
    back in id order with the expected attribute values."""
    registry = Base._decl_class_registry
    Foo = registry["Foo"]
    Bar = registry["Bar"]

    session = Session(testing.db)
    session.add_all(
        [
            Bar(data="d1", bar_data="b1"),
            Bar(data="d2", bar_data="b2"),
            Bar(data="d3", bar_data="b3"),
            Foo(data="d4"),
        ]
    )
    session.commit()

    expected = [
        Bar(data="d1", bar_data="b1"),
        Bar(data="d2", bar_data="b2"),
        Bar(data="d3", bar_data="b3"),
        Foo(data="d4"),
    ]
    eq_(session.query(Foo).order_by(Foo.id).all(), expected)
def test_partial_load_no_invoke_eagers(self):
    """Regression test for issue #4199: selectin polymorphic loading must
    tolerate a reduced set of states when eager invocation is disabled."""
    self._fixture_from_geometry(
        {
            "a": {
                "subclasses": {
                    "a1": {"polymorphic_load": "selectin"},
                    "a2": {"polymorphic_load": "selectin"},
                }
            }
        }
    )
    a, a1, a2 = self.classes("a", "a1", "a2")

    session = Session()
    obj_one = a1()
    obj_two = a2()
    session.add_all([obj_one, obj_two])
    # drop the strong reference to the a2 instance before flushing
    del obj_two
    session.flush()
    session.expire_all()

    # _with_invoke_all_eagers(False), used by the lazy loader strategy,
    # will cause one less state to be present such that the poly loader
    # won't locate a state limited to the "a1" mapper; needs to test
    # that it has states
    session.query(a)._with_invoke_all_eagers(False).all()
def test_bulk_save_updated_include_unchanged(self):
    """bulk_save_objects(update_changed_only=False) emits an UPDATE for
    every object, including the one whose name was never changed."""
    (User,) = self.classes("User")

    seed_session = Session(expire_on_commit=False)
    users = [User(name="u1"), User(name="u2"), User(name="u3")]
    seed_session.add_all(users)
    seed_session.commit()

    # modify the first and last objects only; u2 stays as persisted
    users[0].name = "u1new"
    users[2].name = "u3new"

    session = Session()
    with self.sql_execution_asserter() as asserter:
        session.bulk_save_objects(users, update_changed_only=False)

    asserter.assert_(
        CompiledSQL(
            "UPDATE users SET name=:name WHERE "
            "users.id = :users_id",
            [
                {"users_id": 1, "name": "u1new"},
                {"users_id": 2, "name": "u2"},
                {"users_id": 3, "name": "u3new"},
            ],
        )
    )
def _unhashable_fixture(self, metadata, load_on_pending=False):
    """Build Category/Article mapped over a dict-valued (unhashable)
    column type, related on that column rather than a foreign key.

    Returns (Category, Article, session, article, category); when
    *load_on_pending* is True the Article is left pending.
    """

    class MyHashType(sa.TypeDecorator):
        # persist the dict as a deterministic "k=v;k=v" string
        impl = sa.String(100)

        def process_bind_param(self, value, dialect):
            pairs = sorted(value.items(), key=lambda pair: pair[0])
            return ";".join("%s=%s" % (k, v) for k, v in pairs)

        def process_result_value(self, value, dialect):
            return dict(token.split("=", 1) for token in value.split(";"))

    category = Table(
        "category",
        metadata,
        Column("id", Integer, primary_key=True),
        Column("data", MyHashType()),
    )
    article = Table(
        "article",
        metadata,
        Column("id", Integer, primary_key=True),
        Column("data", MyHashType()),
    )

    class Category(fixtures.ComparableEntity):
        pass

    class Article(fixtures.ComparableEntity):
        pass

    mapper(Category, category)
    mapper(
        Article,
        article,
        properties={
            "category": relationship(
                Category,
                primaryjoin=orm.foreign(article.c.data) == category.c.data,
                load_on_pending=load_on_pending,
            )
        },
    )

    metadata.create_all()
    sess = Session(autoflush=False)

    data = {"im": "unhashable"}
    a1 = Article(id=1, data=data)
    c1 = Category(id=1, data=data)

    if load_on_pending:
        # only the Category is flushed; the Article stays pending
        sess.add(c1)
    else:
        sess.add_all([c1, a1])
    sess.flush()
    if load_on_pending:
        sess.add(a1)

    return Category, Article, sess, a1, c1
def _fixture(self):
    """Seed ten identical Parent rows, commit, and close the session."""
    Parent = self.classes.Parent

    session = Session()
    session.add_all(
        [
            Parent(data1="d1", data2="d2", data3="d3", data4="d4")
            for _ in range(10)
        ]
    )
    session.commit()
    session.close()
def insert_data(cls, connection):
    """Insert 1000 A rows; each column x/y/z/p/q/r gets '<letter><id>'."""
    A = cls.classes.A
    session = Session(connection)
    letters = ["x", "y", "z", "p", "q", "r"]
    session.add_all(
        [
            A(
                id=num,
                **{letter: "%s%d" % (letter, num) for letter in letters}
            )
            for num in range(1, 1001)
        ]
    )
    session.commit()
def insert_data(cls, connection):
    """Create one Parent -> ChildSubclass1 -> Other chain of rows."""
    Parent, ChildSubclass1, Other = cls.classes(
        "Parent", "ChildSubclass1", "Other"
    )
    session = Session(connection)

    root = Parent(id=1)
    child = ChildSubclass1(id=1, parent=root)
    related = Other(id=1, child_subclass=child)

    session.add_all([root, child, related])
    session.commit()
def insert_data(cls, connection):
    """Three A rows; each a_id then gets twenty B rows, every B holding
    two fresh C children."""
    A, B, C = cls.classes("A", "B", "C")
    session = Session(connection)

    session.add_all([A(id=ident) for ident in range(1, 4)])
    session.flush()

    b_rows = []
    for ident in range(1, 4):
        for _ in range(1, 21):
            b_rows.append(B(a_id=ident, cs=[C(), C()]))
    session.add_all(b_rows)
    session.commit()
def test_query(self):
    """A composite value C can be used directly in an equality filter."""
    A = self.classes.A
    C = self.classes.C
    B = self.classes.B

    session = Session()
    b1 = B(data="b1")
    b2 = B(data="b2")
    first = A(c=C("a1b1", b1))
    second = A(c=C("a2b1", b2))
    session.add_all([first, second])
    session.commit()

    eq_(session.query(A).filter(A.c == C("a2b1", b2)).one(), second)
def insert_data(cls, connection):
    """Four A rows, each with four Bs, one C and four Ds; child ids are
    derived from the parent id so they never collide."""
    A = cls.classes.A
    B = cls.classes.B
    C = cls.classes.C
    D = cls.classes.D

    session = Session(connection)
    rows = []
    for i in range(1, 5):
        rows.append(
            A(
                id=i,
                bs=[B(id=(i * 5) + j) for j in range(1, 5)],
                c=C(id=i),
                ds=[D(id=(i * 5) + j) for j in range(1, 5)],
            )
        )
    session.add_all(rows)
    session.commit()
def insert_data(cls, connection):
    """Ten Data rows, each carrying five Other children with derived
    o1 values."""
    Data = cls.classes.Data
    Other = cls.classes.Other

    session = Session(connection)
    session.add_all(
        [
            Data(
                d1="d%dd1" % i,
                d2="d%dd2" % i,
                d3="d%dd3" % i,
                others=[Other(o1="d%do%d" % (i, j)) for j in range(5)],
            )
            for i in range(10)
        ]
    )
    session.commit()
def insert_data(cls, connection):
    """Two A rows with B children; every x/y value is below 10."""
    A = cls.classes.A
    B = cls.classes.B

    session = Session(connection)
    first = A(x=5, y=5, bs=[B(x=4, y=4), B(x=2, y=8), B(x=7, y=1)])
    second = A(x=7, y=5, bs=[B(x=4, y=4), B(x=5, y=8)])
    session.add_all([first, second])
    session.commit()
def _test_comparator_behavior(self):
    """Composite Point attribute supports ==, != and comparison to None."""
    Edge = self.classes.Edge
    Point = self.classes.Point

    session = Session()
    edge_one = Edge(Point(3, 4), Point(5, 6))
    edge_two = Edge(Point(14, 5), Point(2, 7))
    session.add_all([edge_one, edge_two])
    session.commit()

    assert (
        session.query(Edge).filter(Edge.start == Point(3, 4)).one()
        is edge_one
    )
    assert (
        session.query(Edge).filter(Edge.start != Point(3, 4)).first()
        is edge_two
    )
    # no edge has a NULL start; comparing the composite to None matches
    # nothing
    eq_(session.query(Edge).filter(Edge.start == None).all(), [])  # noqa
def insert_data(cls, connection):
    """Mixed polymorphic rows: two Engineers, one Manager, one Person."""
    Engineer = cls.classes.Engineer
    Person = cls.classes.Person
    Manager = cls.classes.Manager

    session = Session(connection)
    session.add_all(
        [
            Engineer(name="e1", engineer_name="e1"),
            Manager(name="m1", manager_name="m1"),
            Engineer(name="e2", engineer_name="e2"),
            Person(name="p1"),
        ]
    )
    session.commit()
def test_self_referential_roundtrip(self):
    """Self-referential many-to-many through place_place: after commit,
    each place's 'places' collection holds exactly the appended targets,
    ordered by pl2_id."""
    place, Place, place_place = (
        self.tables.place,
        self.classes.Place,
        self.tables.place_place,
    )

    mapper(
        Place,
        place,
        properties={
            "places": relationship(
                Place,
                secondary=place_place,
                primaryjoin=place.c.place_id == place_place.c.pl1_id,
                secondaryjoin=place.c.place_id == place_place.c.pl2_id,
                order_by=place_place.c.pl2_id,
            )
        },
    )

    session = Session()
    p1, p2, p3, p4, p5, p6, p7 = [
        Place("place%d" % num) for num in range(1, 8)
    ]
    session.add_all([p1, p2, p3, p4, p5, p6, p7])

    p1.places.append(p2)
    p1.places.append(p3)
    p5.places.append(p6)
    p6.places.append(p1)
    p7.places.append(p1)
    p1.places.append(p5)
    p4.places.append(p3)
    p3.places.append(p4)
    session.commit()

    eq_(p1.places, [p2, p3, p5])
    eq_(p5.places, [p6])
    eq_(p7.places, [p1])
    eq_(p6.places, [p1])
    eq_(p4.places, [p3])
    eq_(p3.places, [p4])
    eq_(p2.places, [])
def test_expire_lots(self):
    """Profile expire_all() over 10 parents, each with 10 children."""
    Parent, Child = self.classes.Parent, self.classes.Child

    instances = [
        Parent(children=[Child() for _ in range(10)]) for _ in range(10)
    ]
    session = Session()
    session.add_all(instances)
    session.flush()

    @profiling.function_call_count()
    def go():
        session.expire_all()

    go()
def test_bulk_update(self):
    """bulk_update_mappings emits one batched UPDATE keyed on id."""
    (User,) = self.classes("User")

    seed_session = Session(expire_on_commit=False)
    seed_session.add_all(
        [User(name="u1"), User(name="u2"), User(name="u3")]
    )
    seed_session.commit()

    session = Session()
    with self.sql_execution_asserter() as asserter:
        session.bulk_update_mappings(
            User,
            [
                {"id": 1, "name": "u1new"},
                {"id": 2, "name": "u2"},
                {"id": 3, "name": "u3new"},
            ],
        )

    asserter.assert_(
        CompiledSQL(
            "UPDATE users SET name=:name WHERE users.id = :users_id",
            [
                {"users_id": 1, "name": "u1new"},
                {"users_id": 2, "name": "u2"},
                {"users_id": 3, "name": "u3new"},
            ],
        )
    )
def test_query(self):
    """All the JSON-derived attributes resolve to the same row when
    filtering on the value 10."""
    Json = self.classes.Json

    session = Session(testing.db)
    session.add_all(
        [Json(), Json(json={"field": 10}), Json(json={"field": 20})]
    )
    session.commit()

    by_cast = (
        session.query(Json)
        .filter(Json.json["field"].astext.cast(Integer) == 10)
        .one()
    )
    by_field = session.query(Json).filter(Json.field.astext == "10").one()
    eq_(by_cast.id, by_field.id)

    other_row = session.query(Json).filter(Json.field.astext == "20").one()
    ne_(by_cast.id, other_row.id)

    by_json_field = (
        session.query(Json).filter(Json.json_field.astext == "10").one()
    )
    eq_(by_field.id, by_json_field.id)

    by_int = session.query(Json).filter(Json.int_field == 10).one()
    eq_(by_field.id, by_int.id)

    by_text = session.query(Json).filter(Json.text_field == "10").one()
    eq_(by_field.id, by_text.id)
def test_query(self):
    """Whole-array equality and first-element expressions (one-based
    'first', zero-based 'first0') select the expected rows."""
    Array = self.classes.Array

    session = Session(testing.db)
    session.add_all(
        [
            Array(),
            Array(array=[1, 2, 3], array0=[1, 2, 3]),
            Array(array=[4, 5, 6], array0=[4, 5, 6]),
        ]
    )
    session.commit()

    whole_match = session.query(Array).filter(
        Array.array == [1, 2, 3]
    ).one()
    first_is_one = session.query(Array).filter(Array.first == 1).one()
    eq_(whole_match.id, first_is_one.id)

    first_is_four = session.query(Array).filter(Array.first == 4).one()
    ne_(whole_match.id, first_is_four.id)

    zero_is_one = session.query(Array).filter(Array.first0 == 1).one()
    eq_(whole_match.id, zero_is_one.id)

    zero_is_four = session.query(Array).filter(Array.first0 == 4).one()
    ne_(whole_match.id, zero_is_four.id)
def test_remove_scalar(self):
    """Setting a uselist=False backref to None removes that association
    row while leaving the sibling's row intact."""
    self._bidirectional_onescalar_fixture()
    A, B = self.classes.A, self.classes.B
    secondary = self.tables.secondary

    session = Session()
    session.add_all([A(data="a1", bs=[B(data="b1"), B(data="b2")])])
    session.commit()

    a1 = session.query(A).filter_by(data="a1").one()
    b2 = session.query(B).filter_by(data="b2").one()
    assert b2.a is a1

    # detach b2 from its parent via the scalar side
    b2.a = None
    session.commit()

    eq_(a1.bs, [B(data="b1")])
    eq_(b2.a, None)
    eq_(session.query(secondary).count(), 1)
def test_session_delete(self):
    """Deleting each A removes its row from the secondary table."""
    self._standard_bidirectional_fixture()
    A, B = self.classes.A, self.classes.B
    secondary = self.tables.secondary

    session = Session()
    session.add_all(
        [
            A(data="a1", bs=[B(data="b1")]),
            A(data="a2", bs=[B(data="b2")]),
        ]
    )
    session.commit()

    first = session.query(A).filter_by(data="a1").one()
    session.delete(first)
    session.flush()
    eq_(session.query(secondary).count(), 1)

    second = session.query(A).filter_by(data="a2").one()
    session.delete(second)
    session.flush()
    eq_(session.query(secondary).count(), 0)
def test_mapper_args_deferred(self):
    """A declared_attr __mapper_args__ must not be invoked until *after*
    table reflection has populated cls.__table__."""

    class User(decl.DeferredReflection, fixtures.ComparableEntity, Base):
        __tablename__ = "users"

        @decl.declared_attr
        def __mapper_args__(cls):
            # only valid once reflection has supplied __table__
            return {"primary_key": cls.__table__.c.id}

    decl.DeferredReflection.prepare(testing.db)

    session = Session()
    session.add_all(
        [User(name=letter) for letter in ("G", "Q", "A", "C")]
    )
    session.commit()

    eq_(
        session.query(User).order_by(User.name).all(),
        [User(name="A"), User(name="C"), User(name="G"), User(name="Q")],
    )
def test_lazyload_singlecast(self):
    """The lazy load of Person.pets renders a single CAST around the
    bound person_id parameter."""
    Person = self.classes.Person
    Pet = self.classes.Pet

    session = Session()
    session.add_all([Person(id=5), Pet(id=1, person_id=5)])
    session.commit()

    person = session.query(Person).first()
    with self.sql_execution_asserter() as asserter:
        person.pets

    asserter.assert_(
        CompiledSQL(
            "SELECT pets.id AS pets_id, pets.person_id "
            "AS pets_person_id FROM pets "
            "WHERE pets.person_id = CAST(:param_1 AS INTEGER)",
            [{"param_1": 5}],
        )
    )
def test_custom_comparator_factory(self):
    """The custom comparator's near() method filters edges by proximity
    of their start point."""
    self._fixture(True)
    Edge, Point = self.classes.Edge, self.classes.Point

    far_edge = Edge(Point(0, 0), Point(3, 5))
    close_edge = Edge(Point(0, 1), Point(3, 5))
    session = Session()
    session.add_all([far_edge, close_edge])
    session.commit()

    near_edges = (
        session.query(Edge).filter(Edge.start.near(Point(1, 1), 1)).all()
    )
    assert far_edge not in near_edges
    assert close_edge in near_edges

    near_edges = (
        session.query(Edge).filter(Edge.start.near(Point(0, 1), 1)).all()
    )
    assert far_edge in near_edges and close_edge in near_edges
def insert_data(cls, connection):
    """Persist the class-level fixture objects on the given connection."""
    session = Session(connection)
    session.add_all(cls._fixture())
    session.commit()
def test_bidirectional(self):
    """Two many-to-many relationships between Place and Transition
    (inputs/outputs over place_output/place_input), each with a backref:
    appending on either side must populate both collections.
    """
    place_input, transition, Transition, Place, place, place_output = (
        self.tables.place_input,
        self.tables.transition,
        self.classes.Transition,
        self.classes.Place,
        self.tables.place,
        self.tables.place_output,
    )

    mapper(Place, place)
    mapper(
        Transition,
        transition,
        properties=dict(
            inputs=relationship(
                Place,
                place_output,
                backref=backref(
                    "inputs", order_by=transition.c.transition_id
                ),
                order_by=Place.place_id,
            ),
            outputs=relationship(
                Place,
                place_input,
                backref=backref(
                    "outputs", order_by=transition.c.transition_id
                ),
                order_by=Place.place_id,
            ),
        ),
    )

    t1 = Transition("transition1")
    t2 = Transition("transition2")
    t3 = Transition("transition3")
    p1 = Place("place1")
    p2 = Place("place2")
    p3 = Place("place3")

    sess = Session()
    sess.add_all([p3, p1, t1, t2, p2, t3])

    # mutate from both directions: some appends go through the
    # Transition side, some through the Place backref side
    t1.inputs.append(p1)
    t1.inputs.append(p2)
    t1.outputs.append(p3)
    t2.inputs.append(p1)
    p2.inputs.append(t2)
    p3.inputs.append(t2)
    p1.outputs.append(t1)
    sess.commit()

    # t1.outputs gained p3 directly and p1 via p1.outputs.append(t1)
    self.assert_result(
        [t1],
        Transition,
        {"outputs": (Place, [{"name": "place3"}, {"name": "place1"}])},
    )
    # p2.inputs gained t1 via t1.inputs.append(p2) and t2 directly
    self.assert_result(
        [p2],
        Place,
        {
            "inputs": (
                Transition,
                [{"name": "transition1"}, {"name": "transition2"}],
            )
        },
    )
def test_update_computed(self, eager):
    """UPDATE of rows with a computed 'bar' column.

    Three expected SQL shapes depending on the fixture:
    * eager + implicit RETURNING: per-row UPDATE ... RETURNING test.bar
    * eager without RETURNING: per-row UPDATE followed by per-row SELECT
      of the computed column
    * non-eager: one batched UPDATE; the SELECTs happen lazily when
      .bar is accessed
    """
    if eager:
        Thing = self.classes.Thing
    else:
        Thing = self.classes.ThingNoEager

    s = Session()
    t1, t2 = (Thing(id=1, foo=1), Thing(id=2, foo=2))
    s.add_all([t1, t2])
    s.flush()

    t1.foo = 5
    t2.foo = 6

    # the .bar accesses happen inside the asserter block so that any
    # lazy SELECT they trigger is captured below
    with assert_engine(testing.db) as asserter:
        s.flush()
        eq_(t1.bar, 5 + 42)
        eq_(t2.bar, 6 + 42)

    if eager and testing.db.dialect.implicit_returning:
        # computed value comes back with the UPDATE itself
        asserter.assert_(
            CompiledSQL(
                "UPDATE test SET foo=%(foo)s "
                "WHERE test.id = %(test_id)s "
                "RETURNING test.bar",
                [{"foo": 5, "test_id": 1}],
                dialect="postgresql",
            ),
            CompiledSQL(
                "UPDATE test SET foo=%(foo)s "
                "WHERE test.id = %(test_id)s "
                "RETURNING test.bar",
                [{"foo": 6, "test_id": 2}],
                dialect="postgresql",
            ),
        )
    elif eager:
        # no RETURNING available: each UPDATE is followed by a SELECT
        # refreshing the computed column
        asserter.assert_(
            CompiledSQL(
                "UPDATE test SET foo=:foo WHERE test.id = :test_id",
                [{"foo": 5, "test_id": 1}],
            ),
            CompiledSQL(
                "UPDATE test SET foo=:foo WHERE test.id = :test_id",
                [{"foo": 6, "test_id": 2}],
            ),
            CompiledSQL(
                "SELECT test.bar AS test_bar FROM test "
                "WHERE test.id = :param_1",
                [{"param_1": 1}],
            ),
            CompiledSQL(
                "SELECT test.bar AS test_bar FROM test "
                "WHERE test.id = :param_1",
                [{"param_1": 2}],
            ),
        )
    else:
        # non-eager: a single batched UPDATE, then one SELECT per row as
        # the expired .bar attributes are read
        asserter.assert_(
            CompiledSQL(
                "UPDATE test SET foo=:foo WHERE test.id = :test_id",
                [{"foo": 5, "test_id": 1}, {"foo": 6, "test_id": 2}],
            ),
            CompiledSQL(
                "SELECT test.bar AS test_bar FROM test "
                "WHERE test.id = :param_1",
                [{"param_1": 1}],
            ),
            CompiledSQL(
                "SELECT test.bar AS test_bar FROM test "
                "WHERE test.id = :param_1",
                [{"param_1": 2}],
            ),
        )
def test_threelevel_selectin_to_inline_awkward_alias_options(self):
    """selectin_polymorphic() given a with_polymorphic() entity built on a
    hand-made aliased subquery ("poly"): the selectin load must adapt the
    base entity "a" into that alias.
    """
    self._fixture_from_geometry(
        {
            "a": {
                "subclasses": {
                    "b": {},
                    "c": {"subclasses": {"d": {}, "e": {}}},
                }
            }
        }
    )
    a, b, c, d, e = self.classes("a", "b", "c", "d", "e")
    sess = Session()
    sess.add_all([d(d_data="d1"), e(e_data="e1")])
    sess.commit()

    from sqlalchemy_1_3 import select

    a_table, c_table, d_table, e_table = self.tables("a", "c", "d", "e")

    # hand-built polymorphic selectable joining a->c with outer joins to
    # d and e, wrapped as an alias named "poly"
    poly = (
        select([a_table.c.id, a_table.c.type, c_table, d_table, e_table])
        .select_from(
            a_table.join(c_table).outerjoin(d_table).outerjoin(e_table)
        )
        .apply_labels()
        .alias("poly")
    )

    c_alias = with_polymorphic(c, (d, e), poly)

    q = (
        sess.query(a)
        .options(selectin_polymorphic(a, [b, c_alias]))
        .order_by(a.id)
    )

    result = self.assert_sql_execution(
        testing.db,
        q.all,
        CompiledSQL(
            "SELECT a.type AS a_type, a.id AS a_id, "
            "a.a_data AS a_a_data FROM a ORDER BY a.id",
            {},
        ),
        # here, the test is that the adaptation of "a" takes place; the
        # two alternatives differ only in the d/e column ordering, which
        # is not deterministic
        Or(
            CompiledSQL(
                "SELECT poly.a_type AS poly_a_type, "
                "poly.c_id AS poly_c_id, "
                "poly.a_id AS poly_a_id, poly.c_c_data AS poly_c_c_data, "
                "poly.e_id AS poly_e_id, poly.e_e_data AS poly_e_e_data, "
                "poly.d_id AS poly_d_id, poly.d_d_data AS poly_d_d_data "
                "FROM (SELECT a.id AS a_id, a.type AS a_type, "
                "c.id AS c_id, "
                "c.c_data AS c_c_data, d.id AS d_id, "
                "d.d_data AS d_d_data, "
                "e.id AS e_id, e.e_data AS e_e_data FROM a JOIN c "
                "ON a.id = c.id LEFT OUTER JOIN d ON c.id = d.id "
                "LEFT OUTER JOIN e ON c.id = e.id) AS poly "
                "WHERE poly.a_id IN ([EXPANDING_primary_keys]) "
                "ORDER BY poly.a_id",
                [{"primary_keys": [1, 2]}],
            ),
            CompiledSQL(
                "SELECT poly.a_type AS poly_a_type, "
                "poly.c_id AS poly_c_id, "
                "poly.a_id AS poly_a_id, poly.c_c_data AS poly_c_c_data, "
                "poly.d_id AS poly_d_id, poly.d_d_data AS poly_d_d_data, "
                "poly.e_id AS poly_e_id, poly.e_e_data AS poly_e_e_data "
                "FROM (SELECT a.id AS a_id, a.type AS a_type, "
                "c.id AS c_id, c.c_data AS c_c_data, d.id AS d_id, "
                "d.d_data AS d_d_data, e.id AS e_id, "
                "e.e_data AS e_e_data FROM a JOIN c ON a.id = c.id "
                "LEFT OUTER JOIN d ON c.id = d.id "
                "LEFT OUTER JOIN e ON c.id = e.id) AS poly "
                "WHERE poly.a_id IN ([EXPANDING_primary_keys]) "
                "ORDER BY poly.a_id",
                [{"primary_keys": [1, 2]}],
            ),
        ),
    )

    # all needed attributes were loaded; the comparison emits no SQL
    with self.assert_statement_count(testing.db, 0):
        eq_(result, [d(d_data="d1"), e(e_data="e1")])