def test_transactional_advanced(self):
    canary = []

    class TrackProxy(ConnectionProxy):
        def __getattribute__(self, key):
            fn = object.__getattribute__(self, key)

            def go(*arg, **kw):
                canary.append(fn.__name__)
                return fn(*arg, **kw)
            return go

    engine = engines.testing_engine(options={'proxy': TrackProxy()})
    conn = engine.connect()

    trans = conn.begin()
    trans2 = conn.begin_nested()
    conn.execute(select([1]))
    trans2.rollback()
    trans2 = conn.begin_nested()
    conn.execute(select([1]))
    trans2.commit()
    trans.rollback()

    trans = conn.begin_twophase()
    conn.execute(select([1]))
    trans.prepare()
    trans.commit()

    # filter out statement-execution calls; only the transactional
    # proxy events are compared here
    canary = [t for t in canary if t not in ('cursor_execute', 'execute')]
    eq_(canary, ['begin', 'savepoint', 'rollback_savepoint', 'savepoint',
                 'release_savepoint', 'rollback', 'begin_twophase',
                 'prepare_twophase', 'commit_twophase'])
def test_popitem(self):
    data, wim = self._fixture()
    (needle, idx) = wim.popitem()
    assert needle in data
    eq_(len(data), (len(wim) + 1))
    assert id(needle) not in wim.by_id
def test_no_init(self):
    class Foo(object):
        pass

    eq_(
        util.generic_repr(Foo()),
        "Foo()"
    )
def test_retval_flag(self):
    canary = []

    def tracker(name):
        def go(conn, *args, **kw):
            canary.append(name)
        return go

    def execute(conn, clauseelement, multiparams, params):
        canary.append('execute')
        return clauseelement, multiparams, params

    def cursor_execute(conn, cursor, statement,
                       parameters, context, executemany):
        canary.append('cursor_execute')
        return statement, parameters

    engine = engines.testing_engine()

    assert_raises(
        tsa.exc.ArgumentError,
        event.listen, engine, "begin", tracker("begin"), retval=True
    )

    event.listen(engine, "before_execute", execute, retval=True)
    event.listen(engine, "before_cursor_execute", cursor_execute,
                 retval=True)
    engine.execute(select([1]))
    eq_(
        canary, ['execute', 'cursor_execute']
    )
def test_sets(self):
    # Py2K
    import sets
    # end Py2K

    class SetLike(object):
        def add(self):
            pass

    class ForcedSet(list):
        __emulates__ = set

    for type_ in (set,
                  # Py2K
                  sets.Set,
                  # end Py2K
                  SetLike, ForcedSet):
        eq_(util.duck_type_collection(type_), set)
        instance = type_()
        eq_(util.duck_type_collection(instance), set)

    for type_ in (frozenset,
                  # Py2K
                  sets.ImmutableSet
                  # end Py2K
                  ):
        is_(util.duck_type_collection(type_), None)
        instance = type_()
        is_(util.duck_type_collection(instance), None)
def test_tometadata_strip_schema(self):
    meta = MetaData()

    table = Table('mytable', meta,
        Column('myid', Integer, primary_key=True),
        Column('name', String(40), nullable=True),
        Column('description', String(30),
               CheckConstraint("description='hi'")),
        UniqueConstraint('name'),
        test_needs_fk=True,
    )

    table2 = Table('othertable', meta,
        Column('id', Integer, primary_key=True),
        Column('myid', Integer, ForeignKey('mytable.myid')),
        test_needs_fk=True,
    )

    meta2 = MetaData()
    table_c = table.tometadata(meta2, schema=None)
    table2_c = table2.tometadata(meta2, schema=None)

    eq_(str(table_c.join(table2_c).onclause),
        str(table_c.c.myid == table2_c.c.myid))
    eq_(str(table_c.join(table2_c).onclause),
        'mytable.myid = othertable.myid')
def test_assorted_repr(self):
    t1 = Table("foo", MetaData(), Column("x", Integer))
    i1 = Index("bar", t1.c.x)
    ck = schema.CheckConstraint("x > y", name="someconstraint")

    for const, exp in (
        (Sequence("my_seq"), "Sequence('my_seq')"),
        (Sequence("my_seq", start=5), "Sequence('my_seq', start=5)"),
        (Column("foo", Integer), "Column('foo', Integer(), table=None)"),
        (Table("bar", MetaData(), Column("x", String)),
            "Table('bar', MetaData(bind=None), "
            "Column('x', String(), table=<bar>), schema=None)"),
        (schema.DefaultGenerator(for_update=True),
            "DefaultGenerator(for_update=True)"),
        (schema.Index("bar"), "Index('bar')"),
        (i1, "Index('bar', Column('x', Integer(), table=<foo>))"),
        (schema.FetchedValue(), "FetchedValue()"),
        (ck,
            "CheckConstraint("
            "%s"
            ", name='someconstraint')" % repr(ck.sqltext)),
    ):
        eq_(
            repr(const),
            exp
        )
def test_nativeext_submanager(self):
    class Mine(instrumentation.ClassManager):
        pass

    class A(object):
        __sa_instrumentation_manager__ = Mine

    instrumentation.register_class(A)
    eq_(type(instrumentation.manager_of_class(A)), Mine)
def test_all_events(self):
    canary = []

    def before_attach(obj, parent):
        canary.append("%s->%s" % (obj.__class__.__name__,
                                  parent.__class__.__name__))

    def after_attach(obj, parent):
        canary.append("%s->%s" % (obj.__class__.__name__, parent))

    event.listen(schema.SchemaItem, "before_parent_attach", before_attach)
    event.listen(schema.SchemaItem, "after_parent_attach", after_attach)

    m = MetaData()
    t1 = Table('t1', m,
        Column('id', Integer, Sequence('foo_id'), primary_key=True),
        Column('bar', String, ForeignKey('t2.id'))
    )
    t2 = Table('t2', m,
        Column('id', Integer, primary_key=True),
    )

    eq_(
        canary,
        ['Sequence->Column', 'Sequence->id', 'ForeignKey->Column',
         'ForeignKey->bar', 'Table->MetaData',
         'PrimaryKeyConstraint->Table', 'PrimaryKeyConstraint->t1',
         'Column->Table', 'Column->t1', 'Column->Table', 'Column->t1',
         'ForeignKeyConstraint->Table', 'ForeignKeyConstraint->t1',
         'Table->MetaData(bind=None)', 'Table->MetaData',
         'PrimaryKeyConstraint->Table', 'PrimaryKeyConstraint->t2',
         'Column->Table', 'Column->t2', 'Table->MetaData(bind=None)']
    )
def test_basic(self):
    pjoin = polymorphic_union({'manager': managers_table,
                               'engineer': engineers_table},
                              'type', 'pjoin')

    employee_mapper = mapper(Employee, pjoin,
                             polymorphic_on=pjoin.c.type)
    manager_mapper = mapper(Manager, managers_table,
                            inherits=employee_mapper,
                            concrete=True,
                            polymorphic_identity='manager')
    engineer_mapper = mapper(Engineer, engineers_table,
                             inherits=employee_mapper,
                             concrete=True,
                             polymorphic_identity='engineer')

    session = create_session()
    session.add(Manager('Tom', 'knows how to manage things'))
    session.add(Engineer('Kurt', 'knows how to hack'))
    session.flush()
    session.expunge_all()

    assert set([repr(x) for x in session.query(Employee)]) \
        == set(['Engineer Kurt knows how to hack',
                'Manager Tom knows how to manage things'])
    assert set([repr(x) for x in session.query(Manager)]) \
        == set(['Manager Tom knows how to manage things'])
    assert set([repr(x) for x in session.query(Engineer)]) \
        == set(['Engineer Kurt knows how to hack'])

    manager = session.query(Manager).one()
    session.expire(manager, ['manager_data'])
    eq_(manager.manager_data, 'knows how to manage things')
def test_selective_relationships(self):
    sub, base_mtom, Related, Base, related, sub_mtom, base, Sub = (
        self.tables.sub,
        self.tables.base_mtom,
        self.classes.Related,
        self.classes.Base,
        self.tables.related,
        self.tables.sub_mtom,
        self.tables.base,
        self.classes.Sub)

    mapper(Base, base, properties={
        'related': relationship(Related, secondary=base_mtom,
                                backref='bases',
                                order_by=related.c.id)})
    mapper(Sub, sub, inherits=Base, concrete=True, properties={
        'related': relationship(Related, secondary=sub_mtom,
                                backref='subs',
                                order_by=related.c.id)})
    mapper(Related, related)

    sess = sessionmaker()()
    b1, s1, r1, r2, r3 = Base(), Sub(), Related(), Related(), Related()
    b1.related.append(r1)
    b1.related.append(r2)
    s1.related.append(r2)
    s1.related.append(r3)
    sess.add_all([b1, s1])
    sess.commit()

    eq_(s1.related, [r2, r3])
    eq_(b1.related, [r1, r2])
def test_one_to_many_on_m2o(self):
    Node, nodes = self.classes.Node, self.tables.nodes

    mapper(Node, nodes, properties={
        'children': relationship(Node,
                                 backref=sa.orm.backref(
                                     'parentnode',
                                     remote_side=nodes.c.name,
                                     passive_updates=False),
                                 )})

    sess = Session()
    n1 = Node(name='n1')
    sess.add(n1)
    n2 = Node(name='n11', parentnode=n1)
    n3 = Node(name='n12', parentnode=n1)
    n4 = Node(name='n13', parentnode=n1)
    sess.add_all([n2, n3, n4])
    sess.commit()

    n1.name = 'new n1'
    sess.commit()
    eq_(['new n1', 'new n1', 'new n1'],
        [n.parent for n in sess.query(Node).filter(
            Node.name.in_(['n11', 'n12', 'n13']))])
def test_one_to_many_on_o2m(self):
    Node, nodes = self.classes.Node, self.tables.nodes

    mapper(Node, nodes, properties={
        'children': relationship(Node,
                                 backref=sa.orm.backref(
                                     'parentnode',
                                     remote_side=nodes.c.name),
                                 passive_updates=False
                                 )})

    sess = Session()
    n1 = Node(name='n1')
    n1.children.append(Node(name='n11'))
    n1.children.append(Node(name='n12'))
    n1.children.append(Node(name='n13'))
    sess.add(n1)
    sess.commit()

    n1.name = 'new n1'
    sess.commit()
    eq_(n1.children[1].parent, 'new n1')
    eq_(['new n1', 'new n1', 'new n1'],
        [n.parent for n in sess.query(Node).filter(
            Node.name.in_(['n11', 'n12', 'n13']))])
def test_distinct(self):
    (users, items, order_items, orders,
     Item, User, Address, Order, addresses) = (
        self.tables.users,
        self.tables.items,
        self.tables.order_items,
        self.tables.orders,
        self.classes.Item,
        self.classes.User,
        self.classes.Address,
        self.classes.Order,
        self.tables.addresses,
    )

    mapper(Item, items)
    mapper(Order, orders, properties={
        "items": relationship(Item, secondary=order_items, lazy="select")})
    mapper(User, users, properties={
        "addresses": relationship(mapper(Address, addresses),
                                  lazy="select"),
        "orders": relationship(Order, lazy="select"),
    })

    sess = create_session()
    q = sess.query(User)

    # use a union all to get a lot of rows to join against
    u2 = users.alias("u2")
    s = sa.union_all(
        u2.select(use_labels=True),
        u2.select(use_labels=True),
        u2.select(use_labels=True)).alias("u")

    l = q.filter(s.c.u2_id == User.id).order_by(User.id).distinct().all()
    eq_(self.static.user_all_result, l)
def test_many_to_one_binds(self):
    Address, addresses, users, User = (
        self.classes.Address,
        self.tables.addresses,
        self.tables.users,
        self.classes.User,
    )

    mapper(Address, addresses,
           primary_key=[addresses.c.user_id, addresses.c.email_address])

    mapper(User, users, properties=dict(
        address=relationship(
            Address, uselist=False,
            primaryjoin=sa.and_(
                users.c.id == addresses.c.user_id,
                addresses.c.email_address == "*****@*****.**"
            ),
        )
    ))

    q = create_session().query(User)
    eq_(
        [
            User(id=7, address=None),
            User(id=8, address=Address(id=3)),
            User(id=9, address=None),
            User(id=10, address=None),
        ],
        list(q),
    )
def test_update_changes_resets_dirty(self):
    User = self.classes.User

    sess = Session(autoflush=False)

    john, jack, jill, jane = sess.query(User).order_by(User.id).all()

    john.age = 50
    jack.age = 37

    # autoflush is false.  therefore our '50' and '37' are getting
    # blown away by this operation.
    sess.query(User).filter(User.age > 29).\
        update({'age': User.age - 10}, synchronize_session='evaluate')

    for x in (john, jack, jill, jane):
        assert not sess.is_modified(x)

    eq_([john.age, jack.age, jill.age, jane.age], [25, 37, 29, 27])

    john.age = 25
    assert john in sess.dirty
    assert jack in sess.dirty
    assert jill not in sess.dirty
    assert not sess.is_modified(john)
    assert not sess.is_modified(jack)
def test_clear(self):
    db = sqlsoup.SqlSoup(engine)
    eq_(db.loans.count(), 1)
    _ = db.loans.insert(book_id=1, user_name='Bhargan Basepair')
    db.expunge_all()
    db.flush()
    eq_(db.loans.count(), 1)
def test_exec_three_table(self):
    users, addresses, dingalings = \
        self.tables.users, \
        self.tables.addresses, \
        self.tables.dingalings

    testing.db.execute(
        addresses.update().
        values(email_address=users.c.name).
        where(users.c.id == addresses.c.user_id).
        where(users.c.name == 'ed').
        where(addresses.c.id == dingalings.c.address_id).
        where(dingalings.c.id == 1),
    )

    eq_(
        testing.db.execute(
            addresses.select().order_by(addresses.c.id)
        ).fetchall(),
        [
            (1, 7, 'x', "*****@*****.**"),
            (2, 8, 'x', "ed"),
            (3, 8, 'x', "*****@*****.**"),
            (4, 8, 'x', "*****@*****.**"),
            (5, 9, 'x', "*****@*****.**")
        ]
    )
def test_exec_multitable(self):
    users, addresses = self.tables.users, self.tables.addresses

    testing.db.execute(
        addresses.update().
        values({
            addresses.c.email_address: users.c.name,
            users.c.name: 'ed2'
        }).
        where(users.c.id == addresses.c.user_id).
        where(users.c.name == 'ed')
    )

    eq_(
        testing.db.execute(
            addresses.select().order_by(addresses.c.id)).fetchall(),
        [
            (1, 7, 'x', "*****@*****.**"),
            (2, 8, 'x', "ed"),
            (3, 8, 'x', "ed"),
            (4, 8, 'x', "ed"),
            (5, 9, 'x', "*****@*****.**")
        ]
    )

    eq_(
        testing.db.execute(
            users.select().order_by(users.c.id)).fetchall(),
        [
            (7, 'jack'),
            (8, 'ed2'),
            (9, 'fred'),
            (10, 'chuck')
        ]
    )
def test_anon_alias(self):
    table1 = self.tables.table1
    compile_dialect = default.DefaultDialect()
    compile_dialect.max_identifier_length = IDENT_LENGTH

    q = table1.select(table1.c.this_is_the_primarykey_column == 4).alias()
    x = select([q], use_labels=True)

    self.assert_compile(
        x,
        "SELECT anon_1.this_is_the_primarykey_column AS "
        "anon_1_this_is_the_prim_1, anon_1.this_is_the_data_column "
        "AS anon_1_this_is_the_data_2 "
        "FROM (SELECT some_large_named_table."
        "this_is_the_primarykey_column AS "
        "this_is_the_primarykey_column, "
        "some_large_named_table.this_is_the_data_column "
        "AS this_is_the_data_column "
        "FROM some_large_named_table "
        "WHERE some_large_named_table.this_is_the_primarykey_column "
        "= :this_is_the_primarykey__1) AS anon_1",
        dialect=compile_dialect)

    eq_(
        list(testing.db.execute(x)),
        [(4, u'data4')]
    )
def test_no_load_with_backrefs(self):
    """load=False populates relationships in both
    directions without requiring a load"""

    users, Address, addresses, User = (self.tables.users,
                                       self.classes.Address,
                                       self.tables.addresses,
                                       self.classes.User)

    mapper(User, users, properties={
        'addresses': relationship(mapper(Address, addresses),
                                  backref='user')
    })

    u = User(id=7, name='fred', addresses=[
        Address(email_address='ad1'),
        Address(email_address='ad2')])
    sess = create_session()
    sess.add(u)
    sess.flush()
    sess.close()
    assert 'user' in u.addresses[1].__dict__

    sess = create_session()
    u2 = sess.merge(u, load=False)
    assert 'user' in u2.addresses[1].__dict__
    eq_(u2.addresses[1].user, User(id=7, name='fred'))

    sess.expire(u2.addresses[1], ['user'])
    assert 'user' not in u2.addresses[1].__dict__

    sess.close()
    sess = create_session()
    u = sess.merge(u2, load=False)
    assert 'user' not in u.addresses[1].__dict__
    eq_(u.addresses[1].user, User(id=7, name='fred'))
def go():
    engine = engines.testing_engine(
        options={'logging_name': 'FOO',
                 'pool_logging_name': 'BAR',
                 'use_reaper': False}
    )
    sess = create_session(bind=engine)

    a1 = A(col2="a1")
    a2 = A(col2="a2")
    a3 = A(col2="a3")
    a1.bs.append(B(col2="b1"))
    a1.bs.append(B(col2="b2"))
    a3.bs.append(B(col2="b3"))
    for x in [a1, a2, a3]:
        sess.add(x)
    sess.flush()
    sess.expunge_all()

    alist = sess.query(A).all()
    eq_(
        [
            A(col2="a1", bs=[B(col2="b1"), B(col2="b2")]),
            A(col2="a2", bs=[]),
            A(col2="a3", bs=[B(col2="b3")])
        ],
        alist)

    for a in alist:
        sess.delete(a)
    sess.flush()
    sess.close()
    engine.dispose()
def _test_autoincrement(self, bind):
    aitable = self.tables.aitable

    ids = set()
    rs = bind.execute(aitable.insert(), int1=1)
    last = rs.inserted_primary_key[0]
    self.assert_(last)
    self.assert_(last not in ids)
    ids.add(last)

    rs = bind.execute(aitable.insert(), str1='row 2')
    last = rs.inserted_primary_key[0]
    self.assert_(last)
    self.assert_(last not in ids)
    ids.add(last)

    rs = bind.execute(aitable.insert(), int1=3, str1='row 3')
    last = rs.inserted_primary_key[0]
    self.assert_(last)
    self.assert_(last not in ids)
    ids.add(last)

    rs = bind.execute(aitable.insert(values={'int1': func.length('four')}))
    last = rs.inserted_primary_key[0]
    self.assert_(last)
    self.assert_(last not in ids)
    ids.add(last)

    eq_(ids, set([1, 2, 3, 4]))

    eq_(list(bind.execute(aitable.select().order_by(aitable.c.id))),
        [(1, 1, None), (2, None, 'row 2'), (3, 3, 'row 3'), (4, 4, None)])
def test_updatemany(self):
    # MySQL-Python 1.2.2 breaks functions in execute_many :(
    if (testing.against('mysql+mysqldb') and
            testing.db.dialect.dbapi.version_info[:3] == (1, 2, 2)):
        return

    t.insert().execute({}, {}, {})

    t.update(t.c.col1 == sa.bindparam('pkval')).execute(
        {'pkval': 51, 'col7': None, 'col8': None, 'boolcol1': False})

    t.update(t.c.col1 == sa.bindparam('pkval')).execute(
        {'pkval': 51},
        {'pkval': 52},
        {'pkval': 53})

    l = t.select().execute()
    ctexec = currenttime.scalar()
    today = datetime.date.today()
    eq_(l.fetchall(),
        [(51, 'im the update', f2, ts, ts, ctexec, False, False,
          13, today, 'py'),
         (52, 'im the update', f2, ts, ts, ctexec, True, False,
          13, today, 'py'),
         (53, 'im the update', f2, ts, ts, ctexec, True, False,
          13, today, 'py')])
def test_update_values(self):
    r = t.insert().execute()
    pk = r.inserted_primary_key[0]
    t.update(t.c.col1 == pk, values={'col3': 55}).execute()
    l = t.select(t.c.col1 == pk).execute()
    l = l.first()
    eq_(55, l['col3'])
def test_transactional_advanced(self):
    canary = []

    def tracker(name):
        def go(*args, **kw):
            canary.append(name)
        return go

    engine = engines.testing_engine()
    for name in ['begin', 'savepoint',
                 'rollback_savepoint', 'release_savepoint',
                 'rollback', 'begin_twophase',
                 'prepare_twophase', 'commit_twophase']:
        event.listen(engine, '%s' % name, tracker(name))

    conn = engine.connect()

    trans = conn.begin()
    trans2 = conn.begin_nested()
    conn.execute(select([1]))
    trans2.rollback()
    trans2 = conn.begin_nested()
    conn.execute(select([1]))
    trans2.commit()
    trans.rollback()

    trans = conn.begin_twophase()
    conn.execute(select([1]))
    trans.prepare()
    trans.commit()

    eq_(canary, ['begin', 'savepoint', 'rollback_savepoint', 'savepoint',
                 'release_savepoint', 'rollback', 'begin_twophase',
                 'prepare_twophase', 'commit_twophase'])
def test_insert(self):
    r = t.insert().execute()
    assert r.lastrow_has_defaults()
    eq_(set(r.context.postfetch_cols),
        set([t.c.col3, t.c.col5, t.c.col4, t.c.col6]))

    r = t.insert(inline=True).execute()
    assert r.lastrow_has_defaults()
    eq_(set(r.context.postfetch_cols),
        set([t.c.col3, t.c.col5, t.c.col4, t.c.col6]))

    t.insert().execute()

    ctexec = sa.select([currenttime.label('now')],
                       bind=testing.db).scalar()
    l = t.select().order_by(t.c.col1).execute()
    today = datetime.date.today()
    eq_(l.fetchall(), [
        (x, 'imthedefault', f, ts, ts, ctexec, True, False,
         12, today, 'py')
        for x in range(51, 54)])

    t.insert().execute(col9=None)
    assert r.lastrow_has_defaults()
    eq_(set(r.context.postfetch_cols),
        set([t.c.col3, t.c.col5, t.c.col4, t.c.col6]))

    eq_(t.select(t.c.col1 == 54).execute().fetchall(),
        [(54, 'imthedefault', f, ts, ts, ctexec, True, False,
          12, today, None)])
def go():
    sess = create_session()

    a1 = A(col2="a1")
    a2 = A(col2="a2")
    a3 = A(col2="a3")
    a1.bs.append(B(col2="b1"))
    a1.bs.append(B(col2="b2"))
    a3.bs.append(B(col2="b3"))
    for x in [a1, a2, a3]:
        sess.add(x)
    sess.flush()
    sess.expunge_all()

    alist = sess.query(A).all()
    eq_(
        [
            A(col2="a1", bs=[B(col2="b1"), B(col2="b2")]),
            A(col2="a2", bs=[]),
            A(col2="a3", bs=[B(col2="b3")])
        ],
        alist)

    for a in alist:
        sess.delete(a)
    sess.flush()
def test_transactional(self):
    canary = []

    def tracker(name):
        def go(conn, *args, **kw):
            canary.append(name)
        return go

    engine = engines.testing_engine()
    event.listen(engine, 'before_execute', tracker('execute'))
    event.listen(engine, 'before_cursor_execute',
                 tracker('cursor_execute'))
    event.listen(engine, 'begin', tracker('begin'))
    event.listen(engine, 'commit', tracker('commit'))
    event.listen(engine, 'rollback', tracker('rollback'))

    conn = engine.connect()
    trans = conn.begin()
    conn.execute(select([1]))
    trans.rollback()
    trans = conn.begin()
    conn.execute(select([1]))
    trans.commit()

    eq_(canary, [
        'begin', 'execute', 'cursor_execute', 'rollback',
        'begin', 'execute', 'cursor_execute', 'commit',
    ])
def go():
    m1 = mapper(A, table1, properties={
        "bs": relationship(B, order_by=table2.c.col1)
    })
    m2 = mapper(B, table2)

    m3 = mapper(A, table1, non_primary=True)

    sess = create_session()
    a1 = A(col2="a1")
    a2 = A(col2="a2")
    a3 = A(col2="a3")
    a1.bs.append(B(col2="b1"))
    a1.bs.append(B(col2="b2"))
    a3.bs.append(B(col2="b3"))
    for x in [a1, a2, a3]:
        sess.add(x)
    sess.flush()
    sess.expunge_all()

    alist = sess.query(A).order_by(A.col1).all()
    eq_(
        [
            A(col2="a1", bs=[B(col2="b1"), B(col2="b2")]),
            A(col2="a2", bs=[]),
            A(col2="a3", bs=[B(col2="b3")])
        ],
        alist)

    for a in alist:
        sess.delete(a)
    sess.flush()
    sess.close()
    clear_mappers()
def go():
    eq_(
        sess.query(Person).with_polymorphic(
            Engineer,
            people.outerjoin(engineers)).all(),
        self._emps_wo_relationships_fixture())
def test_query_subclass_join_to_base_relationship(self):
    sess = create_session()
    # non-polymorphic
    eq_(sess.query(Engineer).join(Person.paperwork).all(), [e1, e2, e3])
def go():
    # test load People with subqueryload to engineers + machines
    eq_(
        sess.query(Person).with_polymorphic('*').options(
            subqueryload(Engineer.machines)).filter(
            Person.name == 'dilbert').all(),
        expected)
def go():
    eq_(
        sess.query(Company).options(
            subqueryload_all(Company.employees.of_type(Engineer),
                             Engineer.machines)).all(),
        expected)
def go():
    # test load Companies with lazy load to 'employees'
    eq_(sess.query(Company).all(), expected)
def go():
    eq_(
        sess.query(Person).options(
            joinedload(Engineer.machines))[1:3],
        all_employees[1:3])
def go():
    # limit the polymorphic join down to just "Person",
    # overriding select_table
    eq_(
        sess.query(Person).with_polymorphic(Person).all(),
        self._emps_wo_relationships_fixture())
def go():
    eq_(
        sess.query(Person).with_polymorphic(Engineer).filter(
            Engineer.primary_language == 'java').all(),
        self._emps_wo_relationships_fixture()[0:1])
def test_join_from_columns_or_subclass_seven(self):
    sess = create_session()
    eq_(
        sess.query(Manager).join(Paperwork, Manager.paperwork).order_by(
            Manager.name).all(),
        [m1, b1])
def go():
    eq_(
        sess.query(Person).with_polymorphic(Engineer).all(),
        self._emps_wo_relationships_fixture())
def test_polymorphic_any_ten(self):
    sess = create_session()
    any_ = Company.employees.of_type(Engineer).any(
        and_(Engineer.primary_language == 'cobol'))
    eq_(sess.query(Company).filter(any_).one(), c2)
def go():
    eq_(sess.query(Person).all(), all_employees)
def test_polymorphic_any_eight(self):
    sess = create_session()
    any_ = Engineer.machines.any(Machine.name == "Commodore 64")
    eq_(sess.query(Person).filter(any_).all(), [e2, e3])
def test_join_from_columns_or_subclass_one(self):
    sess = create_session()
    expected = [(u'dogbert', ), (u'pointy haired boss', )]
    eq_(sess.query(Manager.name).order_by(Manager.name).all(), expected)
def test_polymorphic_any_six(self):
    sess = create_session()
    any_ = Company.employees.of_type(Boss).any(Boss.golf_swing == 'fore')
    eq_(sess.query(Company).filter(any_).one(), c1)
def test_polymorphic_any_nine(self):
    sess = create_session()
    any_ = Person.paperwork.any(Paperwork.description == "review #2")
    eq_(sess.query(Person).filter(any_).all(), [m1])
def test_polymorphic_any_three(self):
    sess = create_session()
    any_ = Company.employees.any(Person.name == 'vlad')
    eq_(
        sess.query(Company).join(Company.employees, aliased=True).filter(
            Person.name == 'dilbert').filter(any_).all(),
        [])
def test_polymorphic_any_seven(self):
    sess = create_session()
    any_ = Company.employees.of_type(Boss).any(
        Manager.manager_name == 'pointy')
    eq_(sess.query(Company).filter(any_).one(), c1)
def test_join_to_polymorphic_aliased(self):
    sess = create_session()
    eq_(
        sess.query(Company).join(
            'employees', aliased=True).filter(Person.name == 'vlad').one(),
        c2)
def test_polymorphic_any_five(self):
    sess = create_session()
    calias = aliased(Company)
    any_ = calias.employees.of_type(Engineer).any(
        Engineer.primary_language == 'cobol')
    eq_(sess.query(calias).filter(any_).one(), c2)
def test_join_from_with_polymorphic_nonaliased_two(self):
    sess = create_session()
    eq_(
        sess.query(Person).with_polymorphic(
            [Manager, Engineer]).join('paperwork').filter(
            Paperwork.description.like('%#2%')).all(),
        [e1, m1])
def test_polymorphic_any_one(self):
    sess = create_session()
    any_ = Company.employees.any(Person.name == 'vlad')
    eq_(sess.query(Company).filter(any_).all(), [c2])
def test_join_from_polymorphic_aliased_three(self):
    sess = create_session()
    eq_(
        sess.query(Engineer).join('paperwork', aliased=True).filter(
            Paperwork.description.like('%#2%')).all(),
        [e1])
def test_join_from_with_polymorphic_aliased_one(self):
    sess = create_session()
    eq_(
        sess.query(Person).with_polymorphic(Manager).join(
            'paperwork', aliased=True).filter(
            Paperwork.description.like('%review%')).all(),
        [b1, m1])
def test_filter_on_subclass_six(self):
    sess = create_session()
    eq_(
        sess.query(Boss).filter(Boss.person_id == b1.person_id).one(),
        Boss(name="pointy haired boss"))
def test_join_from_polymorphic_aliased_four(self):
    sess = create_session()
    eq_(
        sess.query(Person).join('paperwork', aliased=True).filter(
            Person.name.like('%dog%')).filter(
            Paperwork.description.like('%#2%')).all(),
        [m1])
def test_filter_on_subclass_three(self):
    sess = create_session()
    eq_(
        sess.query(Engineer).filter(
            Engineer.person_id == e1.person_id).first(),
        Engineer(name="dilbert"))
def test_join_from_polymorphic_nonaliased_two(self):
    sess = create_session()
    eq_(
        sess.query(Person).join('paperwork', aliased=False).filter(
            Paperwork.description.like('%#2%')).all(),
        [e1, m1])
def test_filter_on_subclass_two(self):
    sess = create_session()
    eq_(sess.query(Engineer).first(), Engineer(name="dilbert"))
def test_filter_on_subclass_four(self):
    sess = create_session()
    eq_(
        sess.query(Manager).filter(
            Manager.person_id == m1.person_id).one(),
        Manager(name="dogbert"))