def _test_onetoone(self, passive_updates):
    """Natural-PK update propagates across a one-to-one relation.

    With ``passive_updates`` the flush of the changed username should
    emit a single statement (after expiring the relation); otherwise
    two.  NOTE(review): the username literals were masked to '******'
    in this copy, which made the UPDATE a no-op and defeated the
    SQL-count assertions; restored to 'jack' -> 'ed' to match the
    sibling natural-PK tests -- confirm against upstream.
    """
    mapper(User, users, properties={
        "address": relation(Address, passive_updates=passive_updates,
                            uselist=False)
    })
    mapper(Address, addresses)

    sess = create_session()
    u1 = User(username='jack', fullname='jack')
    sess.add(u1)
    sess.flush()

    a1 = Address(email='jack1')
    u1.address = a1
    sess.add(a1)
    sess.flush()

    u1.username = 'ed'

    def go():
        sess.flush()
    if passive_updates:
        sess.expire(u1, ['address'])
        self.assert_sql_count(testing.db, go, 1)
    else:
        self.assert_sql_count(testing.db, go, 2)

    def go():
        sess.flush()
    self.assert_sql_count(testing.db, go, 0)

    sess.expunge_all()
    eq_([Address(username='ed')], sess.query(Address).all())
def test_native_odbc_execute(self):
    """mxODBC dialect picks execute vs. executedirect per statement kind."""
    t1 = Table('t1', MetaData(), Column('c1', Integer))
    dbapi = MockDBAPI()
    engine = engines.testing_engine(
        'mssql+mxodbc://localhost',
        options={'module': dbapi, '_initialize': False})
    conn = engine.connect()

    # crud: uses execute
    conn.execute(t1.insert().values(c1='foo'))
    conn.execute(t1.delete().where(t1.c.c1 == 'foo'))
    conn.execute(t1.update().where(t1.c.c1 == 'foo').values(c1='bar'))

    # select: uses executedirect
    conn.execute(t1.select())

    # manual flagging overrides the default choice
    conn.execution_options(native_odbc_execute=True).\
        execute(t1.select())
    conn.execution_options(native_odbc_execute=False).\
        execute(t1.insert().values(c1='foo'))

    eq_(dbapi.log, [
        'execute',
        'execute',
        'execute',
        'executedirect',
        'execute',
        'executedirect',
    ])
def test_basic(self):
    """delete-orphan children persist with the parent and reload intact."""
    class A(_fixtures.Base):
        pass

    class B(_fixtures.Base):
        pass

    mapper(A, table_a, properties={
        'bs': relation(B, cascade="all, delete-orphan")
    })
    mapper(B, table_b)

    a1 = A(name='a1', bs=[B(name='b1'), B(name='b2'), B(name='b3')])
    sess = create_session()
    sess.add(a1)
    sess.flush()
    sess.expunge_all()

    eq_(sess.query(A).get(a1.id),
        A(name='a1', bs=[B(name='b1'), B(name='b2'), B(name='b3')]))

    a1 = sess.query(A).get(a1.id)
    assert not class_mapper(B)._is_orphan(
        attributes.instance_state(a1.bs[0]))
    a1.bs[0].foo = 'b2modified'
    a1.bs[1].foo = 'b3modified'
    sess.flush()
    sess.expunge_all()
    eq_(sess.query(A).get(a1.id),
        A(name='a1', bs=[B(name='b1'), B(name='b2'), B(name='b3')]))
def test_before_flush_affects_dirty(self):
    """A before_flush hook that dirties identity-map objects gets them flushed."""
    mapper(User, users)

    class MyExt(sa.orm.session.SessionExtension):
        def before_flush(self, session, flush_context, objects):
            for obj in list(session.identity_map.values()):
                obj.name += " modified"

    sess = create_session(extension=MyExt(), autoflush=True)
    u = User(name='u1')
    sess.add(u)
    sess.flush()
    # first flush: identity map was empty when the hook ran
    eq_(sess.query(User).order_by(User.name).all(),
        [User(name='u1')])

    sess.add(User(name='u2'))
    sess.flush()
    sess.expunge_all()
    # second flush: the hook modified the already-persistent u1
    eq_(sess.query(User).order_by(User.name).all(),
        [User(name='u1 modified'), User(name='u2')])
def _test_decimal(self, tabledef):
    """Checks a variety of FIXED usages.

    This is primarily for SERIAL columns, which can be FIXED
    (scale-less) or (SMALL)INT.  Ensures that FIXED id columns are
    converted to integers and that are assignable as such.  Also
    exercises general decimal assignment and selection behavior.
    """
    meta = MetaData(testing.db)
    try:
        if isinstance(tabledef, basestring):
            # run textual CREATE TABLE
            testing.db.execute(tabledef)
        else:
            _t = tabledef.tometadata(meta)
            _t.create()
        t = Table('dectest', meta, autoload=True)

        vals = [Decimal('2.2'), Decimal('23'), Decimal('2.4'), 25]
        cols = ['d1', 'd2', 'n1', 'i1']
        t.insert().execute(dict(zip(cols, vals)))
        roundtrip = list(t.select().execute())
        eq_(roundtrip, [tuple([1] + vals)])

        # re-insert the selected values; they must round-trip unchanged
        t.insert().execute(dict(zip(['id'] + cols,
                                    [2] + list(roundtrip[0][1:]))))
        roundtrip2 = list(t.select(order_by=t.c.id).execute())
        eq_(roundtrip2, [tuple([1] + vals),
                         tuple([2] + vals)])
    finally:
        try:
            testing.db.execute("DROP TABLE dectest")
        except exc.DatabaseError:
            pass
def test_selfref_onjoined(self):
    """Self-referential relation whose target is a joined-table subclass."""
    class Taggable(_base.ComparableEntity):
        pass

    class User(Taggable):
        pass

    mapper(Taggable, taggable,
           polymorphic_on=taggable.c.type,
           polymorphic_identity='taggable',
           properties={
               'owner': relation(
                   User,
                   primaryjoin=taggable.c.owner_id == taggable.c.id,
                   remote_side=taggable.c.id),
           })
    mapper(User, users, inherits=Taggable,
           polymorphic_identity='user',
           inherit_condition=users.c.id == taggable.c.id)

    u1 = User(data='u1')
    t1 = Taggable(owner=u1)
    sess = create_session()
    sess.add(t1)
    sess.flush()
    sess.expunge_all()

    eq_(sess.query(Taggable).order_by(Taggable.id).all(),
        [User(data='u1'), Taggable(owner=User(data='u1'))])
def test_cascades_onlycollection(self):
    """Cascade only reaches instances that are still part of the
    collection, not those that have been removed"""
    sess = create_session()
    u = User(name='jack',
             orders=[Order(description='someorder'),
                     Order(description='someotherorder')])
    sess.add(u)
    sess.flush()

    o = u.orders[0]
    del u.orders[0]
    sess.delete(u)
    assert u in sess.deleted
    # the removed order is no longer cascaded from the deleted parent
    assert o not in sess.deleted
    assert o in sess

    u2 = User(name='newuser', orders=[o])
    sess.add(u2)
    sess.flush()
    sess.expunge_all()

    assert users.count().scalar() == 1
    assert orders.count().scalar() == 1
    eq_(sess.query(User).all(),
        [User(name='newuser',
              orders=[Order(description='someorder')])])
def test_polyon_col_setsup(self):
    """polymorphic_on bound to a column of a with_polymorphic selectable."""
    class A(_fixtures.Base):
        pass

    class B(_fixtures.Base):
        pass

    class C(A):
        pass

    class D(C):
        pass

    poly_select = select(
        [tablea, tableb.c.data.label('discriminator')],
        from_obj=tablea.join(tableb)).alias('poly')

    mapper(B, tableb)
    mapper(A, tablea,
           with_polymorphic=('*', poly_select),
           polymorphic_on=poly_select.c.discriminator,
           properties={'b': relation(B, uselist=False)})
    mapper(C, tablec, inherits=A, polymorphic_identity='c')
    mapper(D, tabled, inherits=C, polymorphic_identity='d')

    c = C(cdata='c1', adata='a1', b=B(data='c'))
    d = D(cdata='c2', adata='a2', ddata='d2', b=B(data='d'))
    sess = create_session()
    sess.add(c)
    sess.add(d)
    sess.flush()
    sess.expunge_all()

    eq_(sess.query(A).all(),
        [C(cdata='c1', adata='a1'),
         D(cdata='c2', adata='a2', ddata='d2')])
def test_parent_refs_descendant(self): class Person(AttrSettable): pass class Manager(Person): pass # note that up until recently (0.4.4), we had to specify "foreign_keys" here # for this primary join. mapper(Person, people, properties={ 'manager':relation(Manager, primaryjoin=(people.c.manager_id == managers.c.person_id), uselist=False, post_update=True) }) mapper(Manager, managers, inherits=Person, inherit_condition=people.c.person_id==managers.c.person_id) eq_(class_mapper(Person).get_property('manager').synchronize_pairs, [(managers.c.person_id,people.c.manager_id)]) session = create_session() p = Person(name='some person') m = Manager(name='some manager') p.manager = m session.add(p) session.flush() session.expunge_all() p = session.query(Person).get(p.person_id) m = session.query(Manager).get(m.person_id) print p, m, p.manager assert p.manager is m
def test_populate_dict(self):
    """sync.populate_dict copies mapped source attributes into a plain dict."""
    uowcommit, a1, b1, a_mapper, b_mapper = self._fixture()
    a1.obj().id = 10
    pairs = [(a_mapper.c.id, b_mapper.c.id,)]
    dest = {}
    sync.populate_dict(a1, a_mapper, dest, pairs)
    eq_(dest, {'id': 10})
def test_nativeext_submanager(self):
    """__sa_instrumentation_manager__ may name a ClassManager subclass."""
    class Mine(attributes.ClassManager):
        pass

    class A(object):
        __sa_instrumentation_manager__ = Mine

    attributes.register_class(A)
    eq_(type(attributes.manager_of_class(A)), Mine)
def go():
    """Map A/B (plus a non-primary A mapper), round-trip, delete, clear."""
    m1 = mapper(A, table1, properties={
        "bs": relation(B, order_by=table2.c.col1)
    })
    m2 = mapper(B, table2)
    m3 = mapper(A, table1, non_primary=True)

    sess = create_session()
    a1 = A(col2="a1")
    a2 = A(col2="a2")
    a3 = A(col2="a3")
    a1.bs.append(B(col2="b1"))
    a1.bs.append(B(col2="b2"))
    a3.bs.append(B(col2="b3"))
    for x in [a1, a2, a3]:
        sess.add(x)
    sess.flush()
    sess.expunge_all()

    alist = sess.query(A).order_by(A.col1).all()
    eq_([A(col2="a1", bs=[B(col2="b1"), B(col2="b2")]),
         A(col2="a2", bs=[]),
         A(col2="a3", bs=[B(col2="b3")])],
        alist)

    for a in alist:
        sess.delete(a)
    sess.flush()
    sess.close()
    clear_mappers()
def test_clear(self):
    """sync.clear nulls out the dependent foreign-key attribute."""
    uowcommit, a1, b1, a_mapper, b_mapper = self._fixture()
    pairs = [(a_mapper.c.id, b_mapper.c.t1id,)]
    b1.obj().t1id = 8
    eq_(b1.obj().__dict__['t1id'], 8)
    sync.clear(b1, b_mapper, pairs)
    eq_(b1.obj().__dict__['t1id'], None)
def test_non_orphan(self):
    """test that an entity can have two parent delete-orphan
    cascades, and persists normally."""
    class Address(_fixtures.Base):
        pass

    class Home(_fixtures.Base):
        pass

    class Business(_fixtures.Base):
        pass

    mapper(Address, addresses)
    mapper(Home, homes, properties={
        'address': relationship(Address, cascade='all,delete-orphan',
                                single_parent=True)})
    mapper(Business, businesses, properties={
        'address': relationship(Address, cascade='all,delete-orphan',
                                single_parent=True)})

    session = create_session()
    h1 = Home(description='home1',
              address=Address(street='address1'))
    b1 = Business(description='business1',
                  address=Address(street='address2'))
    session.add_all((h1, b1))
    session.flush()
    session.expunge_all()

    eq_(session.query(Home).get(h1.id),
        Home(description='home1', address=Address(street='address1')))
    eq_(session.query(Business).get(b1.id),
        Business(description='business1',
                 address=Address(street='address2')))
def go():
    """Round-trip A/B instances and delete them using existing mappers."""
    sess = create_session()
    a1 = A(col2="a1")
    a2 = A(col2="a2")
    a3 = A(col2="a3")
    a1.bs.append(B(col2="b1"))
    a1.bs.append(B(col2="b2"))
    a3.bs.append(B(col2="b3"))
    for x in [a1, a2, a3]:
        sess.add(x)
    sess.flush()
    sess.expunge_all()

    alist = sess.query(A).all()
    eq_([A(col2="a1", bs=[B(col2="b1"), B(col2="b2")]),
         A(col2="a2", bs=[]),
         A(col2="a3", bs=[B(col2="b3")])],
        alist)

    for a in alist:
        sess.delete(a)
    sess.flush()
def test_basic(self):
    """Eager-loaded backref collection; join/distinct count and indexing."""
    mapper(Employee, employees)
    mapper(Department, departments, properties=dict(
        employees=relation(Employee, lazy=False,
                           backref='department')))

    d1 = Department(name='One')
    for e in 'Jim', 'Jack', 'John', 'Susan':
        d1.employees.append(Employee(name=e))

    d2 = Department(name='Two')
    for e in 'Joe', 'Bob', 'Mary', 'Wally':
        d2.employees.append(Employee(name=e))

    sess = create_session()
    sess.add_all((d1, d2))
    sess.flush()

    q = (sess.query(Department).
         join('employees').
         filter(Employee.name.startswith('J')).
         distinct().
         order_by([sa.desc(Department.name)]))
    eq_(q.count(), 2)
    assert q[0] is d2
def test_of_type(self):
    """any()/join() with of_type() restricts matching to the subclass."""
    mapper(Company, companies, properties={
        'employees': relationship(Employee, backref='company')
    })
    mapper(Employee, employees, polymorphic_on=employees.c.type)
    mapper(Manager, inherits=Employee, polymorphic_identity='manager')
    mapper(Engineer, inherits=Employee,
           polymorphic_identity='engineer')
    mapper(JuniorEngineer, inherits=Engineer,
           polymorphic_identity='juniorengineer')

    sess = sessionmaker()()
    c1 = Company(name='c1')
    c2 = Company(name='c2')
    m1 = Manager(name='Tom', manager_data='data1', company=c1)
    m2 = Manager(name='Tom2', manager_data='data2', company=c2)
    e1 = Engineer(name='Kurt', engineer_info='knows how to hack',
                  company=c2)
    e2 = JuniorEngineer(name='Ed', engineer_info='oh that ed',
                        company=c1)
    sess.add_all([c1, c2, m1, m2, e1, e2])
    sess.commit()
    sess.expunge_all()

    eq_(sess.query(Company).filter(
            Company.employees.of_type(JuniorEngineer).any()).all(),
        [Company(name='c1')])
    eq_(sess.query(Company).join(
            Company.employees.of_type(JuniorEngineer)).all(),
        [Company(name='c1')])
def test_many_to_one_cascade(self):
    """merge() carries pending many-to-one history; load=False stays clean."""
    mapper(Address, addresses, properties={
        'user': relationship(User)
    })
    mapper(User, users)

    u1 = User(id=1, name="u1")
    a1 = Address(id=1, email_address="a1", user=u1)
    u2 = User(id=2, name="u2")

    sess = create_session()
    sess.add_all([a1, u2])
    sess.flush()

    a1.user = u2

    sess2 = create_session()
    a2 = sess2.merge(a1)
    eq_(attributes.get_history(a2, 'user'),
        ([u2], (), [attributes.PASSIVE_NO_RESULT]))
    assert a2 in sess2.dirty

    sess.refresh(a1)

    sess2 = create_session()
    a2 = sess2.merge(a1, load=False)
    eq_(attributes.get_history(a2, 'user'),
        ((), [u1], ()))
    assert a2 not in sess2.dirty
def test_no_relationship_cascade(self):
    """test that merge doesn't interfere with a relationship() target
    that specifically doesn't include 'merge' cascade.
    """
    mapper(Address, addresses, properties={
        'user': relationship(User, cascade="save-update")
    })
    mapper(User, users)

    sess = create_session()
    u1 = User(name="fred")
    a1 = Address(email_address="asdf", user=u1)
    sess.add(a1)
    sess.flush()

    a2 = Address(id=a1.id, email_address="bar",
                 user=User(name="hoho"))
    a2 = sess.merge(a2)
    sess.flush()

    # no expire of the attribute
    assert a2.__dict__['user'] is u1

    # merge succeeded
    eq_(sess.query(Address).all(),
        [Address(id=a1.id, email_address="bar")])

    # didn't touch user
    eq_(sess.query(User).all(), [User(name="fred")])
def test_self_referential(self):
    """Self-join of Person via an alias, with and without from_self()."""
    sess = create_session()
    c1_employees = [e1, e2, b1, m1]
    palias = aliased(Person)

    expected = [(m1, e1), (m1, e2), (m1, b1)]

    eq_(sess.query(Person, palias).
        filter(Person.company_id == palias.company_id).
        filter(Person.name == 'dogbert').
        filter(Person.person_id > palias.person_id).
        order_by(Person.person_id, palias.person_id).all(),
        expected)

    # same criteria wrapped via from_self() must yield identical rows
    eq_(sess.query(Person, palias).
        filter(Person.company_id == palias.company_id).
        filter(Person.name == 'dogbert').
        filter(Person.person_id > palias.person_id).
        from_self().
        order_by(Person.person_id, palias.person_id).all(),
        expected)
def testbasic(self): class Foo(object): def __init__(self, data=None): self.data = data def __repr__(self): return "Foo id %d, data %s" % (self.id, self.data) mapper(Foo, foo) class Bar(Foo): def __repr__(self): return "Bar id %d, data %s" % (self.id, self.data) mapper(Bar, bar, inherits=Foo, properties={"foos": relation(Foo, secondary=bar_foo, lazy=True)}) sess = create_session() b = Bar("bar #1") sess.add(b) b.foos.append(Foo("foo #1")) b.foos.append(Foo("foo #2")) sess.flush() compare = repr(b) + repr(sorted([repr(o) for o in b.foos])) sess.expunge_all() l = sess.query(Bar).all() print repr(l[0]) + repr(l[0].foos) found = repr(l[0]) + repr(sorted([repr(o) for o in l[0].foos])) eq_(found, compare)
def test_mixed_transaction_control(self):
    """begin/begin_nested/subtransaction interleave in autocommit mode."""
    mapper(User, users)
    sess = create_session(autocommit=True)

    sess.begin()
    sess.begin_nested()
    transaction = sess.begin(subtransactions=True)
    sess.add(User(name='u1'))
    transaction.commit()
    sess.commit()
    sess.commit()
    sess.close()
    eq_(len(sess.query(User).all()), 1)

    t1 = sess.begin()
    t2 = sess.begin_nested()
    sess.add(User(name='u2'))
    t2.commit()
    # committing the nested transaction leaves the outer one current
    assert sess.transaction is t1
    sess.close()
def test_weakref_with_cycles_o2o(self):
    """A clean one-to-one backref cycle is weakly referenced; dirty isn't."""
    s = sessionmaker()()
    mapper(User, users, properties={
        "address": relationship(Address, backref="user",
                                uselist=False)
    })
    mapper(Address, addresses)
    s.add(User(name="ed", address=Address(email_address="ed1")))
    s.commit()

    user = s.query(User).options(joinedload(User.address)).one()
    user.address.user
    eq_(user, User(name="ed", address=Address(email_address="ed1")))

    del user
    gc_collect()
    # clean cycle: both objects fall out of the identity map
    assert len(s.identity_map) == 0

    user = s.query(User).options(joinedload(User.address)).one()
    user.address.email_address = 'ed2'
    user.address.user  # lazyload

    del user
    gc_collect()
    # dirty object keeps the pair strongly referenced until flush
    assert len(s.identity_map) == 2

    s.commit()
    user = s.query(User).options(joinedload(User.address)).one()
    eq_(user, User(name="ed", address=Address(email_address="ed2")))
def _test_manytoone(self, passive_updates):
    """Natural-PK update propagates to many-to-one dependents.

    NOTE(review): the username literals were masked to '******' in
    this copy, which made the username UPDATE a no-op and contradicted
    the later ``assert a1.username == a2.username == 'ed'``; restored
    to 'jack' -> 'ed' per that assertion.
    """
    mapper(User, users)
    mapper(Address, addresses, properties={
        'user': relation(User, passive_updates=passive_updates)
    })

    sess = create_session()
    a1 = Address(email='jack1')
    a2 = Address(email='jack2')
    u1 = User(username='jack', fullname='jack')
    a1.user = u1
    a2.user = u1
    sess.add(a1)
    sess.add(a2)
    sess.flush()

    u1.username = 'ed'

    def go():
        sess.flush()
    if passive_updates:
        self.assert_sql_count(testing.db, go, 1)
    else:
        self.assert_sql_count(testing.db, go, 3)

    def go():
        sess.flush()
    self.assert_sql_count(testing.db, go, 0)

    assert a1.username == a2.username == 'ed'
    sess.expunge_all()
    eq_([Address(username='ed'), Address(username='ed')],
        sess.query(Address).all())
def test_nested_transaction_connection_add(self):
    """Nested transaction commit/rollback behavior in autocommit mode."""
    mapper(User, users)
    sess = create_session(autocommit=True)

    sess.begin()
    sess.begin_nested()
    u1 = User(name='u1')
    sess.add(u1)
    sess.flush()
    sess.rollback()

    u2 = User(name='u2')
    sess.add(u2)
    sess.commit()
    eq_(set(sess.query(User).all()), set([u2]))

    sess.begin()
    sess.begin_nested()
    u3 = User(name='u3')
    sess.add(u3)
    sess.commit()  # commit the nested transaction
    sess.rollback()
    # outer rollback discards the nested commit as well
    eq_(set(sess.query(User).all()), set([u2]))

    sess.close()
def test_no_load_with_backrefs(self):
    """load=False populates relationships in both directions without
    requiring a load"""
    mapper(User, users, properties={
        'addresses': relationship(mapper(Address, addresses),
                                  backref='user')
    })
    u = User(id=7, name='fred', addresses=[
        Address(email_address='ad1'),
        Address(email_address='ad2')])

    sess = create_session()
    sess.add(u)
    sess.flush()
    sess.close()
    assert 'user' in u.addresses[1].__dict__

    sess = create_session()
    u2 = sess.merge(u, load=False)
    assert 'user' in u2.addresses[1].__dict__
    eq_(u2.addresses[1].user, User(id=7, name='fred'))

    sess.expire(u2.addresses[1], ['user'])
    assert 'user' not in u2.addresses[1].__dict__
    sess.close()

    sess = create_session()
    u = sess.merge(u2, load=False)
    # expired attribute stays unloaded through the merge
    assert 'user' not in u.addresses[1].__dict__
    eq_(u.addresses[1].user, User(id=7, name='fred'))
def test_reduce_aliased_join(self):
    """reduce_columns collapses FK-equivalent PKs of an aliased outer join."""
    metadata = MetaData()
    people = Table(
        'people', metadata,
        Column('person_id', Integer,
               Sequence('person_id_seq', optional=True),
               primary_key=True),
        Column('name', String(50)),
        Column('type', String(30)))
    engineers = Table(
        'engineers', metadata,
        Column('person_id', Integer, ForeignKey('people.person_id'),
               primary_key=True),
        Column('status', String(30)),
        Column('engineer_name', String(50)),
        Column('primary_language', String(50)))
    managers = Table(
        'managers', metadata,
        Column('person_id', Integer, ForeignKey('people.person_id'),
               primary_key=True),
        Column('status', String(30)),
        Column('manager_name', String(50)))

    pjoin = people.outerjoin(engineers).outerjoin(managers).\
        select(use_labels=True).alias('pjoin')

    eq_(util.column_set(sql_util.reduce_columns(
            [pjoin.c.people_person_id,
             pjoin.c.engineers_person_id,
             pjoin.c.managers_person_id])),
        util.column_set([pjoin.c.people_person_id]))
def test_count(self):
    """Dynamic relation supports count() without loading the collection."""
    mapper(User, users, properties={
        'addresses': dynamic_loader(mapper(Address, addresses))
    })
    sess = create_session()
    u = sess.query(User).first()
    eq_(u.addresses.count(), 1)
def test_remove_orphans(self): mapper(User, users, properties={ 'addresses':dynamic_loader(mapper(Address, addresses), order_by=Address.id, cascade="all, delete-orphan", backref='user') }) sess = create_session(autoflush=True) u = User(name='ed') u.addresses.append(Address(email_address='a')) u.addresses.append(Address(email_address='b')) u.addresses.append(Address(email_address='c')) u.addresses.append(Address(email_address='d')) u.addresses.append(Address(email_address='e')) u.addresses.append(Address(email_address='f')) sess.add(u) eq_([Address(email_address='a'), Address(email_address='b'), Address(email_address='c'), Address(email_address='d'), Address(email_address='e'), Address(email_address='f')], sess.query(Address).all()) eq_(Address(email_address='c'), u.addresses[2]) try: del u.addresses[3] assert False except TypeError, e: assert "doesn't support item deletion" in str(e), str(e)
def test_merge(self):
    """merge() reconciles a dynamic collection's membership via history."""
    mapper(User, users, properties={
        'addresses': dynamic_loader(
            mapper(Address, addresses),
            order_by=addresses.c.email_address)
    })
    sess = create_session()
    u1 = User(name='jack')
    a1 = Address(email_address='a1')
    a2 = Address(email_address='a2')
    a3 = Address(email_address='a3')

    u1.addresses.append(a2)
    u1.addresses.append(a3)

    sess.add_all([u1, a1])
    sess.flush()

    u1 = User(id=u1.id, name='jack')
    u1.addresses.append(a1)
    u1.addresses.append(a3)

    u1 = sess.merge(u1)
    # a1 added, a3 unchanged, a2 removed
    eq_(attributes.get_history(u1, 'addresses'),
        ([a1], [a3], [a2]))

    sess.flush()
    eq_(list(u1.addresses), [a1, a3])
def test_joined_inheritance(self):
    """History rows are versioned correctly for joined-table subclasses."""
    class BaseClass(Base, ComparableEntity):
        __tablename__ = 'basetable'
        id = Column(Integer, primary_key=True)
        name = Column(String(50))
        type = Column(String(20))
        __mapper_args__ = {
            'polymorphic_on': type,
            'polymorphic_identity': 'base'
        }

    class SubClassSeparatePk(BaseClass):
        __tablename__ = 'subtable1'
        id = Column(Integer, primary_key=True)
        base_id = Column(Integer, ForeignKey('basetable.id'))
        subdata1 = Column(String(50))
        __mapper_args__ = {'polymorphic_identity': 'sep'}

    class SubClassSamePk(BaseClass):
        __tablename__ = 'subtable2'
        id = Column(Integer, ForeignKey('basetable.id'),
                    primary_key=True)
        subdata2 = Column(String(50))
        __mapper_args__ = {'polymorphic_identity': 'same'}

    self.create_tables()
    sess = Session()

    sep1 = SubClassSeparatePk(name='sep1', subdata1='sep1subdata')
    base1 = BaseClass(name='base1')
    same1 = SubClassSamePk(name='same1', subdata2='same1subdata')
    sess.add_all([sep1, base1, same1])
    sess.commit()

    base1.name = 'base1mod'
    same1.subdata2 = 'same1subdatamod'
    sep1.name = 'sep1mod'
    sess.commit()

    BaseClassHistory = BaseClass.__history_mapper__.class_
    SubClassSeparatePkHistory = \
        SubClassSeparatePk.__history_mapper__.class_
    SubClassSamePkHistory = SubClassSamePk.__history_mapper__.class_

    eq_(sess.query(BaseClassHistory).order_by(
            BaseClassHistory.id).all(),
        [SubClassSeparatePkHistory(
            id=1, name=u'sep1', type=u'sep', version=1),
         BaseClassHistory(id=2, name=u'base1', type=u'base',
                          version=1),
         SubClassSamePkHistory(
            id=3, name=u'same1', type=u'same', version=1)])

    same1.subdata2 = 'same1subdatamod2'
    eq_(sess.query(BaseClassHistory).order_by(
            BaseClassHistory.id, BaseClassHistory.version).all(),
        [SubClassSeparatePkHistory(
            id=1, name=u'sep1', type=u'sep', version=1),
         BaseClassHistory(id=2, name=u'base1', type=u'base',
                          version=1),
         SubClassSamePkHistory(
            id=3, name=u'same1', type=u'same', version=1),
         SubClassSamePkHistory(
            id=3, name=u'same1', type=u'same', version=2)])

    base1.name = 'base1mod2'
    eq_(sess.query(BaseClassHistory).order_by(
            BaseClassHistory.id, BaseClassHistory.version).all(),
        [SubClassSeparatePkHistory(
            id=1, name=u'sep1', type=u'sep', version=1),
         BaseClassHistory(id=2, name=u'base1', type=u'base',
                          version=1),
         BaseClassHistory(
            id=2, name=u'base1mod', type=u'base', version=2),
         SubClassSamePkHistory(
            id=3, name=u'same1', type=u'same', version=1),
         SubClassSamePkHistory(
            id=3, name=u'same1', type=u'same', version=2)])
def test_set_comparisons(self):
    """Proxied set collection mirrors the full built-in set comparison API."""
    Parent, Child = self.Parent, self.Child
    p1 = Parent('P1')
    p1.children = ['a', 'b', 'c']
    control = set(['a', 'b', 'c'])

    for other in (set(['a', 'b', 'c']), set(['a', 'b', 'c', 'd']),
                  set(['a']), set(['a', 'b']), set(['c', 'd']),
                  set(['e', 'f', 'g']), set()):
        eq_(p1.children.union(other), control.union(other))
        eq_(p1.children.difference(other), control.difference(other))
        eq_((p1.children - other), (control - other))
        eq_(p1.children.intersection(other),
            control.intersection(other))
        eq_(p1.children.symmetric_difference(other),
            control.symmetric_difference(other))
        eq_(p1.children.issubset(other), control.issubset(other))
        eq_(p1.children.issuperset(other), control.issuperset(other))

        self.assert_((p1.children == other) == (control == other))
        self.assert_((p1.children != other) == (control != other))
        self.assert_((p1.children < other) == (control < other))
        self.assert_((p1.children <= other) == (control <= other))
        self.assert_((p1.children > other) == (control > other))
        self.assert_((p1.children >= other) == (control >= other))
def testtwo(self):
    """The original testcase that includes various complicating factors"""
    mapper(Phone, phone_numbers)
    mapper(Address, addresses, properties={
        'phones': relationship(Phone, lazy='joined',
                               backref='address',
                               order_by=phone_numbers.c.phone_id)
    })
    mapper(Company, companies, properties={
        'addresses': relationship(Address, lazy='joined',
                                  backref='company',
                                  order_by=addresses.c.address_id)
    })
    mapper(Item, items)
    mapper(Invoice, invoices, properties={
        'items': relationship(Item, lazy='joined', backref='invoice',
                              order_by=items.c.item_id),
        'company': relationship(Company, lazy='joined',
                                backref='invoices')
    })

    c1 = Company(company_name='company 1', addresses=[
        Address(address='a1 address',
                phones=[Phone(type='home', number='1111'),
                        Phone(type='work', number='22222')]),
        Address(address='a2 address',
                phones=[Phone(type='home', number='3333'),
                        Phone(type='work', number='44444')])
    ])

    session = create_session()
    session.add(c1)
    session.flush()
    company_id = c1.company_id
    session.expunge_all()
    a = session.query(Company).get(company_id)

    # set up an invoice
    i1 = Invoice(date=datetime.datetime.now(), company=a)
    item1 = Item(code='aaaa', qty=1, invoice=i1)
    item2 = Item(code='bbbb', qty=2, invoice=i1)
    item3 = Item(code='cccc', qty=3, invoice=i1)
    session.flush()
    invoice_id = i1.invoice_id

    session.expunge_all()
    c = session.query(Company).get(company_id)
    session.expunge_all()
    i = session.query(Invoice).get(invoice_id)
    eq_(c, i.company)
def test_relation_to_subclass(self):
    """A relation targeting a subclass limits rows to that subclass."""
    mapper(Company, companies,
           properties={'engineers': relation(Engineer)})
    mapper(Employee, employees, polymorphic_on=employees.c.type,
           properties={'company': relation(Company)})
    mapper(Manager, inherits=Employee, polymorphic_identity='manager')
    mapper(Engineer, inherits=Employee,
           polymorphic_identity='engineer')
    mapper(JuniorEngineer, inherits=Engineer,
           polymorphic_identity='juniorengineer')

    sess = sessionmaker()()
    c1 = Company(name='c1')
    c2 = Company(name='c2')
    m1 = Manager(name='Tom', manager_data='data1', company=c1)
    m2 = Manager(name='Tom2', manager_data='data2', company=c2)
    e1 = Engineer(name='Kurt', engineer_info='knows how to hack',
                  company=c2)
    e2 = JuniorEngineer(name='Ed', engineer_info='oh that ed',
                        company=c1)
    sess.add_all([c1, c2, m1, m2, e1, e2])
    sess.commit()

    eq_(c1.engineers, [e2])
    eq_(c2.engineers, [e1])

    sess.expunge_all()
    eq_(sess.query(Company).order_by(Company.name).all(),
        [Company(name='c1', engineers=[JuniorEngineer(name='Ed')]),
         Company(name='c2', engineers=[Engineer(name='Kurt')])])

    # eager load join should limit to only "Engineer"
    sess.expunge_all()
    eq_(sess.query(Company).options(eagerload('engineers')).order_by(
            Company.name).all(),
        [Company(name='c1', engineers=[JuniorEngineer(name='Ed')]),
         Company(name='c2', engineers=[Engineer(name='Kurt')])])

    # join() to Company.engineers, Employee as the requested entity
    sess.expunge_all()
    eq_(sess.query(Company, Employee).join(
            Company.engineers).order_by(Company.name).all(),
        [(Company(name='c1'), JuniorEngineer(name='Ed')),
         (Company(name='c2'), Engineer(name='Kurt'))])

    # join() to Company.engineers, Engineer as the requested entity.
    # this actually applies the IN criterion twice which is less than
    # ideal.
    sess.expunge_all()
    eq_(sess.query(Company, Engineer).join(
            Company.engineers).order_by(Company.name).all(),
        [(Company(name='c1'), JuniorEngineer(name='Ed')),
         (Company(name='c2'), Engineer(name='Kurt'))])

    # join() to Company.engineers without any Employee/Engineer entity
    sess.expunge_all()
    eq_(sess.query(Company).join(Company.engineers).filter(
            Engineer.name.in_(['Tom', 'Kurt'])).all(),
        [Company(name='c2')])

    # this however fails as it does not limit the subtypes to just
    # "Engineer".  with joins constructed by filter(), we seem to be
    # following a policy where we don't try to make decisions on how
    # to join to the target class, whereas when using join() we seem
    # to have a lot more capabilities.  we might want to document
    # "advantages of join() vs. straight filtering", or add a large
    # section to "inheritance" laying out all the various behaviors
    # Query has.
    @testing.fails_on_everything_except()
    def go():
        sess.expunge_all()
        eq_(sess.query(Company).
            filter(Company.company_id == Engineer.company_id).
            filter(Engineer.name.in_(['Tom', 'Kurt'])).all(),
            [Company(name='c2')])
    go()
def _equivalent(self, q_proxy, q_direct):
    """Assert the proxied query yields the same rows as the direct one."""
    eq_(q_proxy.all(), q_direct.all())
def testbinary(self):
    """Round-trip binary streams and pickled objects, including NULLs."""
    testobj1 = pickleable.Foo('im foo 1')
    testobj2 = pickleable.Foo('im foo 2')
    testobj3 = pickleable.Foo('im foo 3')

    stream1 = self.load_stream('binary_data_one.dat')
    stream2 = self.load_stream('binary_data_two.dat')
    binary_table.insert().execute(
        primary_id=1, misc='binary_data_one.dat', data=stream1,
        data_slice=stream1[0:100], pickled=testobj1,
        mypickle=testobj3)
    binary_table.insert().execute(
        primary_id=2, misc='binary_data_two.dat', data=stream2,
        data_slice=stream2[0:99], pickled=testobj2)
    binary_table.insert().execute(
        primary_id=3, misc='binary_data_two.dat', data=None,
        data_slice=stream2[0:99], pickled=None)

    # same expectations whether selecting via the Table or raw text()
    for stmt in (
            binary_table.select(order_by=binary_table.c.primary_id),
            text("select * from binary_table order by "
                 "binary_table.primary_id",
                 typemap={'pickled': PickleType,
                          'mypickle': MyPickleType},
                 bind=testing.db)):
        l = stmt.execute().fetchall()
        eq_(list(stream1), list(l[0]['data']))
        eq_(list(stream1[0:100]), list(l[0]['data_slice']))
        eq_(list(stream2), list(l[1]['data']))
        eq_(testobj1, l[0]['pickled'])
        eq_(testobj2, l[1]['pickled'])
        eq_(testobj3.moredata, l[0]['mypickle'].moredata)
        eq_(l[0]['mypickle'].stuff, 'this is the right stuff')
def test_multi_qualification(self):
    """Multiple entities/columns across aliased single-table subclasses."""
    session = create_session()
    m1 = Manager(name='Tom', manager_data='knows how to manage things')
    e1 = Engineer(name='Kurt', engineer_info='knows how to hack')
    e2 = JuniorEngineer(name='Ed', engineer_info='oh that ed')
    session.add_all([m1, e1, e2])
    session.flush()

    ealias = aliased(Engineer)
    eq_(session.query(Manager, ealias).all(),
        [(m1, e1), (m1, e2)])
    eq_(session.query(Manager.name).all(), [("Tom", )])
    eq_(session.query(Manager.name, ealias.name).all(),
        [("Tom", "Kurt"), ("Tom", "Ed")])
    eq_(session.query(func.upper(Manager.name),
                      func.upper(ealias.name)).all(),
        [("TOM", "KURT"), ("TOM", "ED")])
    eq_(session.query(Manager).add_entity(ealias).all(),
        [(m1, e1), (m1, e2)])
    eq_(session.query(Manager.name).add_column(ealias.name).all(),
        [("Tom", "Kurt"), ("Tom", "Ed")])
def test_and_match(self):
    # two-term MATCH; per the test name this exercises implicit AND
    # semantics of the backend's full-text search
    results2 = matchtable.select().where(
        matchtable.c.title.match('python nutshell'),
    ).execute().fetchall()
    eq_([5], [r.id for r in results2])
def go():
    # build another instance of the same numeric type class;
    # presumably `length` maps onto `scale` here -- TODO confirm
    n3 = n2.__class__(length=6)
    eq_(n3.scale, 6, dialect.name)
def test_simple_prefix_match(self):
    # prefix wildcard MATCH against the title column
    results = matchtable.select().where(
        matchtable.c.title.match('nut*')).execute().fetchall()
    eq_([5], [r.id for r in results])
def test_match_across_joins(self):
    # MATCH applied to a joined table's column
    results = matchtable.select().where(
        and_(cattable.c.id == matchtable.c.category_id,
             cattable.c.description.match('Ruby'))).order_by(
        matchtable.c.id).execute().fetchall()
    eq_([1, 3], [r.id for r in results])
def test_aggregate_2(self):
    # avg() over rows filtered to bar < 30 should round to 14.5
    query = create_session().query(func.avg(foo.c.bar))
    avg = query.filter(foo.c.bar < 30).one()[0]
    eq_(float(round(avg, 1)), 14.5)
def test_or_match(self):
    # explicit OR inside the MATCH expression
    results2 = matchtable.select().where(
        matchtable.c.title.match('nutshell OR ruby'),
    ).order_by(matchtable.c.id).execute().fetchall()
    eq_([3, 5], [r.id for r in results2])
def test_roundtrip(self):
    """Shard routing: inserts land on continent shards, queries route back."""
    tokyo = WeatherLocation('Asia', 'Tokyo')
    newyork = WeatherLocation('North America', 'New York')
    toronto = WeatherLocation('North America', 'Toronto')
    london = WeatherLocation('Europe', 'London')
    dublin = WeatherLocation('Europe', 'Dublin')
    brasilia = WeatherLocation('South America', 'Brasila')
    quito = WeatherLocation('South America', 'Quito')

    tokyo.reports.append(Report(80.0))
    newyork.reports.append(Report(75))
    quito.reports.append(Report(85))

    sess = create_session()
    for c in [tokyo, newyork, toronto, london, dublin, brasilia,
              quito]:
        sess.add(c)
    sess.commit()

    tokyo.city  # reload 'city' attribute on tokyo
    sess.expunge_all()

    eq_(db2.execute(weather_locations.select()).fetchall(),
        [(1, 'Asia', 'Tokyo')])
    eq_(db1.execute(weather_locations.select()).fetchall(),
        [(2, 'North America', 'New York'),
         (3, 'North America', 'Toronto')])
    eq_(sess.execute(weather_locations.select(),
                     shard_id='asia').fetchall(),
        [(1, 'Asia', 'Tokyo')])

    t = sess.query(WeatherLocation).get(tokyo.id)
    eq_(t.city, tokyo.city)
    eq_(t.reports[0].temperature, 80.0)

    north_american_cities = sess.query(WeatherLocation).filter(
        WeatherLocation.continent == 'North America')
    eq_(set([c.city for c in north_american_cities]),
        set(['New York', 'Toronto']))

    asia_and_europe = sess.query(WeatherLocation).filter(
        WeatherLocation.continent.in_(['Europe', 'Asia']))
    eq_(set([c.city for c in asia_and_europe]),
        set(['Tokyo', 'London', 'Dublin']))
def test_simple_match(self):
    # single-term MATCH against the title column
    results = matchtable.select().where(
        matchtable.c.title.match('python')).order_by(
        matchtable.c.id).execute().fetchall()
    eq_([2, 5], [r.id for r in results])
def test_history(self):
    """get_state_history tracks scalar and collection changes per lifecycle."""
    for base in (object, MyBaseClass, MyClass):
        class Foo(base):
            pass

        class Bar(base):
            pass

        attributes.register_class(Foo)
        attributes.register_class(Bar)
        attributes.register_attribute(Foo, "name", uselist=False,
                                      useobject=False)
        attributes.register_attribute(Foo, "bars", uselist=True,
                                      trackparent=True,
                                      useobject=True)
        attributes.register_attribute(Bar, "name", uselist=False,
                                      useobject=False)

        f1 = Foo()
        f1.name = 'f1'
        # pending scalar change shows up as "added"
        eq_(attributes.get_state_history(
                attributes.instance_state(f1), 'name'),
            (['f1'], (), ()))

        b1 = Bar()
        b1.name = 'b1'
        f1.bars.append(b1)
        eq_(attributes.get_state_history(
                attributes.instance_state(f1), 'bars'),
            ([b1], [], []))

        # commit_all moves everything to the "unchanged" slot
        attributes.instance_state(f1).commit_all(
            attributes.instance_dict(f1))
        attributes.instance_state(b1).commit_all(
            attributes.instance_dict(b1))
        eq_(attributes.get_state_history(
                attributes.instance_state(f1), 'name'),
            ((), ['f1'], ()))
        eq_(attributes.get_state_history(
                attributes.instance_state(f1), 'bars'),
            ((), [b1], ()))

        f1.name = 'f1mod'
        b2 = Bar()
        b2.name = 'b2'
        f1.bars.append(b2)
        eq_(attributes.get_state_history(
                attributes.instance_state(f1), 'name'),
            (['f1mod'], (), ['f1']))
        eq_(attributes.get_state_history(
                attributes.instance_state(f1), 'bars'),
            ([b2], [b1], []))

        f1.bars.remove(b1)
        eq_(attributes.get_state_history(
                attributes.instance_state(f1), 'bars'),
            ([b2], [], [b1]))
def test_proxy(self):
    # Verify a ConnectionProxy observes both compiled-statement execute()
    # events and raw cursor_execute() events, for a plain engine and a
    # threadlocal-strategy engine.
    stmts = []
    cursor_stmts = []

    class MyProxy(ConnectionProxy):
        # Record each compiled-statement execution, then delegate.
        def execute(self, conn, execute, clauseelement, *multiparams,
                    **params):
            stmts.append((str(clauseelement), params, multiparams))
            return execute(clauseelement, *multiparams, **params)

        # Record each raw cursor execution, then delegate.
        def cursor_execute(
            self,
            execute,
            cursor,
            statement,
            parameters,
            context,
            executemany,
            ):
            cursor_stmts.append((str(statement), parameters, None))
            return execute(cursor, statement, parameters, context)

    def assert_stmts(expected, received):
        # For each expected (stmt-prefix, params, positional-params)
        # entry, consume `received` until a matching recorded statement
        # is found; fail if the recorded list is exhausted first.
        for stmt, params, posn in expected:
            if not received:
                assert False
            while received:
                teststmt, testparams, testmultiparams = \
                    received.pop(0)
                # Collapse whitespace so multi-line SQL compares by prefix.
                teststmt = re.compile(r'[\n\t ]+', re.M).sub(' ',
                        teststmt).strip()
                if teststmt.startswith(stmt) and (testparams
                        == params or testparams == posn):
                    break

    for engine in \
        engines.testing_engine(options=dict(implicit_returning=False,
                               proxy=MyProxy())), \
        engines.testing_engine(options=dict(implicit_returning=False,
                               proxy=MyProxy(),
                               strategy='threadlocal')):
        m = MetaData(engine)
        t1 = Table(
            't1',
            m,
            Column('c1', Integer, primary_key=True),
            Column('c2', String(50), default=func.lower('Foo'),
                   primary_key=True),
            )
        m.create_all()
        try:
            t1.insert().execute(c1=5, c2='some data')
            t1.insert().execute(c1=6)
            eq_(engine.execute('select * from t1').fetchall(), [(5,
                'some data'), (6, 'foo')])
        finally:
            m.drop_all()
        engine.dispose()
        compiled = [('CREATE TABLE t1', {}, None),
                    ('INSERT INTO t1 (c1, c2)', {'c2': 'some data',
                    'c1': 5}, None), ('INSERT INTO t1 (c1, c2)',
                    {'c1': 6}, None), ('select * from t1', {}, None),
                    ('DROP TABLE t1', {}, None)]
        # NOTE(review): the original comment here was garbled; it appears
        # to have read "or engine.dialect.preexecute_pk_sequences" --
        # i.e. dialects that pre-execute the column default issue an
        # extra "SELECT lower(...)" at the cursor level -- confirm.
        if not testing.against('oracle+zxjdbc'):
            cursor = [
                ('CREATE TABLE t1', {}, ()),
                ('INSERT INTO t1 (c1, c2)', {'c2': 'some data',
                 'c1': 5}, (5, 'some data')),
                ('SELECT lower', {'lower_2': 'Foo'}, ('Foo', )),
                ('INSERT INTO t1 (c1, c2)', {'c2': 'foo', 'c1': 6},
                 (6, 'foo')),
                ('select * from t1', {}, ()),
                ('DROP TABLE t1', {}, ()),
                ]
        else:
            insert2_params = 6, 'Foo'
            if testing.against('oracle+zxjdbc'):
                insert2_params += (ReturningParam(12), )
            # bind param name 'lower_2' might be incorrect
            cursor = [('CREATE TABLE t1', {}, ()),
                      ('INSERT INTO t1 (c1, c2)', {'c2': 'some data',
                      'c1': 5}, (5, 'some data')),
                      ('INSERT INTO t1 (c1, c2)', {'c1': 6,
                      'lower_2': 'Foo'}, insert2_params),
                      ('select * from t1', {}, ()),
                      ('DROP TABLE t1', {}, ())]
        assert_stmts(compiled, stmts)
        assert_stmts(cursor, cursor_stmts)
def test_deferred(self):
    """Deferred scalar loading: expired attributes are repopulated via the
    class manager's deferred_scalar_loader callback, for each supported
    instrumentation base class.
    """
    for base in (object, MyBaseClass, MyClass):
        class Foo(base):
            pass

        canned = {'a': 'this is a', 'b': 12}

        def fake_loader(state, keys):
            # Populate the requested keys straight from the canned data.
            for key in keys:
                state.dict[key] = canned[key]
            return attributes.ATTR_WAS_SET

        attributes.register_class(Foo)
        manager = attributes.manager_of_class(Foo)
        manager.deferred_scalar_loader = fake_loader
        attributes.register_attribute(Foo, 'a', uselist=False,
                                      useobject=False)
        attributes.register_attribute(Foo, 'b', uselist=False,
                                      useobject=False)
        assert Foo in attributes.instrumentation_registry._state_finders

        def expire(obj):
            # Mark all attributes expired so the next access goes
            # through the deferred loader.
            attributes.instance_state(obj).expire_attributes(
                attributes.instance_dict(obj), None)

        obj = Foo()
        expire(obj)
        # Expired attributes load from the deferred loader.
        eq_(obj.a, 'this is a')
        eq_(obj.b, 12)

        obj.a = 'this is some new a'
        expire(obj)
        # Expiration discards the pending local change.
        eq_(obj.a, 'this is a')
        eq_(obj.b, 12)

        expire(obj)
        obj.a = 'this is another new a'
        # A value set after expiration wins over the loader.
        eq_(obj.a, 'this is another new a')
        eq_(obj.b, 12)

        expire(obj)
        eq_(obj.a, 'this is a')
        eq_(obj.b, 12)

        del obj.a
        # A deleted scalar reads as None rather than via the loader.
        eq_(obj.a, None)
        eq_(obj.b, 12)

        attributes.instance_state(obj).commit_all(
            attributes.instance_dict(obj))
        eq_(obj.a, None)
        eq_(obj.b, 12)
def test_instance_deferred_cols(self):
    """Pickling an instance loaded with defer() options keeps those
    deferrals in effect after unpickling, both when re-added to a session
    and when merged with load=False.
    """
    mapper(User, users,
           properties={'addresses': relationship(Address,
                                                 backref='user')})
    mapper(Address, addresses)

    sess = create_session()
    user = User(name='ed')
    user.addresses.append(Address(email_address='*****@*****.**'))
    sess.add(user)
    sess.flush()
    sess.expunge_all()

    user = sess.query(User).\
        options(sa.orm.defer('name'),
                sa.orm.defer('addresses.email_address')).\
        get(user.id)
    assert 'name' not in user.__dict__
    assert 'addresses' not in user.__dict__

    # Round-trip through pickle, then add to a fresh session: the
    # deferred attributes load on first access.
    clone = pickle.loads(pickle.dumps(user))
    sess2 = create_session()
    sess2.add(clone)
    eq_(clone.name, 'ed')
    assert 'addresses' not in clone.__dict__
    addr = clone.addresses[0]
    assert 'email_address' not in addr.__dict__
    eq_(addr.email_address, '*****@*****.**')
    eq_(clone,
        User(name='ed',
             addresses=[Address(email_address='*****@*****.**')]))

    # Same round-trip, but merged without a load.  Mapper options now
    # transmit over merge(), new as of 0.6, so email_address is still
    # deferred.
    clone = pickle.loads(pickle.dumps(user))
    sess2 = create_session()
    clone = sess2.merge(clone, load=False)
    eq_(clone.name, 'ed')
    assert 'addresses' not in clone.__dict__
    addr = clone.addresses[0]
    assert 'email_address' not in addr.__dict__
    eq_(addr.email_address, '*****@*****.**')
    eq_(clone,
        User(name='ed',
             addresses=[Address(email_address='*****@*****.**')]))