def test_class_deferred_cols(self):
    """A mapper-level deferred() column stays deferred across a pickle
    round trip, both when the copy is add()-ed and when it is
    merge(load=False)-d into a new session."""
    addresses, users = (self.tables.addresses, self.tables.users)
    mapper(User, users, properties={
        'name': sa.orm.deferred(users.c.name),
        'addresses': relationship(Address, backref="user")
    })
    mapper(Address, addresses, properties={
        'email_address': sa.orm.deferred(addresses.c.email_address)
    })
    sess = create_session()
    u1 = User(name='ed')
    u1.addresses.append(Address(email_address='*****@*****.**'))
    sess.add(u1)
    sess.flush()
    sess.expunge_all()
    u1 = sess.query(User).get(u1.id)
    # deferred attributes are unloaded on the freshly fetched instance
    assert 'name' not in u1.__dict__
    assert 'addresses' not in u1.__dict__

    # round trip 1: unpickled copy added to a new session
    u2 = pickle.loads(pickle.dumps(u1))
    sess2 = create_session()
    sess2.add(u2)
    eq_(u2.name, 'ed')
    eq_(u2, User(name='ed',
                 addresses=[Address(email_address='*****@*****.**')]))

    # round trip 2: unpickled copy merged without emitting SQL
    u2 = pickle.loads(pickle.dumps(u1))
    sess2 = create_session()
    u2 = sess2.merge(u2, load=False)
    eq_(u2.name, 'ed')
    eq_(u2, User(name='ed',
                 addresses=[Address(email_address='*****@*****.**')]))
def test_class_deferred_cols(self):
    """Deferred columns configured on the mapper remain deferred on
    unpickled copies, whether re-attached via add() or merge(load=False)."""
    addresses, users = (self.tables.addresses, self.tables.users)
    mapper(User, users, properties={
        'name': sa.orm.deferred(users.c.name),
        'addresses': relationship(Address, backref="user")
    })
    mapper(Address, addresses, properties={
        'email_address': sa.orm.deferred(addresses.c.email_address)
    })
    sess = create_session()
    u1 = User(name='ed')
    u1.addresses.append(Address(email_address='*****@*****.**'))
    sess.add(u1)
    sess.flush()
    sess.expunge_all()
    u1 = sess.query(User).get(u1.id)
    # neither deferred attribute has been loaded yet
    assert 'name' not in u1.__dict__
    assert 'addresses' not in u1.__dict__

    # unpickled copy added to a new session loads on access
    u2 = pickle.loads(pickle.dumps(u1))
    sess2 = create_session()
    sess2.add(u2)
    eq_(u2.name, 'ed')
    eq_(u2, User(name='ed',
                 addresses=[Address(email_address='*****@*****.**')]))

    # merge without SQL also preserves the deferred state
    u2 = pickle.loads(pickle.dumps(u1))
    sess2 = create_session()
    u2 = sess2.merge(u2, load=False)
    eq_(u2.name, 'ed')
    eq_(u2, User(name='ed',
                 addresses=[Address(email_address='*****@*****.**')]))
def test_options_with_descriptors(self):
    """Loader options built from strings or class-bound descriptors are
    picklable (key preserved), and an instance loaded with one pickles."""
    users, addresses, dingalings = (self.tables.users,
                                    self.tables.addresses,
                                    self.tables.dingalings)
    mapper(User, users, properties={
        'addresses': relationship(Address, backref="user")
    })
    mapper(Address, addresses, properties={
        'dingaling': relationship(Dingaling)
    })
    mapper(Dingaling, dingalings)
    sess = create_session()
    u1 = User(name='ed')
    u1.addresses.append(Address(email_address='*****@*****.**'))
    sess.add(u1)
    sess.flush()
    sess.expunge_all()
    for opt in [
        sa.orm.joinedload(User.addresses),
        sa.orm.joinedload("addresses"),
        sa.orm.defer("name"),
        sa.orm.defer(User.name),
        sa.orm.joinedload("addresses", Address.dingaling),
    ]:
        # each option survives a pickle round trip with its key intact
        opt2 = pickle.loads(pickle.dumps(opt))
        eq_(opt.key, opt2.key)

    # a query result loaded with the (last) option pickles as well
    u1 = sess.query(User).options(opt).first()
    u2 = pickle.loads(pickle.dumps(u1))
def test_instance_deferred_cols(self):
    """Instance-level defer() options applied at load time survive
    pickling and also transmit over merge(load=False)."""
    users, addresses = (self.tables.users, self.tables.addresses)
    self.mapper_registry.map_imperatively(
        User,
        users,
        properties={"addresses": relationship(Address, backref="user")},
    )
    self.mapper_registry.map_imperatively(Address, addresses)

    with fixture_session(expire_on_commit=False) as sess:
        u1 = User(name="ed")
        u1.addresses.append(Address(email_address="*****@*****.**"))
        sess.add(u1)
        sess.commit()

    with fixture_session(expire_on_commit=False) as sess:
        u1 = sess.get(
            User,
            u1.id,
            options=[
                sa.orm.defer(User.name),
                sa.orm.defaultload(User.addresses).defer(
                    Address.email_address),
            ],
        )
        # both attributes were deferred at query time
        assert "name" not in u1.__dict__
        assert "addresses" not in u1.__dict__

    # round trip 1: add the unpickled copy to a new session
    u2 = pickle.loads(pickle.dumps(u1))
    with fixture_session() as sess2:
        sess2.add(u2)
        eq_(u2.name, "ed")
        assert "addresses" not in u2.__dict__
        ad = u2.addresses[0]
        assert "email_address" not in ad.__dict__
        eq_(ad.email_address, "*****@*****.**")
        eq_(
            u2,
            User(name="ed",
                 addresses=[Address(email_address="*****@*****.**")]),
        )

    # round trip 2: merge without emitting SQL
    u2 = pickle.loads(pickle.dumps(u1))
    with fixture_session() as sess2:
        u2 = sess2.merge(u2, load=False)
        eq_(u2.name, "ed")
        assert "addresses" not in u2.__dict__
        ad = u2.addresses[0]
        # mapper options now transmit over merge(),
        # new as of 0.6, so email_address is deferred.
        assert "email_address" not in ad.__dict__
        eq_(ad.email_address, "*****@*****.**")
        eq_(
            u2,
            User(name="ed",
                 addresses=[Address(email_address="*****@*****.**")]),
        )
def test_unbound_options(self, test_case):
    """Parametrized: the unbound loader option produced by *test_case*
    pickles with its path intact, and results loaded with it pickle too."""
    sess, User, Address, Dingaling = self._option_test_fixture()

    # test_case is a lambda resolved against the fixture classes
    opt = testing.resolve_lambda(test_case, User=User, Address=Address)
    opt2 = pickle.loads(pickle.dumps(opt))
    eq_(opt.path, opt2.path)

    u1 = sess.query(User).options(opt).first()
    # round trip of the loaded instance must not raise
    pickle.loads(pickle.dumps(u1))
def test_instance_deferred_cols(self):
    """String-based defer() options applied at query time survive pickling
    and transmit over merge(load=False)."""
    users, addresses = (self.tables.users, self.tables.addresses)
    mapper(
        User,
        users,
        properties={"addresses": relationship(Address, backref="user")},
    )
    mapper(Address, addresses)

    sess = create_session()
    u1 = User(name="ed")
    u1.addresses.append(Address(email_address="*****@*****.**"))
    sess.add(u1)
    sess.flush()
    sess.expunge_all()

    u1 = (
        sess.query(User)
        .options(
            sa.orm.defer("name"), sa.orm.defer("addresses.email_address")
        )
        .get(u1.id)
    )
    # both attributes were deferred at query time
    assert "name" not in u1.__dict__
    assert "addresses" not in u1.__dict__

    # round trip 1: add the unpickled copy to a new session
    u2 = pickle.loads(pickle.dumps(u1))
    sess2 = create_session()
    sess2.add(u2)
    eq_(u2.name, "ed")
    assert "addresses" not in u2.__dict__
    ad = u2.addresses[0]
    assert "email_address" not in ad.__dict__
    eq_(ad.email_address, "*****@*****.**")
    eq_(
        u2,
        User(name="ed",
             addresses=[Address(email_address="*****@*****.**")]),
    )

    # round trip 2: merge without emitting SQL
    u2 = pickle.loads(pickle.dumps(u1))
    sess2 = create_session()
    u2 = sess2.merge(u2, load=False)
    eq_(u2.name, "ed")
    assert "addresses" not in u2.__dict__
    ad = u2.addresses[0]
    # mapper options now transmit over merge(),
    # new as of 0.6, so email_address is deferred.
    assert "email_address" not in ad.__dict__
    eq_(ad.email_address, "*****@*****.**")
    eq_(
        u2,
        User(name="ed",
             addresses=[Address(email_address="*****@*****.**")]),
    )
def test_class_deferred_cols(self):
    """Mapper-level deferred() columns stay deferred on unpickled copies,
    re-attached either via add() or merge(load=False)."""
    addresses, users = (self.tables.addresses, self.tables.users)
    mapper(
        User,
        users,
        properties={
            "name": sa.orm.deferred(users.c.name),
            "addresses": relationship(Address, backref="user"),
        },
    )
    mapper(
        Address,
        addresses,
        properties={
            "email_address": sa.orm.deferred(addresses.c.email_address)
        },
    )

    with fixture_session(expire_on_commit=False) as sess:
        u1 = User(name="ed")
        u1.addresses.append(Address(email_address="*****@*****.**"))
        sess.add(u1)
        sess.commit()

    with fixture_session() as sess:
        u1 = sess.query(User).get(u1.id)
        # deferred attributes are unloaded after the fetch
        assert "name" not in u1.__dict__
        assert "addresses" not in u1.__dict__

    # round trip 1: add the unpickled copy to a new session
    u2 = pickle.loads(pickle.dumps(u1))
    with fixture_session() as sess2:
        sess2.add(u2)
        eq_(u2.name, "ed")
        eq_(
            u2,
            User(
                name="ed",
                addresses=[Address(email_address="*****@*****.**")]
            ),
        )

    # round trip 2: merge without emitting SQL
    u2 = pickle.loads(pickle.dumps(u1))
    with fixture_session() as sess2:
        u2 = sess2.merge(u2, load=False)
        eq_(u2.name, "ed")
        eq_(
            u2,
            User(
                name="ed",
                addresses=[Address(email_address="*****@*****.**")]
            ),
        )
def test_unbound_options(self):
    """Each style of unbound loader option pickles with its path intact,
    and query results loaded with it remain picklable."""
    sess, User, Address, Dingaling = self._option_test_fixture()

    for opt in [
        sa.orm.joinedload(User.addresses),
        sa.orm.joinedload("addresses"),
        sa.orm.defer("name"),
        sa.orm.defer(User.name),
        sa.orm.joinedload("addresses").joinedload(Address.dingaling),
    ]:
        opt2 = pickle.loads(pickle.dumps(opt))
        eq_(opt.path, opt2.path)

        # a result loaded with the option must also round-trip
        u1 = sess.query(User).options(opt).first()
        pickle.loads(pickle.dumps(u1))
def test_polymorphic_deferred(self):
    """Pickling a joined-inheritance instance keeps the subtable column
    unloaded until it is accessed in the new session."""
    email_users, users = (self.tables.email_users, self.tables.users)
    mapper(
        User,
        users,
        polymorphic_identity="user",
        polymorphic_on=users.c.type,
    )
    mapper(
        EmailUser,
        email_users,
        inherits=User,
        polymorphic_identity="emailuser",
    )

    stored = EmailUser(name="user1", email_address="*****@*****.**")
    db = create_session()
    db.add(stored)
    db.flush()
    db.expunge_all()

    fetched = db.query(User).first()
    revived = pickle.loads(pickle.dumps(fetched))
    other = create_session()
    other.add(revived)
    # subtable column not yet fetched on the copy...
    assert "email_address" not in revived.__dict__
    # ...but loads on demand once attached to a session
    eq_(revived.email_address, "*****@*****.**")
def test_polymorphic_deferred(self):
    """Pickling a joined-inheritance instance keeps the subtable column
    unloaded until accessed in a new session (fixture_session version)."""
    email_users, users = (self.tables.email_users, self.tables.users)
    mapper(
        User,
        users,
        polymorphic_identity="user",
        polymorphic_on=users.c.type,
    )
    mapper(
        EmailUser,
        email_users,
        inherits=User,
        polymorphic_identity="emailuser",
    )
    eu = EmailUser(name="user1", email_address="*****@*****.**")
    with fixture_session() as sess:
        sess.add(eu)
        sess.commit()

    with fixture_session() as sess:
        eu = sess.query(User).first()
        eu2 = pickle.loads(pickle.dumps(eu))
        sess2 = fixture_session()
        sess2.add(eu2)
        # subtable column still deferred after unpickling...
        assert "email_address" not in eu2.__dict__
        # ...and loads on access
        eq_(eu2.email_address, "*****@*****.**")
def test_lazyload_extra_criteria_not_supported(self):
    """lazyload() with and_() criteria warns on pickle; the criteria are
    not carried over, so the unpickled copy loads the full collection."""
    users, addresses = (self.tables.users, self.tables.addresses)
    mapper(
        User,
        users,
        properties={"addresses": relationship(Address)},
    )
    mapper(Address, addresses)
    sess = fixture_session()
    u1 = User(
        name="ed",
        addresses=[
            Address(email_address="*****@*****.**"),
            Address(email_address="*****@*****.**"),
        ],
    )
    sess.add(u1)
    sess.commit()
    sess.close()

    # load with a filtered lazyload; only one address matches
    u1 = (sess.query(User).options(
        lazyload(User.addresses.and_(
            Address.email_address == "*****@*****.**"))).first())
    with testing.expect_warnings(
            r"Can't reliably serialize a lazyload\(\) option"):
        u2 = pickle.loads(pickle.dumps(u1))

    # original keeps the filtered (single-row) collection
    eq_(len(u1.addresses), 1)

    # the copy lost the extra criteria, so all rows load
    sess = fixture_session()
    sess.add(u2)
    eq_(len(u2.addresses), 2)
def test_instance_lazy_relation_loaders(self):
    """A lazyload() option that overrides a 'noload' relationship is
    carried along when the loaded instance is pickled."""
    users, addresses = (self.tables.users, self.tables.addresses)
    mapper(User, users, properties={
        'addresses': relationship(Address, lazy='noload')
    })
    mapper(Address, addresses)

    session = Session()
    pending = User(
        name='ed',
        addresses=[Address(email_address='*****@*****.**')],
    )
    session.add(pending)
    session.commit()
    session.close()

    # reload with an explicit lazyload overriding the 'noload' default
    pending = session.query(User).options(
        lazyload(User.addresses)
    ).first()
    clone = pickle.loads(pickle.dumps(pending))

    fresh = Session()
    fresh.add(clone)
    # the collection is loadable on the copy
    assert clone.addresses
def test_collection_setstate(self):
    """test a particular cycle that requires CollectionAdapter
    to not rely upon InstanceState to deserialize."""

    meta = MetaData()
    child1_table = Table(
        'c1', meta,
        Column('parent_id', String, ForeignKey('p.id'), primary_key=True),
    )
    child2_table = Table(
        'c2', meta,
        Column('parent_id', String, ForeignKey('p.id'), primary_key=True),
    )
    parent_table = Table('p', meta, Column('id', String, primary_key=True))

    mapper(Parent, parent_table, properties={
        'children1': relationship(Child1),
        'children2': relationship(Child2),
    })
    mapper(Child1, child1_table)
    mapper(Child2, child2_table)

    parent = Parent()
    outer = Screen(parent)
    outer.errors = [parent.children1, parent.children2]
    inner = Screen(Child2(), outer)

    # round trip of the cyclic structure must not raise
    pickle.loads(pickle.dumps(inner))
def test_weakref_pickled(self):
    """A pickled copy of a dirty, expunged instance re-enters the session
    as a strongly-held dirty identity-map entry, and flushes cleanly even
    after all external references are garbage collected."""
    users, User = self.tables.users, pickleable.User
    s = create_session()
    mapper(User, users)
    s.add(User(name='ed'))
    s.flush()
    assert not s.dirty

    user = s.query(User).one()
    user.name = 'fred'
    s.expunge(user)

    u2 = pickle.loads(pickle.dumps(user))

    del user
    s.add(u2)

    del u2
    gc_collect()

    # the copy is dirty, so the session holds it strongly (not weakly)
    assert len(s.identity_map) == 1
    assert len(s.dirty) == 1
    assert None not in s.dirty
    s.flush()
    gc_collect()
    # once flushed and clean, the weakly-referenced entry is collectable
    assert not s.dirty
    assert not s.identity_map
def test_weak_ref_pickled(self):
    """Same scenario as test_weakref_pickled: an unpickled dirty instance
    is strongly held by the session until flushed, then becomes weakly
    referenced and collectable."""
    users, User = self.tables.users, pickleable.User
    s = create_session()
    mapper(User, users)
    s.add(User(name='ed'))
    s.flush()
    assert not s.dirty

    user = s.query(User).one()
    user.name = 'fred'
    s.expunge(user)

    u2 = pickle.loads(pickle.dumps(user))

    del user
    s.add(u2)

    del u2
    gc_collect()

    # dirty copy is pinned in the identity map despite gc
    assert len(s.identity_map) == 1
    assert len(s.dirty) == 1
    assert None not in s.dirty
    s.flush()
    gc_collect()
    # clean instances are only weakly referenced and get collected
    assert not s.dirty
    assert not s.identity_map
def test_polymorphic_deferred(self):
    """An unpickled polymorphic instance defers its subtable column until
    it is accessed in the receiving session."""
    email_users, users = (
        self.tables.email_users,
        self.tables.users,
    )
    mapper(User, users, polymorphic_identity='user',
           polymorphic_on=users.c.type)
    mapper(EmailUser, email_users, inherits=User,
           polymorphic_identity='emailuser')

    persisted = EmailUser(name="user1", email_address='*****@*****.**')
    source_sess = create_session()
    source_sess.add(persisted)
    source_sess.flush()
    source_sess.expunge_all()

    loaded = source_sess.query(User).first()
    duplicate = pickle.loads(pickle.dumps(loaded))
    target_sess = create_session()
    target_sess.add(duplicate)
    # subtable column has not been fetched on the copy
    assert 'email_address' not in duplicate.__dict__
    # accessing it triggers the deferred load
    eq_(duplicate.email_address, '*****@*****.**')
def test_collection_setstate(self):
    """test a particular cycle that requires CollectionAdapter
    to not rely upon InstanceState to deserialize."""

    metadata = MetaData()
    first_child = Table(
        "c1",
        metadata,
        Column("parent_id", String, ForeignKey("p.id"), primary_key=True),
    )
    second_child = Table(
        "c2",
        metadata,
        Column("parent_id", String, ForeignKey("p.id"), primary_key=True),
    )
    parent_tbl = Table("p", metadata, Column("id", String, primary_key=True))

    mapper(
        Parent,
        parent_tbl,
        properties={
            "children1": relationship(Child1),
            "children2": relationship(Child2),
        },
    )
    mapper(Child1, first_child)
    mapper(Child2, second_child)

    root = Parent()
    first_screen = Screen(root)
    first_screen.errors = [root.children1, root.children2]
    second_screen = Screen(Child2(), first_screen)

    # deserializing the cycle must succeed without touching InstanceState
    pickle.loads(pickle.dumps(second_screen))
def test_instance_lazy_relation_loaders(self):
    """An instance loaded with lazyload() (overriding 'noload') keeps that
    loader behavior after a pickle round trip."""
    users, addresses = (self.tables.users, self.tables.addresses)
    mapper(User, users, properties={
        'addresses': relationship(Address, lazy='noload')
    })
    mapper(Address, addresses)

    db = Session()
    stored = User(
        name='ed',
        addresses=[Address(email_address='*****@*****.**')],
    )
    db.add(stored)
    db.commit()
    db.close()

    # query again with lazyload() replacing the mapper-level 'noload'
    stored = db.query(User).options(lazyload(User.addresses)).first()
    revived = pickle.loads(pickle.dumps(stored))

    db = Session()
    db.add(revived)
    # the collection loads on the revived copy
    assert revived.addresses
def test_instance_deferred_cols(self):
    """Query-time defer() options survive pickling and transmit over
    merge(load=False)."""
    users, addresses = (self.tables.users, self.tables.addresses)
    mapper(User, users, properties={
        'addresses': relationship(Address, backref="user")
    })
    mapper(Address, addresses)
    sess = create_session()
    u1 = User(name='ed')
    u1.addresses.append(Address(email_address='*****@*****.**'))
    sess.add(u1)
    sess.flush()
    sess.expunge_all()
    u1 = sess.query(User).\
        options(sa.orm.defer('name'),
                sa.orm.defer('addresses.email_address')).\
        get(u1.id)
    # both attributes were deferred at query time
    assert 'name' not in u1.__dict__
    assert 'addresses' not in u1.__dict__

    # round trip 1: add the unpickled copy to a new session
    u2 = pickle.loads(pickle.dumps(u1))
    sess2 = create_session()
    sess2.add(u2)
    eq_(u2.name, 'ed')
    assert 'addresses' not in u2.__dict__
    ad = u2.addresses[0]
    assert 'email_address' not in ad.__dict__
    eq_(ad.email_address, '*****@*****.**')
    eq_(u2, User(name='ed', addresses=[
        Address(email_address='*****@*****.**')]))

    # round trip 2: merge without emitting SQL
    u2 = pickle.loads(pickle.dumps(u1))
    sess2 = create_session()
    u2 = sess2.merge(u2, load=False)
    eq_(u2.name, 'ed')
    assert 'addresses' not in u2.__dict__
    ad = u2.addresses[0]
    # mapper options now transmit over merge(),
    # new as of 0.6, so email_address is deferred.
    assert 'email_address' not in ad.__dict__
    eq_(ad.email_address, '*****@*****.**')
    eq_(u2, User(name='ed', addresses=[
        Address(email_address='*****@*****.**')]))
def test_instance_deferred_cols(self):
    """Deferred-at-query-time attributes stay deferred on pickled copies,
    both via add() and via merge(load=False)."""
    users, addresses = (self.tables.users, self.tables.addresses)
    mapper(User, users,
           properties={'addresses': relationship(Address, backref="user")})
    mapper(Address, addresses)
    sess = create_session()
    u1 = User(name='ed')
    u1.addresses.append(Address(email_address='*****@*****.**'))
    sess.add(u1)
    sess.flush()
    sess.expunge_all()
    u1 = sess.query(User).\
        options(sa.orm.defer('name'),
                sa.orm.defer('addresses.email_address')).\
        get(u1.id)
    # neither deferred attribute has been loaded
    assert 'name' not in u1.__dict__
    assert 'addresses' not in u1.__dict__

    # unpickled copy added to a new session
    u2 = pickle.loads(pickle.dumps(u1))
    sess2 = create_session()
    sess2.add(u2)
    eq_(u2.name, 'ed')
    assert 'addresses' not in u2.__dict__
    ad = u2.addresses[0]
    assert 'email_address' not in ad.__dict__
    eq_(ad.email_address, '*****@*****.**')
    eq_(u2,
        User(name='ed',
             addresses=[Address(email_address='*****@*****.**')]))

    # unpickled copy merged without SQL
    u2 = pickle.loads(pickle.dumps(u1))
    sess2 = create_session()
    u2 = sess2.merge(u2, load=False)
    eq_(u2.name, 'ed')
    assert 'addresses' not in u2.__dict__
    ad = u2.addresses[0]
    # mapper options now transmit over merge(),
    # new as of 0.6, so email_address is deferred.
    assert 'email_address' not in ad.__dict__
    eq_(ad.email_address, '*****@*****.**')
    eq_(u2,
        User(name='ed',
             addresses=[Address(email_address='*****@*****.**')]))
def test_load(self):
    """Entities loaded via subqueryload of a with_polymorphic-typed
    relationship are picklable.

    Fix: the unpickled result was bound to an unused local ``e2``; the
    round trip itself is the assertion, so the binding is dropped.
    """
    s = Session()
    with_poly = with_polymorphic(Person, [Engineer, Manager], flat=True)
    emp = s.query(Company).options(
        subqueryload(Company.employees.of_type(with_poly))).first()
    # round trip must not raise; the result is not otherwise inspected
    pickle.loads(pickle.dumps(emp))
def test_load(self):
    """Entities loaded via subqueryload of a with_polymorphic-typed
    relationship are picklable."""
    s = fixture_session()
    with_poly = with_polymorphic(Person, [Engineer, Manager], flat=True)
    emp = (s.query(Company).options(
        subqueryload(Company.employees.of_type(with_poly))).first())
    # round trip must not raise
    pickle.loads(pickle.dumps(emp))
def test_class_deferred_cols(self):
    """Mapper-level deferred() columns remain deferred on unpickled
    copies, re-attached via add() or merge(load=False)."""
    addresses, users = (self.tables.addresses, self.tables.users)
    mapper(
        User,
        users,
        properties={
            "name": sa.orm.deferred(users.c.name),
            "addresses": relationship(Address, backref="user"),
        },
    )
    mapper(
        Address,
        addresses,
        properties={
            "email_address": sa.orm.deferred(addresses.c.email_address)
        },
    )
    sess = create_session()
    u1 = User(name="ed")
    u1.addresses.append(Address(email_address="*****@*****.**"))
    sess.add(u1)
    sess.flush()
    sess.expunge_all()
    u1 = sess.query(User).get(u1.id)
    # deferred attributes are unloaded on the freshly fetched instance
    assert "name" not in u1.__dict__
    assert "addresses" not in u1.__dict__

    # round trip 1: unpickled copy added to a new session
    u2 = pickle.loads(pickle.dumps(u1))
    sess2 = create_session()
    sess2.add(u2)
    eq_(u2.name, "ed")
    eq_(
        u2,
        User(name="ed",
             addresses=[Address(email_address="*****@*****.**")]),
    )

    # round trip 2: unpickled copy merged without SQL
    u2 = pickle.loads(pickle.dumps(u1))
    sess2 = create_session()
    u2 = sess2.merge(u2, load=False)
    eq_(u2.name, "ed")
    eq_(
        u2,
        User(name="ed",
             addresses=[Address(email_address="*****@*****.**")]),
    )
def persistent_id(obj):
    """pickle ``persistent_id`` hook: encode SQLAlchemy constructs as
    string references so they are restored by lookup rather than by value.

    Returns None for any other object, which lets pickle serialize it
    normally.

    Fix: the local result variable shadowed the builtin ``id``; renamed
    to ``id_``.
    """
    if isinstance(obj, QueryableAttribute):
        cls = obj.impl.class_
        key = obj.impl.key
        # NOTE(review): assumes b64encode returns str (e.g. a wrapper
        # decoding base64.b64encode's bytes) -- confirm against its
        # definition, else this concatenation fails on Python 3.
        id_ = "attribute:" + key + ":" + b64encode(pickle.dumps(cls))
    elif isinstance(obj, Mapper) and not obj.non_primary:
        id_ = "mapper:" + b64encode(pickle.dumps(obj.class_))
    elif isinstance(obj, Table):
        id_ = "table:" + str(obj)
    elif isinstance(obj, Column) and isinstance(obj.table, Table):
        # columns are addressed by table name plus column key
        id_ = "column:" + str(obj.table) + ":" + obj.key
    elif isinstance(obj, Session):
        id_ = "session:"
    elif isinstance(obj, Engine):
        id_ = "engine:"
    else:
        return None
    return id_
def persistent_id(obj):
    """pickle ``persistent_id`` hook: serialize SQLAlchemy constructs by
    reference (a string id) instead of by value; return None for anything
    else so pickle handles it normally.

    Fix: the local result variable shadowed the builtin ``id``; renamed
    to ``id_``.
    """
    if isinstance(obj, QueryableAttribute):
        cls = obj.impl.class_
        key = obj.impl.key
        # NOTE(review): assumes b64encode returns str (e.g. a wrapper
        # decoding base64.b64encode's bytes) -- confirm against its
        # definition, else this concatenation fails on Python 3.
        id_ = "attribute:" + key + ":" + b64encode(pickle.dumps(cls))
    elif isinstance(obj, Mapper) and not obj.non_primary:
        id_ = "mapper:" + b64encode(pickle.dumps(obj.class_))
    elif isinstance(obj, Table):
        id_ = "table:" + str(obj)
    elif isinstance(obj, Column) and isinstance(obj.table, Table):
        # columns are addressed by table name plus column key
        id_ = "column:" + str(obj.table) + ":" + obj.key
    elif isinstance(obj, Session):
        id_ = "session:"
    elif isinstance(obj, Engine):
        id_ = "engine:"
    else:
        return None
    return id_
def test_pickled_update(self):
    """The same instance cannot live in two sessions at once, but an
    unpickled copy is a distinct object and may be added elsewhere."""
    users, User = self.tables.users, pickleable.User
    mapper(User, users)
    first_sess = create_session()
    second_sess = create_session()

    original = User(name="u1")
    first_sess.add(original)

    # adding the identical object to a second session is rejected
    assert_raises_message(
        sa.exc.InvalidRequestError,
        "already attached to session",
        second_sess.add,
        original,
    )

    # the pickled clone is independent and is accepted
    clone = pickle.loads(pickle.dumps(original))
    second_sess.add(clone)
def test_bound_options(self):
    """Load()-bound loader options pickle with path, context keys and
    local_opts intact; results loaded with them also round-trip."""
    sess, User, Address, Dingaling = self._option_test_fixture()

    for opt in [
        sa.orm.Load(User).joinedload(User.addresses),
        sa.orm.Load(User).joinedload("addresses"),
        sa.orm.Load(User).defer("name"),
        sa.orm.Load(User).defer(User.name),
        sa.orm.Load(User).joinedload("addresses").joinedload(
            Address.dingaling),
        sa.orm.Load(User).joinedload(
            "addresses", innerjoin=True).joinedload(Address.dingaling),
    ]:
        opt2 = pickle.loads(pickle.dumps(opt))
        eq_(opt.path, opt2.path)
        eq_(opt.context.keys(), opt2.context.keys())
        eq_(opt.local_opts, opt2.local_opts)

        # results loaded with the option must also pickle
        u1 = sess.query(User).options(opt).first()
        pickle.loads(pickle.dumps(u1))
def test_became_bound_options(self):
    """An unbound option that became bound during query compilation still
    pickles with path and local_opts intact."""
    sess, User, Address, Dingaling = self._option_test_fixture()

    for opt in [
        sa.orm.joinedload(User.addresses),
        sa.orm.defer(User.name),
        sa.orm.joinedload(User.addresses).joinedload(
            Address.dingaling),
    ]:
        context = sess.query(User).options(opt)._compile_context()
        # extract the bound Load object the compilation produced
        opt = [
            v for v in context.attributes.values()
            if isinstance(v, sa.orm.Load)
        ][0]

        opt2 = pickle.loads(pickle.dumps(opt))
        eq_(opt.path, opt2.path)
        eq_(opt.local_opts, opt2.local_opts)

        # results loaded with the bound option must also pickle
        u1 = sess.query(User).options(opt).first()
        pickle.loads(pickle.dumps(u1))
def test_pickled_update(self):
    """An object attached to one session is rejected by another, while its
    pickled duplicate — a separate identity — is accepted."""
    users, User = self.tables.users, pickleable.User
    mapper(User, users)

    sess_a = create_session()
    sess_b = create_session()

    instance = User(name='u1')
    sess_a.add(instance)

    # second session refuses the very same object
    assert_raises_message(sa.exc.InvalidRequestError,
                          'already attached to session',
                          sess_b.add, instance)

    # but happily takes an unpickled duplicate
    duplicate = pickle.loads(pickle.dumps(instance))
    sess_b.add(duplicate)
def test_options_of_type(self):
    """joinedload(...of_type(X)) pickles; a with_polymorphic target is
    serialized as None in the path since the ad-hoc entity itself cannot
    be pickled."""
    with_poly = with_polymorphic(Person, [Engineer, Manager], flat=True)
    for opt, serialized in [
        (sa.orm.joinedload(Company.employees.of_type(Engineer)),
         [(Company, "employees", Engineer)]),
        (sa.orm.joinedload(Company.employees.of_type(with_poly)),
         [(Company, "employees", None)]),
    ]:
        opt2 = pickle.loads(pickle.dumps(opt))
        # both the original and the copy report the expected path
        eq_(opt.__getstate__()['path'], serialized)
        eq_(opt2.__getstate__()['path'], serialized)
def test_invalidated_flag_pickle(self):
    """A pending collection on a transient instance remains usable after
    the instance is pickled and unpickled."""
    users, addresses = (self.tables.users, self.tables.addresses)
    mapper(User, users,
           properties={'addresses': relationship(Address, lazy='noload')})
    mapper(Address, addresses)

    original = User()
    original.addresses.append(Address())

    duplicate = pickle.loads(pickle.dumps(original))
    # appending to the restored collection works normally
    duplicate.addresses.append(Address())
    eq_(len(duplicate.addresses), 2)
def test_no_mappers(self):
    """Unpickling an instance whose class is no longer mapped raises
    UnmappedInstanceError.

    Fix: the ``mapper()`` return value was bound to an unused local
    ``umapper``; the binding is dropped.
    """
    users = self.tables.users

    mapper(User, users)
    u1 = User(name='ed')
    u1_pickled = pickle.dumps(u1, -1)

    # removing all mapper configuration makes the pickle unusable
    clear_mappers()

    assert_raises_message(
        orm_exc.UnmappedInstanceError,
        "Cannot deserialize object of type "
        "<class 'sqlalchemy.testing.pickleable.User'> - no mapper()",
        pickle.loads, u1_pickled)
def test_invalidated_flag_pickle(self):
    """The collection carried by a pickled transient object accepts new
    members after deserialization."""
    users, addresses = (self.tables.users, self.tables.addresses)
    mapper(User, users, properties={
        'addresses': relationship(Address, lazy='noload')
    })
    mapper(Address, addresses)

    src = User()
    src.addresses.append(Address())

    restored = pickle.loads(pickle.dumps(src))
    restored.addresses.append(Address())
    # one member from before pickling plus one appended afterwards
    eq_(len(restored.addresses), 2)
def test_options_of_type(self):
    """of_type() loader options pickle; a with_polymorphic target appears
    as None in the serialized path since it cannot itself be pickled."""
    with_poly = with_polymorphic(Person, [Engineer, Manager], flat=True)
    for opt, serialized in [
        (
            sa.orm.joinedload(Company.employees.of_type(Engineer)),
            [(Company, "employees", Engineer)]),
        (
            sa.orm.joinedload(Company.employees.of_type(with_poly)),
            [(Company, "employees", None)]),
    ]:
        opt2 = pickle.loads(pickle.dumps(opt))
        # original and copy must serialize to the same path
        eq_(opt.__getstate__()['path'], serialized)
        eq_(opt2.__getstate__()['path'], serialized)
def test_no_mappers(self):
    """Unpickling an instance after clear_mappers() raises
    UnmappedInstanceError.

    Fixes: the ``mapper()`` return value was bound to an unused local
    ``umapper``; the expected-message literal is split across lines
    (implicit concatenation, identical runtime string) for readability.
    """
    users = self.tables.users

    mapper(User, users)
    u1 = User(name='ed')
    u1_pickled = pickle.dumps(u1, -1)

    # removing all mapper configuration makes the pickle unusable
    clear_mappers()

    assert_raises_message(
        orm_exc.UnmappedInstanceError,
        "Cannot deserialize object of type "
        "<class 'sqlalchemy.testing.pickleable.User'> - no mapper()",
        pickle.loads, u1_pickled)
def test_tuple_labeling(self): sess = fixture_session() # test pickle + all the protocols ! for pickled in False, -1, 0, 1, 2: for row in sess.query(User, Address).join(User.addresses).all(): if pickled is not False: row = pickle.loads(pickle.dumps(row, pickled)) eq_(list(row._fields), ["User", "Address"]) eq_(row.User, row[0]) eq_(row.Address, row[1]) for row in sess.query(User.name, User.id.label("foobar")): if pickled is not False: row = pickle.loads(pickle.dumps(row, pickled)) eq_(list(row._fields), ["name", "foobar"]) eq_(row.name, row[0]) eq_(row.foobar, row[1]) for row in sess.query(User).with_entities( User.name, User.id.label("foobar") ): if pickled is not False: row = pickle.loads(pickle.dumps(row, pickled)) eq_(list(row._fields), ["name", "foobar"]) eq_(row.name, row[0]) eq_(row.foobar, row[1]) oalias = aliased(Order) for row in ( sess.query(User, oalias) .join(User.orders.of_type(oalias)) .all() ): if pickled is not False: row = pickle.loads(pickle.dumps(row, pickled)) eq_(list(row._fields), ["User"]) eq_(row.User, row[0]) oalias = aliased(Order, name="orders") for row in ( sess.query(User, oalias).join(oalias, User.orders).all() ): if pickled is not False: row = pickle.loads(pickle.dumps(row, pickled)) eq_(list(row._fields), ["User", "orders"]) eq_(row.User, row[0]) eq_(row.orders, row[1]) for row in sess.query(User.name + "hoho", User.name): eq_(list(row._fields), ["name"]) eq_(row[0], row.name + "hoho") if pickled is not False: ret = sess.query(User, Address).join(User.addresses).all() pickle.loads(pickle.dumps(ret, pickled))
def test_tuple_labeling(self):
    """Keyed-tuple rows keep their ``keys()`` labeling across pickling
    with every pickle protocol (False means 'no pickling' as a control)."""
    users = self.tables.users
    sess = create_session()

    # test pickle + all the protocols !
    for pickled in False, -1, 0, 1, 2:
        for row in sess.query(User, Address).join(User.addresses).all():
            if pickled is not False:
                row = pickle.loads(pickle.dumps(row, pickled))

            eq_(list(row.keys()), ["User", "Address"])
            eq_(row.User, row[0])
            eq_(row.Address, row[1])

        for row in sess.query(User.name, User.id.label("foobar")):
            if pickled is not False:
                row = pickle.loads(pickle.dumps(row, pickled))
            eq_(list(row.keys()), ["name", "foobar"])
            eq_(row.name, row[0])
            eq_(row.foobar, row[1])

        for row in sess.query(User).values(
            User.name, User.id.label("foobar")
        ):
            if pickled is not False:
                row = pickle.loads(pickle.dumps(row, pickled))
            eq_(list(row.keys()), ["name", "foobar"])
            eq_(row.name, row[0])
            eq_(row.foobar, row[1])

        # anonymous alias contributes no key
        oalias = aliased(Order)
        for row in sess.query(User, oalias).join(User.orders).all():
            if pickled is not False:
                row = pickle.loads(pickle.dumps(row, pickled))
            eq_(list(row.keys()), ["User"])
            eq_(row.User, row[0])

        # named alias shows up under its name
        oalias = aliased(Order, name="orders")
        for row in (
            sess.query(User, oalias).join(oalias, User.orders).all()
        ):
            if pickled is not False:
                row = pickle.loads(pickle.dumps(row, pickled))
            eq_(list(row.keys()), ["User", "orders"])
            eq_(row.User, row[0])
            eq_(row.orders, row[1])

        # test here that first col is not labeled, only
        # one name in keys, matches correctly
        for row in sess.query(User.name + "hoho", User.name):
            eq_(list(row.keys()), ["name"])
            eq_(row[0], row.name + "hoho")

        if pickled is not False:
            ret = sess.query(User, Address).join(User.addresses).all()
            pickle.loads(pickle.dumps(ret, pickled))
def test_no_instrumentation(self):
    """An instance pickled under one mapper configuration can be unpickled
    after clear_mappers() plus re-mapping; InstanceState resolves the new
    mapper on deserialization."""
    users = self.tables.users

    mapper(User, users)
    u1 = User(name="ed")
    u1_pickled = pickle.dumps(u1, -1)

    clear_mappers()

    # re-establish an equivalent mapping before unpickling
    mapper(User, users)

    u1 = pickle.loads(u1_pickled)
    # this fails unless the InstanceState
    # compiles the mapper
    eq_(str(u1), "User(name='ed')")
def test_invalidated_flag_pickle(self):
    """A 'noload' collection on a transient instance is still usable on
    the unpickled copy."""
    users, addresses = (self.tables.users, self.tables.addresses)
    self.mapper_registry.map_imperatively(
        User,
        users,
        properties={"addresses": relationship(Address, lazy="noload")},
    )
    self.mapper_registry.map_imperatively(Address, addresses)

    source = User()
    source.addresses.append(Address())

    restored = pickle.loads(pickle.dumps(source))
    restored.addresses.append(Address())
    # the pre-pickle member plus the newly appended one
    eq_(len(restored.addresses), 2)
def test_no_instrumentation(self):
    """Unpickling works after clear_mappers() plus re-mapping; the
    InstanceState resolves the new mapper.

    Fix: both ``mapper()`` return values were bound to an unused local
    ``umapper``; the bindings are dropped.
    """
    users = self.tables.users

    mapper(User, users)
    u1 = User(name='ed')
    u1_pickled = pickle.dumps(u1, -1)

    clear_mappers()

    # re-establish an equivalent mapping before unpickling
    mapper(User, users)

    u1 = pickle.loads(u1_pickled)
    # this fails unless the InstanceState
    # compiles the mapper
    eq_(str(u1), "User(name='ed')")
def test_rebuild_state(self):
    """not much of a 'test', but illustrate how to remove
    instance-level state before pickling.
    """
    users = self.tables.users
    mapper(User, users)

    u1 = User()
    # strip instrumentation state so only plain attributes remain
    attributes.manager_of_class(User).teardown_instance(u1)
    assert not u1.__dict__
    u2 = pickle.loads(pickle.dumps(u1))
    # re-establish instrumentation on the unpickled copy
    attributes.manager_of_class(User).setup_instance(u2)
    assert attributes.instance_state(u2)
def test_tuple_labeling(self):
    """Keyed-tuple rows keep their ``keys()`` labeling across pickling
    with every pickle protocol (False means 'no pickling' as a control)."""
    users = self.tables.users
    sess = create_session()

    # test pickle + all the protocols !
    for pickled in False, -1, 0, 1, 2:
        for row in sess.query(User, Address).join(User.addresses).all():
            if pickled is not False:
                row = pickle.loads(pickle.dumps(row, pickled))

            eq_(list(row.keys()), ['User', 'Address'])
            eq_(row.User, row[0])
            eq_(row.Address, row[1])

        for row in sess.query(User.name, User.id.label('foobar')):
            if pickled is not False:
                row = pickle.loads(pickle.dumps(row, pickled))
            eq_(list(row.keys()), ['name', 'foobar'])
            eq_(row.name, row[0])
            eq_(row.foobar, row[1])

        for row in sess.query(User).values(User.name,
                                           User.id.label('foobar')):
            if pickled is not False:
                row = pickle.loads(pickle.dumps(row, pickled))
            eq_(list(row.keys()), ['name', 'foobar'])
            eq_(row.name, row[0])
            eq_(row.foobar, row[1])

        # anonymous alias contributes no key
        oalias = aliased(Order)
        for row in sess.query(User, oalias).join(User.orders).all():
            if pickled is not False:
                row = pickle.loads(pickle.dumps(row, pickled))
            eq_(list(row.keys()), ['User'])
            eq_(row.User, row[0])

        # named alias shows up under its name
        oalias = aliased(Order, name='orders')
        for row in sess.query(User, oalias).join(oalias, User.orders) \
                .all():
            if pickled is not False:
                row = pickle.loads(pickle.dumps(row, pickled))
            eq_(list(row.keys()), ['User', 'orders'])
            eq_(row.User, row[0])
            eq_(row.orders, row[1])

        # test here that first col is not labeled, only
        # one name in keys, matches correctly
        for row in sess.query(User.name + 'hoho', User.name):
            eq_(list(row.keys()), ['name'])
            eq_(row[0], row.name + 'hoho')

        if pickled is not False:
            ret = sess.query(User, Address).join(User.addresses).all()
            pickle.loads(pickle.dumps(ret, pickled))
def test_tuple_labeling(self): sess = create_session() # test pickle + all the protocols ! for pickled in False, -1, 0, 1, 2: for row in sess.query(User, Address).join(User.addresses).all(): if pickled is not False: row = pickle.loads(pickle.dumps(row, pickled)) eq_(list(row.keys()), ["User", "Address"]) eq_(row.User, row[0]) eq_(row.Address, row[1]) for row in sess.query(User.name, User.id.label("foobar")): if pickled is not False: row = pickle.loads(pickle.dumps(row, pickled)) eq_(list(row.keys()), ["name", "foobar"]) eq_(row.name, row[0]) eq_(row.foobar, row[1]) for row in sess.query(User).values(User.name, User.id.label("foobar")): if pickled is not False: row = pickle.loads(pickle.dumps(row, pickled)) eq_(list(row.keys()), ["name", "foobar"]) eq_(row.name, row[0]) eq_(row.foobar, row[1]) oalias = aliased(Order) for row in sess.query(User, oalias).join(User.orders).all(): if pickled is not False: row = pickle.loads(pickle.dumps(row, pickled)) eq_(list(row.keys()), ["User"]) eq_(row.User, row[0]) oalias = aliased(Order, name="orders") for row in (sess.query(User, oalias).join(oalias, User.orders).all()): if pickled is not False: row = pickle.loads(pickle.dumps(row, pickled)) eq_(list(row.keys()), ["User", "orders"]) eq_(row.User, row[0]) eq_(row.orders, row[1]) # test here that first col is not labeled, only # one name in keys, matches correctly for row in sess.query(User.name + "hoho", User.name): eq_(list(row.keys()), ["name"]) eq_(row[0], row.name + "hoho") if pickled is not False: ret = sess.query(User, Address).join(User.addresses).all() pickle.loads(pickle.dumps(ret, pickled))
def test_transient(self):
    """A transient (never-flushed) instance with a pending collection can
    be pickled; the copy persists and compares equal to the original."""
    users, addresses = (self.tables.users, self.tables.addresses)
    mapper(User, users,
           properties={'addresses': relationship(Address, backref="user")})
    mapper(Address, addresses)

    session = create_session()
    original = User(name='ed')
    original.addresses.append(Address(email_address='*****@*****.**'))

    clone = pickle.loads(pickle.dumps(original))
    session.add(clone)
    session.flush()
    session.expunge_all()

    # the persisted clone round-trips to something equal to the original
    eq_(original, session.query(User).get(clone.id))
def test_transient(self):
    """Pickling a pending (transient) object copies its pending
    collection; the copy can be flushed and equals the original."""
    users, addresses = (self.tables.users, self.tables.addresses)
    mapper(User, users, properties={
        'addresses': relationship(Address, backref="user")
    })
    mapper(Address, addresses)

    db = create_session()
    source = User(name='ed')
    source.addresses.append(Address(email_address='*****@*****.**'))

    copied = pickle.loads(pickle.dumps(source))
    db.add(copied)
    db.flush()
    db.expunge_all()

    # reloading by the copy's id yields an object equal to the source
    eq_(source, db.query(User).get(copied.id))
def test_transient(self):
    """A transient instance with a pending collection pickles cleanly;
    the unpickled copy can be persisted and equals the original."""
    users, addresses = (self.tables.users, self.tables.addresses)
    self.mapper_registry.map_imperatively(
        User,
        users,
        properties={"addresses": relationship(Address, backref="user")},
    )
    self.mapper_registry.map_imperatively(Address, addresses)

    session = fixture_session()
    pending = User(name="ed")
    pending.addresses.append(Address(email_address="*****@*****.**"))

    replica = pickle.loads(pickle.dumps(pending))
    session.add(replica)
    session.flush()
    session.expunge_all()

    # reloading the replica yields an object equal to the pending one
    eq_(pending, session.query(User).get(replica.id))
def dumps(cls, data, proto=2, *, fix_imports=True):
    """Serialize *data* with pickle at protocol *proto* (default 2),
    forwarding *fix_imports* to :func:`pickle.dumps`."""
    payload = pickle.dumps(data, proto, fix_imports=fix_imports)
    return payload
def persistent_id(obj):
    """pickle ``persistent_id`` hook: objects exposing a ``_pk`` attribute
    (instances only, never classes) are referenced as a pickled
    ``(class, pk)`` pair; everything else returns None implicitly so
    pickle serializes it normally."""
    is_instance = not isinstance(obj, type)
    if is_instance and hasattr(obj, '_pk'):
        return pickle.dumps((obj.__class__, obj._pk))
def _serialize(self, *args, **kw): return pickle.dumps([args, kw])
def _serialize(self, *args, **params): return pickle.dumps([args, params])