def setUp(self):
    super(TestUserpics, self).setUp()
    users = self.users = Table(
        'users', self.meta,
        Column('id', Integer, primary_key=True),
        Column('pic_count', Integer),
        Column('name', String(50)),
    )
    userpics = self.userpics = Table(
        'userpics', self.meta,
        Column('uid', Integer, primary_key=True),
        Column('name', String(50), primary_key=True),
        Column('state', String(50)),
    )
    self.meta.create_all()

    class Userpic(self.EasyInit):
        pass

    class User(self.EasyInit):
        pass

    self.Userpic = Userpic
    self.User = User

    self.userpic_mapper = mapper(
        Userpic, userpics,
        extension=self.aggregator_class(
            a.Count(users.c.pic_count,
                    (users.c.id == userpics.c.uid) &
                    (userpics.c.state == "normal")),
        ))
    self.user_mapper = mapper(User, users)
def reload_mapper(metadata, now):
    """Reload the mapper for the Machine model.

    @param metadata: the MetaData to reload
    @type metadata: sqlalchemy.schema.MetaData
    @param now: the current time
    @type now: datetime
    """
    t_machine = get_machine_table(metadata, now)
    t_machine_tag = metadata.tables['machine2tag']
    t_user = metadata.tables['user']

    mapper(Machine, t_machine, properties={
        'children': relation(
            Machine,
            backref=backref('parent', remote_side=[t_machine.c.id])),
        'notebook': relation(karesansui.db.model.notebook.Notebook),
        'created_user': relation(
            karesansui.db.model.user.User,
            primaryjoin=t_machine.c.created_user_id == t_user.c.id),
        'modified_user': relation(
            karesansui.db.model.user.User,
            primaryjoin=t_machine.c.modified_user_id == t_user.c.id),
        'tags': relation(
            karesansui.db.model.tag.Tag,
            secondary=t_machine_tag,
            backref="machine"),
    })
def setup_mappers(cls):
    foo = cls.tables.foo
    subfoo = cls.tables.subfoo

    mapper(Foo, foo)
    mapper(SubFoo, subfoo, inherits=Foo)
    MutableDict.associate_with_attribute(Foo.data)
def setUp(self):
    super(TestBlog, self).setUp()
    stats = self.stats = Table(
        'stats', self.meta,
        Column('date', Date, primary_key=True),
        Column('posts_so_far', Integer),
    )
    blog = self.blog = Table(
        'blog', self.meta,
        Column('id', Integer, primary_key=True),
        Column('date', Date, index=True),
        Column('text', Text),
    )
    self.meta.create_all()

    class BlogEntry(self.EasyInit):
        pass

    class StatRow(self.EasyInit):
        pass

    self.BlogEntry = BlogEntry
    self.StatRow = StatRow

    self.blog_mapper = mapper(
        BlogEntry, blog,
        extension=self.aggregator_class(
            a.Count(stats.c.posts_so_far, stats.c.date >= blog.c.date),
        ))
    self.stats_mapper = mapper(StatRow, stats)
def test_get_pk_w_null(self):
    """Test the re-implementation of the logic to do get() with IS NULL."""

    class AddressUser(object):
        pass

    mapper(
        AddressUser,
        self.tables.users.outerjoin(self.tables.addresses),
        properties={
            "id": self.tables.users.c.id,
            "address_id": self.tables.addresses.c.id,
        }
    )

    bq = self.bakery(lambda s: s.query(AddressUser))

    sess = Session()

    def go():
        u1 = bq(sess).get((10, None))
        eq_(u1.name, 'chuck')

    self.assert_sql_count(testing.db, go, 1)

    u1 = sess.query(AddressUser).get((10, None))  # noqa

    def go():
        u2 = bq(sess).get((10, None))
        eq_(u2.name, 'chuck')

    self.assert_sql_count(testing.db, go, 0)
def prepare_models():
    tables = get_tables()
    models = get_django_models()
    sa_models = getattr(Cache, 'models', {})

    for model in models:
        name = model._meta.db_table
        mixin = getattr(model, 'aldjemy_mixin', None)
        bases = (mixin, BaseSQLAModel) if mixin else (BaseSQLAModel, )
        table = tables[name]

        # Because querying happens on the sqlalchemy side, we can use only
        # one type of query per alias, so we use the 'read' type.
        sa_models[name] = type(model._meta.object_name, bases,
                               {'table': table,
                                'alias': router.db_for_read(model)})

    for model in models:
        name = model._meta.db_table
        if 'id' not in sa_models[name].__dict__:
            table = tables[name]
            attrs = _extract_model_attrs(model, sa_models)
            name = model._meta.db_table
            orm.mapper(sa_models[name], table, attrs)
        model.sa = sa_models[name]

    Cache.models = sa_models
def test_relationship(self):
    pjoin = polymorphic_union(
        {"manager": managers_table, "engineer": engineers_table},
        "type",
        "pjoin",
    )

    mapper(
        Company,
        companies,
        properties={"employees": relationship(Employee)},
    )
    employee_mapper = mapper(Employee, pjoin, polymorphic_on=pjoin.c.type)
    manager_mapper = mapper(
        Manager,
        managers_table,
        inherits=employee_mapper,
        concrete=True,
        polymorphic_identity="manager",
    )
    engineer_mapper = mapper(
        Engineer,
        engineers_table,
        inherits=employee_mapper,
        concrete=True,
        polymorphic_identity="engineer",
    )

    session = create_session()
    c = Company()
    c.employees.append(Manager("Tom", "knows how to manage things"))
    c.employees.append(Engineer("Kurt", "knows how to hack"))
    session.add(c)
    session.flush()
    session.expunge_all()

    def go():
        c2 = session.query(Company).get(c.id)
        assert set([repr(x) for x in c2.employees]) == set(
            [
                "Engineer Kurt knows how to hack",
                "Manager Tom knows how to manage things",
            ]
        )

    self.assert_sql_count(testing.db, go, 2)

    session.expunge_all()

    def go():
        c2 = (
            session.query(Company)
            .options(joinedload(Company.employees))
            .get(c.id)
        )
        assert set([repr(x) for x in c2.employees]) == set(
            [
                "Engineer Kurt knows how to hack",
                "Manager Tom knows how to manage things",
            ]
        )

    self.assert_sql_count(testing.db, go, 1)
def _test_load_only_propagate(self, use_load):
    User = self.classes.User
    Address = self.classes.Address

    users = self.tables.users
    addresses = self.tables.addresses

    mapper(User, users, properties={
        "addresses": relationship(Address)
    })
    mapper(Address, addresses)

    sess = create_session()
    expected = [
        ("SELECT users.id AS users_id, users.name AS users_name "
         "FROM users WHERE users.id IN (:id_1, :id_2)",
         {'id_2': 8, 'id_1': 7}),
        ("SELECT addresses.id AS addresses_id, "
         "addresses.email_address AS addresses_email_address "
         "FROM addresses WHERE :param_1 = addresses.user_id",
         {'param_1': 7}),
        ("SELECT addresses.id AS addresses_id, "
         "addresses.email_address AS addresses_email_address "
         "FROM addresses WHERE :param_1 = addresses.user_id",
         {'param_1': 8}),
    ]

    if use_load:
        opt = Load(User).defaultload(User.addresses).load_only(
            "id", "email_address")
    else:
        opt = defaultload(User.addresses).load_only("id", "email_address")

    q = sess.query(User).options(opt).filter(User.id.in_([7, 8]))

    def go():
        for user in q:
            user.addresses

    self.sql_eq_(go, expected)
def _setup_mappers(self, tables, mappers):
    mappers['test_users'] = orm.mapper(
        User, tables['test_users'],
        properties={
            'skills': orm.relation(
                Skill,
                primaryjoin=(tables['test_users'].columns['id'] ==
                             tables['test_skills'].columns['user_id'])),
        })
    mappers['test_skills'] = orm.mapper(Skill, tables['test_skills'])
def test_undefer_group(self):
    orders, Order = self.tables.orders, self.classes.Order

    mapper(Order, orders, properties=util.OrderedDict([
        ('userident', deferred(orders.c.user_id, group='primary')),
        ('description', deferred(orders.c.description, group='primary')),
        ('opened', deferred(orders.c.isopen, group='primary'))
    ]))

    sess = create_session()
    q = sess.query(Order).order_by(Order.id)

    def go():
        l = q.options(undefer_group('primary')).all()
        o2 = l[2]
        eq_(o2.opened, 1)
        eq_(o2.userident, 7)
        eq_(o2.description, 'order 3')

    self.sql_eq_(go, [
        ("SELECT orders.user_id AS orders_user_id, "
         "orders.description AS orders_description, "
         "orders.isopen AS orders_isopen, "
         "orders.id AS orders_id, "
         "orders.address_id AS orders_address_id "
         "FROM orders ORDER BY orders.id",
         {})])
def test_locates_col_rowproc_only(self):
    """Changed in 1.0 - we don't search for deferred cols in the result
    now.

    Because the loading for ORM Query and Query from a core select is
    now split off, we test loading from a plain select() separately.
    """
    orders, Order = self.tables.orders, self.classes.Order

    mapper(Order, orders, properties={
        'description': deferred(orders.c.description)})

    sess = create_session()
    stmt = sa.select([Order]).order_by(Order.id)
    o1 = (sess.query(Order).from_statement(stmt).all())[0]

    def go():
        eq_(o1.description, 'order 1')

    # prior to 1.0 we'd search in the result for this column:
    # self.sql_count_(0, go)
    self.sql_count_(1, go)
def test_basic(self):
    """A basic deferred load."""

    Order, orders = self.classes.Order, self.tables.orders

    mapper(Order, orders, order_by=orders.c.id, properties={
        'description': deferred(orders.c.description)})

    o = Order()
    self.assert_(o.description is None)

    q = create_session().query(Order)

    def go():
        l = q.all()
        o2 = l[2]
        x = o2.description

    self.sql_eq_(go, [
        ("SELECT orders.id AS orders_id, "
         "orders.user_id AS orders_user_id, "
         "orders.address_id AS orders_address_id, "
         "orders.isopen AS orders_isopen "
         "FROM orders ORDER BY orders.id", {}),
        ("SELECT orders.description AS orders_description "
         "FROM orders WHERE orders.id = :param_1",
         {'param_1': 3})])
def test_options(self):
    """Options on a mapper to create deferred and undeferred columns."""

    orders, Order = self.tables.orders, self.classes.Order

    mapper(Order, orders)

    sess = create_session()
    q = sess.query(Order).order_by(Order.id).options(defer('user_id'))

    def go():
        q.all()[0].user_id

    self.sql_eq_(go, [
        ("SELECT orders.id AS orders_id, "
         "orders.address_id AS orders_address_id, "
         "orders.description AS orders_description, "
         "orders.isopen AS orders_isopen "
         "FROM orders ORDER BY orders.id", {}),
        ("SELECT orders.user_id AS orders_user_id "
         "FROM orders WHERE orders.id = :param_1",
         {'param_1': 1})])

    sess.expunge_all()

    q2 = q.options(undefer('user_id'))
    self.sql_eq_(q2.all, [
        ("SELECT orders.id AS orders_id, "
         "orders.user_id AS orders_user_id, "
         "orders.address_id AS orders_address_id, "
         "orders.description AS orders_description, "
         "orders.isopen AS orders_isopen "
         "FROM orders ORDER BY orders.id", {})])
def test_unless_sqla(self):
    from sqlalchemy import MetaData, Table, Column, Integer, String
    from sqlalchemy.orm import create_session, mapper

    metadata = MetaData('sqlite:///:memory:')
    testtable = Table('test1', metadata,
                      Column('id', Integer, primary_key=True),
                      Column('val', String(8)))
    metadata.create_all()

    class Test(object):
        pass

    mapper(Test, testtable)
    testtable.insert().execute({'id': 1, 'val': 'bob'})
    testtable.insert().execute({'id': 2, 'val': 'bobby'})
    testtable.insert().execute({'id': 3, 'val': 'alberto'})

    sess = create_session()
    getunless = unless(sess.query(Test).get)
    x = getunless(1)
    assert x.val == 'bob', x
    x = getunless(2)
    assert x.val == 'bobby', x
    assert_raises(ValueError, getunless, 5)
    assert_raises(TGValidationError, Convert(getunless).to_python, '5')
    x = Convert(getunless).to_python('1')
    assert x.val == 'bob', x
def go():
    class A(fixtures.ComparableEntity):
        pass

    class B(A):
        pass

    mapper(
        A,
        table1,
        polymorphic_on=table1.c.col2,
        polymorphic_identity="a",
    )
    mapper(B, table2, inherits=A, polymorphic_identity="b")

    sess = create_session()
    a1 = A()
    a2 = A()
    b1 = B(col3="b1")
    b2 = B(col3="b2")
    for x in [a1, a2, b1, b2]:
        sess.add(x)
    sess.flush()
    sess.expunge_all()

    alist = sess.query(A).order_by(A.col1).all()
    eq_([A(), A(), B(col3="b1"), B(col3="b2")], alist)

    for a in alist:
        sess.delete(a)
    sess.flush()

    # don't need to clear_mappers()
    del B
    del A
def setup_mappers(cls):
    foobars = cls.tables.foobars

    class Foobar(cls.Comparable):
        pass

    class FBComposite(cls.Comparable):
        def __init__(self, x1, x2, x3, x4):
            self.goofy_x1 = x1
            self.x2 = x2
            self.x3 = x3
            self.x4 = x4

        def __composite_values__(self):
            return self.goofy_x1, self.x2, self.x3, self.x4

        __hash__ = None

        def __eq__(self, other):
            return other.goofy_x1 == self.goofy_x1 and \
                other.x2 == self.x2 and \
                other.x3 == self.x3 and \
                other.x4 == self.x4

        def __ne__(self, other):
            return not self.__eq__(other)

        def __repr__(self):
            return "FBComposite(%r, %r, %r, %r)" % (
                self.goofy_x1, self.x2, self.x3, self.x4
            )

    mapper(Foobar, foobars, properties=dict(
        foob=sa.orm.composite(FBComposite,
                              foobars.c.x1,
                              foobars.c.x2,
                              foobars.c.x3,
                              foobars.c.x4)
    ))
def setup_mappers(cls):
    a, b = cls.tables.a, cls.tables.b

    class A(cls.Comparable):
        pass

    class B(cls.Comparable):
        pass

    class C(cls.Comparable):
        def __init__(self, b1, b2):
            self.b1, self.b2 = b1, b2

        def __composite_values__(self):
            return self.b1, self.b2

        def __eq__(self, other):
            return isinstance(other, C) and \
                other.b1 == self.b1 and \
                other.b2 == self.b2

    mapper(A, a, properties={
        'b2': relationship(B),
        'c': composite(C, 'b1', 'b2')
    })
    mapper(B, b)
def __init__(self):
    # self.engine = create_engine('sqlite://///home/weis/code/baltic_rallye_code/common/sequences.db')
    self.engine = create_engine('sqlite://///home/pi/baltic_rallye_code/common/sequences.db')
    # self.engine = create_engine('sqlite://///home/weis/Desktop/baltic_rallye_code/common/sequences.db')
    # self.engine = create_engine('sqlite://///home/weis/code/gpslapse/common/sequences.db')
    self.base = declarative_base()
    self.meta = MetaData(bind=self.engine)
    Session = sessionmaker(bind=self.engine)
    self.session = Session()

    # First, get the metadata (structure and datatypes) from all tables.
    self.sequences = Table('sequences', self.meta, autoload=True,
                           autoload_with=self.engine)
    self.sensors = Table('sensors', self.meta, autoload=True,
                         autoload_with=self.engine)
    self.frames = Table('frames', self.meta, autoload=True,
                        autoload_with=self.engine)
    self.annot_cars = Table('annot_cars', self.meta, autoload=True,
                            autoload_with=self.engine)
    self.annot_taillights = Table('annot_taillights', self.meta,
                                  autoload=True, autoload_with=self.engine)

    # Now map the dummy classes above to the tables; they inherit all
    # fields and can be used to insert into or query the database.
    orm.mapper(self.Sensor, self.sensors)
    orm.mapper(self.Sequence, self.sequences)
    orm.mapper(self.Frame, self.frames)
    orm.mapper(self.AnnotCar, self.annot_cars)
    orm.mapper(self.AnnotTaillight, self.annot_taillights)
def setup_mappers(cls):
    graphs, edges = cls.tables.graphs, cls.tables.edges

    class Point(cls.Comparable):
        def __init__(self, x, y):
            self.x = x
            self.y = y

        def __composite_values__(self):
            return [self.x, self.y]

        __hash__ = None

        def __eq__(self, other):
            return isinstance(other, Point) and \
                other.x == self.x and \
                other.y == self.y

        def __ne__(self, other):
            return not isinstance(other, Point) or \
                not self.__eq__(other)

    class Graph(cls.Comparable):
        pass

    class Edge(cls.Comparable):
        def __init__(self, *args):
            if args:
                self.start, self.end = args

    mapper(Graph, graphs, properties={
        'edges': relationship(Edge)
    })
    mapper(Edge, edges, properties={
        'start': sa.orm.composite(Point, edges.c.x1, edges.c.y1),
        'end': sa.orm.composite(Point, edges.c.x2, edges.c.y2)
    })
def test_weakref_pickled(self):
    users, User = self.tables.users, pickleable.User

    s = create_session()
    mapper(User, users)

    s.add(User(name='ed'))
    s.flush()
    assert not s.dirty

    user = s.query(User).one()
    user.name = 'fred'
    s.expunge(user)

    u2 = pickle.loads(pickle.dumps(user))

    del user
    s.add(u2)

    del u2
    gc_collect()

    assert len(s.identity_map) == 1
    assert len(s.dirty) == 1
    assert None not in s.dirty
    s.flush()
    gc_collect()
    assert not s.dirty
    assert not s.identity_map
def generate_contrib_model(fields):
    fnames = [f['name'] for f in fields]

    class Contributor(object):
        __tablename__ = "contributors"

        def __init__(self, csvname, **kwargs):
            self.csvname = csvname
            for k in kwargs:
                if k in fnames:
                    setattr(self, k, kwargs[k])

    columns = []
    for f in fields:
        columns.append(db.Column(f['name'],
                                 eval("db.%s" % f['db_type']),
                                 primary_key=f['primary_key']))
    for f in ['facebook_username', 'twitter_username', 'linkedin_username']:
        columns.append(db.Column(f, db.String, unique=True))

    metadata = db.metadata
    table = Table(Contributor.__tablename__, metadata, *columns)
    properties = {}
    for column in columns:
        properties[column.name] = column
    mapper(Contributor, table, properties=properties)
    return Contributor
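# A self-contained sketch (not part of the original app) of the dynamic
# classical-mapping pattern generate_contrib_model uses above: build Column
# objects from a field spec, create a Table, and map a plain class. It
# assumes the SQLAlchemy 1.x classical mapper() API used throughout this
# file; the names build_model and DynamicModel are hypothetical.
from sqlalchemy import Column, Integer, MetaData, String, Table, create_engine
from sqlalchemy.orm import mapper, sessionmaker


def build_model(fields, metadata):
    # One Column per field spec; 'pk' marks the primary key.
    columns = [Column(f['name'], f['type'], primary_key=f.get('pk', False))
               for f in fields]
    table = Table('dynamic_model', metadata, *columns)
    cls = type('DynamicModel', (object,), {})
    mapper(cls, table)
    return cls


engine = create_engine('sqlite://')
meta = MetaData()
DynamicModel = build_model(
    [{'name': 'id', 'type': Integer, 'pk': True},
     {'name': 'name', 'type': String(50)}],
    meta)
meta.create_all(engine)
session = sessionmaker(bind=engine)()
row = DynamicModel()
row.name = 'example'
session.add(row)
session.commit()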
def test_expunge_cascade(self):
    Address, addresses, users, User = (self.classes.Address,
                                       self.tables.addresses,
                                       self.tables.users,
                                       self.classes.User)

    mapper(Address, addresses)
    mapper(User, users, properties={
        'addresses': relationship(Address,
                                  backref=backref("user", cascade="all"),
                                  cascade="all")})

    session = create_session()
    u = session.query(User).filter_by(id=7).one()

    # get everything to load in both directions
    print([a.user for a in u.addresses])

    # then see if expunge fails
    session.expunge(u)

    assert sa.orm.object_session(u) is None
    assert sa.orm.attributes.instance_state(u).session_id is None
    for a in u.addresses:
        assert sa.orm.object_session(a) is None
        assert sa.orm.attributes.instance_state(a).session_id is None
def test_explicit_expunge_deleted(self):
    users, User = self.tables.users, self.classes.User
    mapper(User, users)

    sess = Session()
    sess.add(User(name='x'))
    sess.commit()

    u1 = sess.query(User).first()
    sess.delete(u1)
    sess.flush()

    assert was_deleted(u1)
    assert u1 not in sess
    assert object_session(u1) is sess

    sess.expunge(u1)
    assert was_deleted(u1)
    assert u1 not in sess
    assert object_session(u1) is None

    sess.rollback()
    assert was_deleted(u1)
    assert u1 not in sess
    assert object_session(u1) is None
def test_deleted_flag(self):
    users, User = self.tables.users, self.classes.User

    mapper(User, users)

    sess = sessionmaker()()

    u1 = User(name='u1')
    sess.add(u1)
    sess.commit()

    sess.delete(u1)
    sess.flush()
    assert u1 not in sess
    assert_raises(sa.exc.InvalidRequestError, sess.add, u1)
    sess.rollback()
    assert u1 in sess

    sess.delete(u1)
    sess.commit()
    assert u1 not in sess
    assert_raises(sa.exc.InvalidRequestError, sess.add, u1)

    make_transient(u1)
    sess.add(u1)
    sess.commit()

    eq_(sess.query(User).count(), 1)
def test_autoflush_expressions(self):
    """Test that an expression which is dependent on object state is
    evaluated after the session autoflushes.

    This is the lambda inside of strategies.py lazy_clause.
    """
    users, Address, addresses, User = (self.tables.users,
                                       self.classes.Address,
                                       self.tables.addresses,
                                       self.classes.User)

    mapper(User, users, properties={
        'addresses': relationship(Address, backref="user")})
    mapper(Address, addresses)

    sess = create_session(autoflush=True, autocommit=False)
    u = User(name='ed', addresses=[Address(email_address='foo')])
    sess.add(u)
    eq_(sess.query(Address).filter(Address.user == u).one(),
        Address(email_address='foo'))

    # still works after "u" is garbage collected
    sess.commit()
    sess.close()
    u = sess.query(User).get(u.id)
    q = sess.query(Address).filter(Address.user == u)
    del u
    gc_collect()

    eq_(q.one(), Address(email_address='foo'))
def test_auto_detach_on_gc_session(self):
    users, User = self.tables.users, self.classes.User

    mapper(User, users)

    sess = Session()
    u1 = User(name='u1')
    sess.add(u1)
    sess.commit()

    # can't add u1 to a new Session;
    # it already belongs to sess
    s2 = Session()
    assert_raises_message(
        sa.exc.InvalidRequestError,
        r".*is already attached to session",
        s2.add, u1
    )

    # garbage collect sess
    del sess
    gc_collect()

    # s2 lets it in now despite u1 having
    # a session_key
    s2.add(u1)
    assert u1 in s2
def setup_mappers(cls):
    user, User = cls.tables.user, cls.classes.User
    address, Address = cls.tables.address, cls.classes.Address

    mapper(User, user, properties={
        'addresses': relationship(Address, backref="user")
    })
    mapper(Address, address)
def test_autoflush_rollback(self):
    Address, addresses, users, User = (self.classes.Address,
                                       self.tables.addresses,
                                       self.tables.users,
                                       self.classes.User)

    mapper(Address, addresses)
    mapper(User, users, properties={
        'addresses': relationship(Address)})

    sess = create_session(autocommit=False, autoflush=True)
    u = sess.query(User).get(8)
    newad = Address(email_address='a new address')
    u.addresses.append(newad)
    u.name = 'some new name'
    assert u.name == 'some new name'
    assert len(u.addresses) == 4
    assert newad in u.addresses
    sess.rollback()
    assert u.name == 'ed'
    assert len(u.addresses) == 3

    assert newad not in u.addresses
    # pending objects don't get expired
    assert newad.email_address == 'a new address'
def test_weakref_with_cycles_o2o(self):
    Address, addresses, users, User = (self.classes.Address,
                                       self.tables.addresses,
                                       self.tables.users,
                                       self.classes.User)

    s = sessionmaker()()
    mapper(User, users, properties={
        "address": relationship(Address, backref="user", uselist=False)
    })
    mapper(Address, addresses)
    s.add(User(name="ed", address=Address(email_address="ed1")))
    s.commit()

    user = s.query(User).options(joinedload(User.address)).one()
    user.address.user
    eq_(user, User(name="ed", address=Address(email_address="ed1")))

    del user
    gc_collect()
    assert len(s.identity_map) == 0

    user = s.query(User).options(joinedload(User.address)).one()
    user.address.email_address = 'ed2'
    user.address.user  # lazyload

    del user
    gc_collect()
    assert len(s.identity_map) == 2

    s.commit()
    user = s.query(User).options(joinedload(User.address)).one()
    eq_(user, User(name="ed", address=Address(email_address="ed2")))
def test_weakref(self):
    """Test the weak-referencing identity map, which strongly
    references modified items."""

    users, User = self.tables.users, self.classes.User

    s = create_session()
    mapper(User, users)

    s.add(User(name='ed'))
    s.flush()
    assert not s.dirty

    user = s.query(User).one()
    del user
    gc_collect()
    assert len(s.identity_map) == 0

    user = s.query(User).one()
    user.name = 'fred'
    del user
    gc_collect()
    assert len(s.identity_map) == 1
    assert len(s.dirty) == 1
    assert None not in s.dirty
    s.flush()
    gc_collect()
    assert not s.dirty
    assert not s.identity_map

    user = s.query(User).one()
    assert user.name == 'fred'
    assert s.identity_map
def test_nesting_with_functions(self):
    Stat, Foo, stats, foo, Data, datas = (
        self.classes.Stat,
        self.classes.Foo,
        self.tables.stats,
        self.tables.foo,
        self.classes.Data,
        self.tables.datas,
    )

    mapper(Data, datas)
    mapper(
        Foo,
        foo,
        properties={
            "data": relationship(Data,
                                 backref=backref("foo", uselist=False))
        },
    )
    mapper(Stat, stats, properties={"data": relationship(Data)})

    session = create_session()

    data = [Data(a=x) for x in range(5)]
    session.add_all(data)

    session.add_all((
        Stat(data=data[0], somedata=1),
        Stat(data=data[1], somedata=2),
        Stat(data=data[2], somedata=3),
        Stat(data=data[3], somedata=4),
        Stat(data=data[4], somedata=5),
        Stat(data=data[0], somedata=6),
        Stat(data=data[1], somedata=7),
        Stat(data=data[2], somedata=8),
        Stat(data=data[3], somedata=9),
        Stat(data=data[4], somedata=10),
    ))
    session.flush()

    arb_data = sa.select(
        [stats.c.data_id, sa.func.max(stats.c.somedata).label("max")],
        stats.c.data_id <= 5,
        group_by=[stats.c.data_id],
    )

    arb_result = arb_data.execute().fetchall()

    # order the result list descending based on 'max'
    arb_result.sort(key=lambda a: a._mapping["max"], reverse=True)

    # extract just the "data_id" from it
    arb_result = [row._mapping["data_id"] for row in arb_result]

    arb_data = arb_data.alias("arb")

    # now query for Data objects using that above select, adding the
    # "order by max desc" separately
    q = (session.query(Data)
         .options(sa.orm.joinedload("foo"))
         .select_from(datas.join(arb_data,
                                 arb_data.c.data_id == datas.c.id))
         .order_by(sa.desc(arb_data.c.max))
         .limit(10))

    # extract "data_id" from the list of result objects
    verify_result = [d.id for d in q]

    eq_(verify_result, arb_result)
def map_generated_classes(engine, generated_classes,
                          relationship_lazy='select',
                          numpy_storage_engine='sqltable',
                          compress=False, hfile=None):
    """Map all generated classes to the db connected with engine.

    :param engine: a sqlalchemy Engine.
    :param generated_classes: a list of classes (returned by
        create_classes_from_schema_sniffing). Each class holds some
        attributes that help the mapper:
            * genclass.tablename
            * genclass.neoclass
            * genclass.usable_attributes
            * genclass.one_to_many_relationship
            * genclass.many_to_one_relationship
            * genclass.many_to_many_relationship
    :param relationship_lazy: see open_db
    :param numpy_storage_engine: see open_db
    :param compress: see open_db
    """
    metadata = MetaData(bind=engine)
    metadata.reflect()

    nptable = metadata.tables['NumpyArrayTable']
    NumpyArrayTableClass = type('NumpyArrayTableClass', (object,), {})
    orm.mapper(NumpyArrayTableClass, nptable, properties={
        'blob': orm.deferred(nptable.c['blob'])})

    # class by tablename
    tablename_to_class = {}
    for genclass in generated_classes:
        tablename_to_class[genclass.tablename] = genclass

    for genclass in generated_classes:
        table = metadata.tables[genclass.tablename]

        for parentname in genclass.many_to_one_relationship:
            table.c[parentname.lower() + '_id'].append_foreign_key(
                ForeignKey(parentname + '.id'))

        for attrname, attrtype in list(genclass.usable_attributes.items()):
            if attrtype == np.ndarray or attrtype == pq.Quantity:
                if attrtype == np.ndarray:
                    id_name = '_numpy_id'
                elif attrtype == pq.Quantity:
                    id_name = '_quantity_id'
                table.c[attrname + id_name].append_foreign_key(
                    ForeignKey('NumpyArrayTable.id'))

    for genclass in generated_classes:
        table = metadata.tables[genclass.tablename]
        properties = {}

        # deferred loading for numpy or Quantities fields
        # FIXME: think about whether the *_shape, *_dtype, *_blob and
        # *_units columns should be deferred (per-attribute groups).

        # one to many relationship
        for childname in genclass.one_to_many_relationship:
            properties[childname.lower() + 's'] = orm.relationship(
                tablename_to_class[childname],
                primaryjoin=(table.c.id ==
                             metadata.tables[childname]
                             .c[table.name.lower() + '_id']),
                backref=orm.backref(table.name.lower()),
                # FIXME: cascade="all, delete",
                # FIXME: lazy=relationship_lazy,
            )

        # many to many relationship
        for tablename2 in genclass.many_to_many_relationship:
            if table.name > tablename2:
                # the other case is handled by the backref
                xref = table.name + 'XREF' + tablename2
                xreftable = metadata.tables[xref]
                properties[tablename2.lower() + 's'] = orm.relationship(
                    tablename_to_class[tablename2],
                    primaryjoin=(table.c.id ==
                                 xreftable.c[table.name.lower() + '_id']),
                    secondary=xreftable,
                    secondaryjoin=(metadata.tables[tablename2].c.id ==
                                   xreftable.c[tablename2.lower() + '_id']),
                    lazy=(((relationship_lazy != "immediate")
                           and relationship_lazy) or 'select'),
                    foreign_keys=[
                        xreftable.c[table.name.lower() + '_id'],
                        xreftable.c[tablename2.lower() + '_id']],
                    backref=orm.backref(table.name.lower() + 's'),
                )

        # one to one relationship with NumpyArrayTable
        for attrname, attrtype in list(genclass.usable_attributes.items()):
            if attrtype == np.ndarray or attrtype == pq.Quantity:
                if attrtype == np.ndarray:
                    id_name = '_numpy_id'
                elif attrtype == pq.Quantity:
                    id_name = '_quantity_id'
                properties['NumpyArrayTable__' + attrname] = orm.relationship(
                    NumpyArrayTableClass,
                    primaryjoin=(nptable.c.id ==
                                 table.c['{}{}'.format(attrname, id_name)]),
                    cascade="all, delete",
                    lazy='joined',
                    uselist=False,
                )

        orm.mapper(genclass, table, properties=properties)

        # magic reconstruction for np.ndarray and pq.Quantity
        # (including pq.Quantity scalars)
        for attrname, attrtype in list(genclass.usable_attributes.items()):
            if attrtype == np.ndarray or attrtype == pq.Quantity:
                if numpy_storage_engine == 'sqltable':
                    np_dyn_load = SQL_NumpyArrayPropertyLoader(
                        attrname, arraytype=attrtype, compress=compress,
                        NumpyArrayTableClass=NumpyArrayTableClass)
                elif numpy_storage_engine == 'hdf5':
                    np_dyn_load = HDF5_NumpyArrayPropertyLoader(
                        attrname, arraytype=attrtype, hfile=hfile)
                setattr(genclass, attrname,
                        property(fget=np_dyn_load.fget,
                                 fset=np_dyn_load.fset))

    return metadata
from sqlalchemy.orm import mapper, synonym, relationship, backref

import quactrl.models.core as core
import quactrl.models.quality as qua
import quactrl.models.products as prod
import quactrl.models.operations as op
import quactrl.data.sqlalchemy.tables as tables
from quactrl.data.sqlalchemy.mappers.core import (resource_relationship,
                                                  item_relationship)

mapper(qua.Defect, inherits=core.Item, polymorphic_identity='defect',
       properties={'failure_mode': synonym('resource')})

mapper(qua.Measurement, inherits=core.Item,
       polymorphic_identity='measurement',
       properties={'characteristic': synonym('resource')})

mapper(qua.Subject, inherits=core.UnitaryItem,
       polymorphic_identity='subject',
       properties={
           'measurements': item_relationship(
               qua.Measurement,
               backref=backref('subject', uselist=False)),
           'defects': item_relationship(
               qua.Defect,
               backref=backref('subject', uselist=False))
       })
def setup_mappers(cls):
    data = cls.tables.data
    mapper(cls.classes.Data, data, properties={'cnt': data.c.counter})
def setup_mappers(cls):
    User = cls.classes.User
    users = cls.tables.users
    mapper(User, users)
from sqlalchemy import Table, Column
from sqlalchemy.orm import mapper
from sqlalchemy.types import String

from meta import metadata

## Settings table
t_settings = Table('settings', metadata,
                   Column('name', String(32), primary_key=True),
                   Column('val', String(128)),
                   )


class Setting(object):
    pass


mapper(Setting, t_settings)


class Settings(object):
    INTS = ["largestcarnumber", "useevents", "minevents"]
    BOOLS = ["locked", "superuniquenumbers", "indexafterpenalties",
             "usepospoints"]
    STRS = ["pospointlist", "champsorting", "seriesname", "sponsorlink",
            "schema", "parentseries", "classinglink"]
    FLOATS = []

    def __init__(self):
        self.locked = False
        self.superuniquenumbers = False
        self.indexafterpenalties = False
        self.usepospoints = False
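# A hedged sketch (not in the original module) of how the INTS/BOOLS/STRS/
# FLOATS lists above are typically used: coercing the raw string 'val'
# column of the settings table into typed Python values. The helper name
# coerce_setting and the accepted truthy strings are assumptions.
def coerce_setting(name, raw):
    if name in Settings.INTS:
        return int(raw)
    if name in Settings.FLOATS:
        return float(raw)
    if name in Settings.BOOLS:
        # assumed truthy spellings; adjust to match the stored values
        return raw in ('1', 'true', 'True', 'yes')
    return raw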
""" List for easy rendering Automatically prints the contained tags separated by commas:: >>> tags = TagList(['abc', 'def', 'ghi']) >>> tags abc, def, ghi """ def __unicode__(self): return ', '.join([tag.name for tag in self.values()]) mapper(Tag, tags, order_by=tags.c.name, extension=events.MapperObserver(events.Tag)) excess_whitespace = re.compile('\s\s+', re.M) def extract_tags(string): """Convert a comma separated string into a list of tag names. NOTE: The space-stripping here is necessary to patch a leaky abstraction. MySQL's string comparison with varchar columns is pretty fuzzy when it comes to space characters, and is even inconsistent between versions. We strip all preceding/trailing/duplicated spaces to be safe. """
        schema_name=Config.DB_SCHEMA_MD) as session:
    commission_rate_obj = CommissionRateModel()
    for name, val in commission_rate_dic.items():
        setattr(commission_rate_obj, name, val)
    # instrument_obj_list.append(instrument_obj)
    session.merge(commission_rate_obj)
    # commit
    session.commit()  # move this outside the for loop later

try:
    instrument_commission_rate_table = Table(
        'instrumentcommissionrate',
        MetaData(Config.get_db_engine(Config.DB_SCHEMA_MD)),
        autoload=True)
    mapper(CommissionRateModel, instrument_commission_rate_table)
except:
    # logging.exception('mapping error')
    pass

if __name__ == "__main__":
    engine = Config.get_db_engine(Config.DB_SCHEMA_QABAT)
    # metadata = MetaData(engine)
    BaseModel.metadata.create_all(engine)
    with Config.with_db_session(engine=engine) as session:
        for table_name, _ in BaseModel.metadata.tables.items():
            sql_str = "ALTER TABLE %s ENGINE = MyISAM" % table_name
            session.execute(sql_str)
    print("All table structures have been created")
# Create the user table, inheriting from the metadata class
# The Engine uses Schema Types to create a specific structural object
def setup_mappers(cls):
    mapper(cls.classes.Data, cls.tables.data, properties={
        'others': relationship(cls.classes.Other)
    })
    mapper(cls.classes.Other, cls.tables.other)
    BASEV2.metadata,
    # We just need sqlalchemy to think these are primary keys
    sa.Column("port_id", sa.String(36), sa.ForeignKey("quark_ports.id"),
              nullable=False, primary_key=True),
    sa.Column("ip_address_id", sa.String(36),
              sa.ForeignKey("quark_ip_addresses.id"),
              nullable=False, primary_key=True),
    sa.Column("enabled", sa.Boolean(), default=True, nullable=False,
              server_default='1'),
    sa.Column("service", sa.String(255), default='none', nullable=True,
              server_default='none'),
    **TABLE_KWARGS)

orm.mapper(PortIpAssociation, port_ip_association_table)


class IPAddress(BASEV2, models.HasId):
    """More closely emulate the melange version of the IP table.

    We always mark the record as deallocated rather than deleting it.
    Gives us an IP address owner audit log for free, essentially.
    """

    __tablename__ = "quark_ip_addresses"
    __table_args__ = (sa.UniqueConstraint("subnet_id", "address",
                                          name="subnet_id_address"),
                      TABLE_KWARGS)
    address_readable = sa.Column(sa.String(128), nullable=False)
    address = sa.Column(custom_types.INET(), nullable=False, index=True)
    subnet_id = sa.Column(sa.String(36),
def gen_sqla_info(cls, cls_bases=()):
    """Return the SQLAlchemy table object corresponding to the passed
    Spyne object. Also maps the given class to the returned table.
    """

    metadata = cls.Attributes.sqla_metadata
    table_name = cls.Attributes.table_name

    inc = []  # include_properties

    # check inheritance
    inheritance = None
    base_class = getattr(cls, '__extends__', None)
    if base_class is None:
        for b in cls_bases:
            if getattr(b, '_type_info', None) is not None and b.__mixin__:
                base_class = b

    if base_class is not None:
        base_table_name = base_class.Attributes.table_name
        if base_table_name is not None:
            if base_table_name == table_name:
                inheritance = _SINGLE
            else:
                inheritance = _JOINED
                raise NotImplementedError(
                    "Joined table inheritance is not yet implemented.")
            inc_prop = base_class.Attributes.sqla_mapper.include_properties
            if inc_prop is not None:
                inc.extend(inc_prop)

            exc_prop = base_class.Attributes.sqla_mapper.exclude_properties
            if exc_prop is not None:
                inc = [_p for _p in inc if _p not in exc_prop]

    # check whether the object already has a table
    table = None
    if table_name in metadata.tables:
        table = metadata.tables[table_name]
    else:
        # We need FakeTable because table_args can contain all sorts of
        # stuff that can require a fully-constructed table, and we don't
        # have that information here yet.
        table = _FakeTable()

    # check whether the base classes are already mapped
    base_mapper = None
    if base_class is not None:
        base_mapper = base_class.Attributes.sqla_mapper

    if base_mapper is None:
        for b in cls_bases:
            bm = _mapper_registry.get(b, None)
            if bm is not None:
                assert base_mapper is None, \
                    "There can be only one base mapper."
                base_mapper = bm
                inheritance = _SINGLE

    props = {}

    # For each Spyne field
    for k, v in cls._type_info.items():
        if v.Attributes.exc_table:
            continue

        col_args, col_kwargs = sanitize_args(v.Attributes.sqla_column_args)
        _sp_attrs_to_sqla_constraints(cls, v, col_kwargs)

        t = get_sqlalchemy_type(v)

        if t is None:
            p = getattr(v.Attributes, 'store_as', None)
            if p is not None and issubclass(v, Array) and \
                    isinstance(p, c_table):
                child_cust, = v._type_info.values()
                if child_cust.__orig__ is not None:
                    child = child_cust.__orig__
                else:
                    child = child_cust

                if p.multi != False:  # many to many
                    col_own, col_child = _get_cols_m2m(cls, k, v,
                                                       p.left, p.right)
                    p.left = col_own.key
                    p.right = col_child.key

                    if p.multi == True:
                        rel_table_name = '_'.join(
                            [cls.Attributes.table_name, k])
                    else:
                        rel_table_name = p.multi

                    # FIXME: Handle the case where the table already exists.
                    rel_t = Table(rel_table_name, metadata,
                                  *(col_own, col_child))

                    props[k] = relationship(child, secondary=rel_t,
                                            backref=p.backref)

                else:  # one to many
                    assert p.left is None, \
                        "'left' is ignored in one-to-many relationships. " \
                        "You probably meant to use 'right'."

                    child_t = child.__table__
                    _gen_col = _get_col_o2m(cls, p.right)

                    col_info = next(_gen_col)  # gets the column name
                    # FIXME: Add support for multi-column primary keys.
                    p.right, col_type = col_info[0]

                    if p.right in child_t.c:
                        # FIXME: This branch MUST be tested.
                        assert col_type == child_t.c[p.right].type

                        # If the column is there, the decision about
                        # whether it should be in the child's mapper should
                        # also have been made, so we do not add the child
                        # column to the child mapper here.
                    else:
                        col = next(_gen_col)

                        _sp_attrs_to_sqla_constraints(cls, child_cust,
                                                      col=col)

                        child_t.append_column(col)
                        child.__mapper__.add_property(col.name, col)

                    props[k] = relationship(child)

            elif p is not None and issubclass(v, ComplexModelBase):
                # v has the Attribute values we need, whereas real_v is
                # what the user instantiates (thus what sqlalchemy needs)
                if v.__orig__ is None:  # vanilla class
                    real_v = v
                else:  # customized class
                    real_v = v.__orig__

                if isinstance(p, c_table):
                    assert not getattr(p, 'multi', False), (
                        'Storing a single element-type using a '
                        'relation table is pointless.')

                    assert p.right is None, \
                        "'right' is ignored in a one-to-one relationship"

                    col = _get_col_o2o(cls, k, v, p.left)
                    rel = relationship(real_v, uselist=False)

                    p.left = col.key
                    props[k] = rel

                elif isinstance(p, c_xml):
                    if k in table.c:
                        col = table.c[k]
                    else:
                        col = Column(k, PGObjectXml(v, p.root_tag, p.no_ns),
                                     *col_args, **col_kwargs)

                elif isinstance(p, c_json):
                    if k in table.c:
                        col = table.c[k]
                    else:
                        col = Column(
                            k,
                            PGObjectJson(v,
                                         ignore_wrappers=p.ignore_wrappers,
                                         complex_as=p.complex_as),
                            *col_args, **col_kwargs)

                elif isinstance(p, c_msgpack):
                    raise NotImplementedError()

                else:
                    raise ValueError(p)

                props[col.name] = col
                if k not in table.c:
                    table.append_column(col)

            else:
                logger.debug("Skipping %s.%s.%s: %r, store_as: %r" % (
                    cls.get_namespace(), cls.get_type_name(), k, v, p))

        else:
            unique = v.Attributes.unique
            index = v.Attributes.index
            if unique and not index:
                index = True

            try:
                index_name, index_method = v.Attributes.index
            except (TypeError, ValueError):
                index_name = "%s_%s%s" % (table_name, k,
                                          '_unique' if unique else '')
                index_method = v.Attributes.index

            if k in table.c:
                col = table.c[k]
            else:
                col = Column(k, t, *col_args, **col_kwargs)
                table.append_column(col)

                if index in (False, None):
                    pass
                else:
                    if index == True:
                        index_args = (index_name, col), dict(unique=unique)
                    else:
                        index_args = (index_name, col), dict(
                            unique=unique, postgresql_using=index_method)

                    if isinstance(table, _FakeTable):
                        table.indexes.append(index_args)
                    else:
                        Index(*index_args[0], **index_args[1])

            if not v.Attributes.exc_mapper:
                props[k] = col

    if isinstance(table, _FakeTable):
        _table = table
        table_args, table_kwargs = sanitize_args(
            cls.Attributes.sqla_table_args)
        table = Table(table_name, metadata,
                      *(tuple(table.columns) + table_args), **table_kwargs)

        for index_args, index_kwargs in _table.indexes:
            Index(*index_args, **index_kwargs)
        del _table

    # Map the table to the object
    mapper_args, mapper_kwargs = sanitize_args(
        cls.Attributes.sqla_mapper_args)

    _props = mapper_kwargs.get('properties', None)
    if _props is None:
        mapper_kwargs['properties'] = props
    else:
        props.update(_props)
        mapper_kwargs['properties'] = props

    _inc = mapper_kwargs.get('include_properties', None)
    if _inc is None:
        mapper_kwargs['include_properties'] = inc + list(props.keys())

    po = mapper_kwargs.get('polymorphic_on', None)
    if po is not None:
        if not isinstance(po, Column):
            mapper_kwargs['polymorphic_on'] = table.c[po]
        else:
            del mapper_kwargs['polymorphic_on']

    if base_mapper is not None:
        mapper_kwargs['inherits'] = base_mapper

    if inheritance is not _SINGLE:
        mapper_args = (table, ) + mapper_args

    cls_mapper = mapper(cls, *mapper_args, **mapper_kwargs)

    def my_load_listener(target, context):
        for k, v in cls.get_flat_type_info(cls).items():
            av = getattr(target, k, None)
            if isclass(av) and issubclass(av, ModelBase):
                setattr(target, k, None)

    event.listen(cls, 'load', my_load_listener)

    cls.__tablename__ = cls.Attributes.table_name
    cls.Attributes.sqla_mapper = cls.__mapper__ = cls_mapper
    cls.Attributes.sqla_table = cls.__table__ = table

    return table
import os
import argparse
import hashlib
import shutil
import sys

import dpf_model as dbm
from sqlalchemy import create_engine
from sqlalchemy import func
from sqlalchemy.orm import sessionmaker
from sqlalchemy.orm import mapper
from dpf_model import HashTable

mapper(dbm.HashTable, dbm.table_hashes)

BLOCK_SIZE = 1024 * 1024  # one megabyte
MEDIA_EXTENSIONS = ['avi', 'mp3', 'mp4', 'mkv', 'webm', 'mpg', 'jpg', 'png']
DOCS_EXTENSIONS = ['pdf', 'doc', 'docx', 'xls', 'xlsx', 'numbers', 'pages',
                   'djvu', 'txt', 'epub']
TRASHBIN = os.path.expanduser('~/.Trash')


class Duplifinder():

    def __init__(self, path, path_to_be_excluded):
        '''Current limitation: only one excluded folder is supported.'''
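# A self-contained sketch (not part of the original module) of the chunked
# file-hashing pattern that BLOCK_SIZE above suggests: reading a file one
# block at a time keeps memory use flat even for large media files. The
# helper name file_sha256 is an assumption, not the original implementation.
def file_sha256(path, block_size=BLOCK_SIZE):
    """Return the hex SHA-256 digest of *path*, read block by block."""
    digest = hashlib.sha256()
    with open(path, 'rb') as handle:
        # iter() with a sentinel stops at the first empty read, i.e. EOF.
        for block in iter(lambda: handle.read(block_size), b''):
            digest.update(block)
    return digest.hexdigest()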
def test_one(self):
    """Post_update only fires off when needed.

    This test case used to produce many superfluous update statements,
    particularly upon delete.
    """
    node, Node = self.tables.node, self.classes.Node

    mapper(
        Node, node,
        properties={
            'children': relationship(
                Node,
                primaryjoin=node.c.id == node.c.parent_id,
                cascade="all",
                backref=backref("parent", remote_side=node.c.id)),
            'prev_sibling': relationship(
                Node,
                primaryjoin=node.c.prev_sibling_id == node.c.id,
                remote_side=node.c.id,
                uselist=False),
            'next_sibling': relationship(
                Node,
                primaryjoin=node.c.next_sibling_id == node.c.id,
                remote_side=node.c.id,
                uselist=False,
                post_update=True)
        })

    session = create_session()

    def append_child(parent, child):
        if parent.children:
            parent.children[-1].next_sibling = child
            child.prev_sibling = parent.children[-1]
        parent.children.append(child)

    def remove_child(parent, child):
        child.parent = None
        node = child.next_sibling
        node.prev_sibling = child.prev_sibling
        child.prev_sibling.next_sibling = node
        session.delete(child)

    root = Node('root')
    about = Node('about')
    cats = Node('cats')
    stories = Node('stories')
    bruce = Node('bruce')

    append_child(root, about)
    assert about.prev_sibling is None
    append_child(root, cats)
    assert cats.prev_sibling is about
    assert cats.next_sibling is None
    assert about.next_sibling is cats
    assert about.prev_sibling is None
    append_child(root, stories)
    append_child(root, bruce)
    session.add(root)
    session.flush()

    remove_child(root, cats)

    # pre-trigger lazy loader on 'cats' to make the test easier
    cats.children
    self.assert_sql_execution(
        testing.db,
        session.flush,
        AllOf(
            CompiledSQL(
                "UPDATE node SET prev_sibling_id=:prev_sibling_id "
                "WHERE node.id = :node_id",
                lambda ctx: {'prev_sibling_id': about.id,
                             'node_id': stories.id}),
            CompiledSQL(
                "UPDATE node SET next_sibling_id=:next_sibling_id "
                "WHERE node.id = :node_id",
                lambda ctx: {'next_sibling_id': stories.id,
                             'node_id': about.id}),
            CompiledSQL(
                "UPDATE node SET next_sibling_id=:next_sibling_id "
                "WHERE node.id = :node_id",
                lambda ctx: {'next_sibling_id': None,
                             'node_id': cats.id}),
        ),
        CompiledSQL("DELETE FROM node WHERE node.id = :id",
                    lambda ctx: [{'id': cats.id}]))

    session.delete(root)

    self.assert_sql_execution(
        testing.db,
        session.flush,
        CompiledSQL(
            "UPDATE node SET next_sibling_id=:next_sibling_id "
            "WHERE node.id = :node_id",
            lambda ctx: [{'node_id': about.id, 'next_sibling_id': None},
                         {'node_id': stories.id,
                          'next_sibling_id': None}]),
        AllOf(
            CompiledSQL("DELETE FROM node WHERE node.id = :id",
                        lambda ctx: {'id': about.id}),
            CompiledSQL("DELETE FROM node WHERE node.id = :id",
                        lambda ctx: {'id': stories.id}),
            CompiledSQL("DELETE FROM node WHERE node.id = :id",
                        lambda ctx: {'id': bruce.id}),
        ),
        CompiledSQL("DELETE FROM node WHERE node.id = :id",
                    lambda ctx: {'id': root.id}),
    )

    about = Node('about')
    cats = Node('cats')
    about.next_sibling = cats
    cats.prev_sibling = about
    session.add(about)
    session.flush()
    session.delete(about)
    cats.prev_sibling = None
    session.flush()
    Column("create_time", DateTime, default=now),
    Column("update_time", DateTime, default=now, onupdate=now),
    Column("name", TrimmedString(255), index=True, unique=True),
    Column("description", TEXT),
    Column("deleted", Boolean, index=True, default=False))

Tag.table = Table("tag", metadata,
                  Column("id", Integer, primary_key=True),
                  Column("type", Integer),
                  Column("parent_id", Integer, ForeignKey("tag.id")),
                  Column("name", TrimmedString(255)),
                  UniqueConstraint("name"))

# With the tables defined, we can define the mappers and set up the
# relationships between the model objects.
mapper(User, User.table, properties=dict(
    active_repositories=relation(
        Repository,
        primaryjoin=((Repository.table.c.user_id == User.table.c.id) &
                     (not_(Repository.table.c.deleted))),
        order_by=(Repository.table.c.name)),
    galaxy_sessions=relation(
        GalaxySession,
        order_by=desc(GalaxySession.table.c.update_time)),
    api_keys=relation(
        APIKeys, backref="user",
        order_by=desc(APIKeys.table.c.create_time))))

mapper(PasswordResetToken, PasswordResetToken.table,
       properties=dict(user=relation(User, backref="reset_tokens")))

mapper(APIKeys, APIKeys.table, properties={})

mapper(Group, Group.table,
       properties=dict(users=relation(UserGroupAssociation)))

mapper(Role, Role.table, properties=dict(
    repositories=relation(
        RepositoryRoleAssociation,
        primaryjoin=((Role.table.c.id ==
                      RepositoryRoleAssociation.table.c.role_id) &
                     (RepositoryRoleAssociation.table.c.repository_id ==
                      Repository.table.c.id))),
    users=relation(UserRoleAssociation,
def test_post_update_m2o(self):
    """A cycle between two rows, with a post_update on the many-to-one."""

    person, ball, Ball, Person = (self.tables.person,
                                  self.tables.ball,
                                  self.classes.Ball,
                                  self.classes.Person)

    mapper(Ball, ball)
    mapper(Person, person, properties=dict(
        balls=relationship(
            Ball,
            primaryjoin=ball.c.person_id == person.c.id,
            remote_side=ball.c.person_id,
            post_update=False,
            cascade="all, delete-orphan"),
        favorite=relationship(
            Ball,
            primaryjoin=person.c.favorite_ball_id == ball.c.id,
            remote_side=person.c.favorite_ball_id,
            post_update=True)))

    b = Ball(data='some data')
    p = Person(data='some data')
    p.balls.append(b)
    p.balls.append(Ball(data='some data'))
    p.balls.append(Ball(data='some data'))
    p.balls.append(Ball(data='some data'))
    p.favorite = b
    sess = create_session()
    sess.add(b)
    sess.add(p)

    self.assert_sql_execution(
        testing.db,
        sess.flush,
        RegexSQL("^INSERT INTO person", {'data': 'some data'}),
        RegexSQL("^INSERT INTO ball",
                 lambda c: {'person_id': p.id, 'data': 'some data'}),
        RegexSQL("^INSERT INTO ball",
                 lambda c: {'person_id': p.id, 'data': 'some data'}),
        RegexSQL("^INSERT INTO ball",
                 lambda c: {'person_id': p.id, 'data': 'some data'}),
        RegexSQL("^INSERT INTO ball",
                 lambda c: {'person_id': p.id, 'data': 'some data'}),
        ExactSQL(
            "UPDATE person SET favorite_ball_id=:favorite_ball_id "
            "WHERE person.id = :person_id",
            lambda ctx: {'favorite_ball_id': p.favorite.id,
                         'person_id': p.id}),
    )

    sess.delete(p)

    self.assert_sql_execution(
        testing.db,
        sess.flush,
        ExactSQL(
            "UPDATE person SET favorite_ball_id=:favorite_ball_id "
            "WHERE person.id = :person_id",
            lambda ctx: {'person_id': p.id, 'favorite_ball_id': None}),
        ExactSQL("DELETE FROM ball WHERE ball.id = :id", None),
        # lambda ctx: [{'id': 1}, {'id': 4}, {'id': 3}, {'id': 2}]
        ExactSQL("DELETE FROM person WHERE person.id = :id",
                 lambda ctx: [{'id': p.id}]))
    MetaData,
    String,
    ForeignKey,
)
from sqlalchemy.orm import (backref, mapper, relationship)

metadata_classical = MetaData()

# Declare TestUser and Email mappings
test_users = Table('test_users', metadata_classical,
                   Column('id', Integer, primary_key=True),
                   Column('name', String(50)),
                   Column('fullname', String(50)),
                   Column('password', String(50)))

emails = Table(
    'emails', metadata_classical,
    Column('id', Integer, primary_key=True),
    Column('user_id', Integer,
           ForeignKey('test_users.id', ondelete='CASCADE')),
    Column('address', String(50)))

mapper(TestUser, test_users, properties={
    'emails': relationship(Email,
                           backref=backref('test_user'),
                           cascade="all, delete, delete-orphan",
                           passive_deletes=True,
                           order_by=emails.c.address)
})
mapper(Email, emails)
card = Table(
    'cards', metadata,
    Column('id', Integer, primary_key=True),
    Column('word', String(50)),
    Column('translate', String(50)),
    Column('deckid', Integer),
    Column('userid', Integer, ForeignKey('user.id')),
)

from main.server import Card


class User(object):
    pass


print(dir(User))
mapper(User, user, properties={
    'cards': relationship(Card, backref='user', order_by=card.c.id)
})
print(dir(User))

mapper(Card, card)

session.add(Card('word', 'translate'))
session.commit()
def test_post_update_o2m(self):
    """A cycle between two rows, with a post_update on the one-to-many."""

    person, ball, Ball, Person = (self.tables.person,
                                  self.tables.ball,
                                  self.classes.Ball,
                                  self.classes.Person)

    mapper(Ball, ball)
    mapper(Person, person, properties=dict(
        balls=relationship(
            Ball,
            primaryjoin=ball.c.person_id == person.c.id,
            remote_side=ball.c.person_id,
            cascade="all, delete-orphan",
            post_update=True,
            backref='person'),
        favorite=relationship(
            Ball,
            primaryjoin=person.c.favorite_ball_id == ball.c.id,
            remote_side=person.c.favorite_ball_id)))

    b = Ball(data='some data')
    p = Person(data='some data')
    p.balls.append(b)

    b2 = Ball(data='some data')
    p.balls.append(b2)

    b3 = Ball(data='some data')
    p.balls.append(b3)

    b4 = Ball(data='some data')
    p.balls.append(b4)

    p.favorite = b
    sess = create_session()
    sess.add_all((b, p, b2, b3, b4))

    self.assert_sql_execution(
        testing.db,
        sess.flush,
        CompiledSQL("INSERT INTO ball (person_id, data) "
                    "VALUES (:person_id, :data)",
                    {'person_id': None, 'data': 'some data'}),
        CompiledSQL("INSERT INTO ball (person_id, data) "
                    "VALUES (:person_id, :data)",
                    {'person_id': None, 'data': 'some data'}),
        CompiledSQL("INSERT INTO ball (person_id, data) "
                    "VALUES (:person_id, :data)",
                    {'person_id': None, 'data': 'some data'}),
        CompiledSQL("INSERT INTO ball (person_id, data) "
                    "VALUES (:person_id, :data)",
                    {'person_id': None, 'data': 'some data'}),
        CompiledSQL("INSERT INTO person (favorite_ball_id, data) "
                    "VALUES (:favorite_ball_id, :data)",
                    lambda ctx: {'favorite_ball_id': b.id,
                                 'data': 'some data'}),
        CompiledSQL("UPDATE ball SET person_id=:person_id "
                    "WHERE ball.id = :ball_id",
                    lambda ctx: [{'person_id': p.id, 'ball_id': b.id},
                                 {'person_id': p.id, 'ball_id': b2.id},
                                 {'person_id': p.id, 'ball_id': b3.id},
                                 {'person_id': p.id, 'ball_id': b4.id}]),
    )

    sess.delete(p)

    self.assert_sql_execution(
        testing.db,
        sess.flush,
        CompiledSQL("UPDATE ball SET person_id=:person_id "
                    "WHERE ball.id = :ball_id",
                    lambda ctx: [{'person_id': None, 'ball_id': b.id},
                                 {'person_id': None, 'ball_id': b2.id},
                                 {'person_id': None, 'ball_id': b3.id},
                                 {'person_id': None, 'ball_id': b4.id}]),
        CompiledSQL("DELETE FROM person WHERE person.id = :id",
                    lambda ctx: [{'id': p.id}]),
        CompiledSQL("DELETE FROM ball WHERE ball.id = :id",
                    lambda ctx: [{'id': b.id},
                                 {'id': b2.id},
                                 {'id': b3.id},
                                 {'id': b4.id}]))
def __init__(self, path):
    # Create the database engine
    self.database_engine = create_engine(
        f'sqlite:///{path}',
        echo=False,
        pool_recycle=7200,
        connect_args={'check_same_thread': False})

    # Create a MetaData object
    self.metadata = MetaData()

    # Create the users table
    users_table = Table('Users', self.metadata,
                        Column('id', Integer, primary_key=True),
                        Column('name', String, unique=True),
                        Column('last_login', DateTime),
                        Column('passwd_hash', String),
                        Column('pubkey', Text))

    # Create the active users table
    active_users_table = Table(
        'Active_users', self.metadata,
        Column('id', Integer, primary_key=True),
        Column('user', ForeignKey('Users.id'), unique=True),
        Column('ip_address', String),
        Column('port', Integer),
        Column('login_time', DateTime))

    # Create the login history table
    user_login_history = Table('Login_history', self.metadata,
                               Column('id', Integer, primary_key=True),
                               Column('name', ForeignKey('Users.id')),
                               Column('date_time', DateTime),
                               Column('ip', String),
                               Column('port', String))

    # Create the user contacts table
    contacts = Table('Contacts', self.metadata,
                     Column('id', Integer, primary_key=True),
                     Column('user', ForeignKey('Users.id')),
                     Column('contact', ForeignKey('Users.id')))

    # Create the user statistics table
    users_history_table = Table('History', self.metadata,
                                Column('id', Integer, primary_key=True),
                                Column('user', ForeignKey('Users.id')),
                                Column('sent', Integer),
                                Column('accepted', Integer))

    # Create the tables
    self.metadata.create_all(self.database_engine)

    # Create the mappings
    mapper(self.AllUsers, users_table)
    mapper(self.ActiveUsers, active_users_table)
    mapper(self.LoginHistory, user_login_history)
    mapper(self.UsersContacts, contacts)
    mapper(self.UsersHistory, users_history_table)

    # Create a session
    Session = sessionmaker(bind=self.database_engine)
    self.session = Session()

    # If there are any records in the active users table, they must be
    # removed, since no one is connected at startup.
    self.session.query(self.ActiveUsers).delete()
    self.session.commit()
def test_one(self):
    child1, child2, child3, Parent, parent, Child1, Child2, Child3 = (
        self.tables.child1,
        self.tables.child2,
        self.tables.child3,
        self.classes.Parent,
        self.tables.parent,
        self.classes.Child1,
        self.classes.Child2,
        self.classes.Child3)

    mapper(Parent, parent, properties={
        'c1s': relationship(
            Child1, primaryjoin=child1.c.parent_id == parent.c.id),
        'c2s': relationship(
            Child2, primaryjoin=child2.c.parent_id == parent.c.id),
        'c3s': relationship(
            Child3, primaryjoin=child3.c.parent_id == parent.c.id),
        'c1': relationship(
            Child1, primaryjoin=child1.c.id == parent.c.c1_id,
            post_update=True),
        'c2': relationship(
            Child2, primaryjoin=child2.c.id == parent.c.c2_id,
            post_update=True),
        'c3': relationship(
            Child3, primaryjoin=child3.c.id == parent.c.c3_id,
            post_update=True),
    })
    mapper(Child1, child1)
    mapper(Child2, child2)
    mapper(Child3, child3)

    sess = create_session()

    p1 = Parent('p1')
    c11, c12, c13 = Child1('c1'), Child1('c2'), Child1('c3')
    c21, c22, c23 = Child2('c1'), Child2('c2'), Child2('c3')
    c31, c32, c33 = Child3('c1'), Child3('c2'), Child3('c3')

    p1.c1s = [c11, c12, c13]
    p1.c2s = [c21, c22, c23]
    p1.c3s = [c31, c32, c33]
    sess.add(p1)
    sess.flush()

    p1.c1 = c12
    p1.c2 = c23
    p1.c3 = c31

    self.assert_sql_execution(
        testing.db,
        sess.flush,
        CompiledSQL(
            "UPDATE parent SET c1_id=:c1_id, c2_id=:c2_id, "
            "c3_id=:c3_id WHERE parent.id = :parent_id",
            lambda ctx: {'c2_id': c23.id, 'parent_id': p1.id,
                         'c1_id': c12.id, 'c3_id': c31.id}))

    p1.c1 = p1.c2 = p1.c3 = None

    self.assert_sql_execution(
        testing.db,
        sess.flush,
        CompiledSQL(
            "UPDATE parent SET c1_id=:c1_id, c2_id=:c2_id, "
            "c3_id=:c3_id WHERE parent.id = :parent_id",
            lambda ctx: {'c2_id': None, 'parent_id': p1.id,
                         'c1_id': None, 'c3_id': None}))
def __init__(self, uuid, name, status):
    self.uuid = uuid
    self.name = name
    self.status = status


class Check(object):
    def __init__(self, revision, checker, state, created, updated):
        self.revision_key = revision.key
        self.checker_key = checker.key
        self.state = state
        self.created = created
        self.updated = updated


mapper(Account, account_table)
mapper(
    Project, project_table,
    properties=dict(
        branches=relationship(Branch,
                              backref='project',
                              order_by=branch_table.c.name,
                              cascade='all, delete-orphan'),
        changes=relationship(Change,
                             backref='project',
                             order_by=change_table.c.number,
                             cascade='all, delete-orphan'),
        topics=relationship(Topic,
                            secondary=project_topic_table,
                            order_by=topic_table.c.name,
def test_five(self):
    """Tests the late compilation of mappers."""

    mapper(
        SpecLine,
        specification_table,
        properties=dict(
            leader=relationship(
                Assembly,
                lazy="joined",
                uselist=False,
                foreign_keys=[specification_table.c.leader_id],
                primaryjoin=specification_table.c.leader_id ==
                products_table.c.product_id,
                backref=backref("specification"),
            ),
            follower=relationship(
                Product,
                lazy="joined",
                uselist=False,
                foreign_keys=[specification_table.c.follower_id],
                primaryjoin=specification_table.c.follower_id ==
                products_table.c.product_id,
            ),
            quantity=specification_table.c.quantity,
        ),
    )
    mapper(
        Product,
        products_table,
        polymorphic_on=products_table.c.product_type,
        polymorphic_identity="product",
        properties={
            "documents": relationship(
                Document,
                lazy="select",
                backref="product",
                cascade="all, delete-orphan",
            )
        },
    )
    mapper(Detail, inherits=Product, polymorphic_identity="detail")
    mapper(
        Document,
        documents_table,
        polymorphic_on=documents_table.c.document_type,
        polymorphic_identity="document",
        properties=dict(
            name=documents_table.c.name,
            data=deferred(documents_table.c.data),
        ),
    )
    mapper(
        RasterDocument,
        inherits=Document,
        polymorphic_identity="raster_document",
    )
    mapper(Assembly, inherits=Product, polymorphic_identity="assembly")

    session = create_session()

    a1 = Assembly(name="a1")
    a1.specification.append(SpecLine(follower=Detail(name="d1")))
    a1.documents.append(Document("doc1"))
    a1.documents.append(RasterDocument("doc2"))
    session.add(a1)
    orig = repr(a1)
    session.flush()
    session.expunge_all()

    a1 = session.query(Product).filter_by(name="a1").one()
    new = repr(a1)
    print(orig)
    print(new)
    assert (
        orig == new ==
        "<Assembly a1> specification="
        "[<SpecLine 1.0 <Detail d1>>] documents=[<Document doc1>, "
        "<RasterDocument doc2>]")
#!/usr/bin/env python
# -*- coding:utf-8 -*-
from . import metadata
from sqlalchemy import Column, String, Text, Integer, Table
from sqlalchemy.orm import mapper

MachineRelationTB = Table(
    'fb_machine_relation', metadata,
    Column('id', String(40), primary_key=True),
    Column('project_id', String, nullable=False),
    Column('machine_id', String, nullable=False),
    Column('create_time', Integer),
)


class MachineRelationMapping(object):
    pass


mapper(MachineRelationMapping, MachineRelationTB)
def setup_mappers(cls):
    mapper(Parent, parent, properties={
        'children': relationship(Child, backref='parent')})
    mapper(Child, child)
def setup_mappers(cls):
    A, B, C, D, E, F, G = cls.classes("A", "B", "C", "D", "E", "F", "G")
    a, b, c, d, e, f, g = cls.tables("a", "b", "c", "d", "e", "f", "g")

    mapper(A, a, properties={"bs": relationship(B),
                             "es": relationship(E)})
    mapper(B, b, properties={"cs": relationship(C)})
    mapper(C, c, properties={"ds": relationship(D)})
    mapper(D, d)
    mapper(E, e, properties={"fs": relationship(F),
                             "gs": relationship(G)})
    mapper(F, f)
    mapper(G, g)
metadata.create_all(engine)


class User:
    def __init__(self, name, fullname, password):
        self.name = name
        self.fullname = fullname
        self.password = password

    def __repr__(self):
        return "<User('%s','%s', '%s')>" % (self.name,
                                            self.fullname,
                                            self.password)


mapper(User, users_table)

user = User("Bob", "Bobster", "qweasdzxc")

from sqlalchemy.orm import sessionmaker

Session = sessionmaker(bind=engine)
session = Session()
session.add(user)
session.commit()

from sqlalchemy.ext.declarative import declarative_base
def setup_mappers(cls):
    A = cls.classes.A
    a = cls.tables.a
    mapper(A, a)
def setup_mappers(cls):
    User, users = cls.classes.User, cls.tables.users
    Address, addresses = cls.classes.Address, cls.tables.addresses

    mapper(User, users, properties={"addresses": relationship(Address)})
    mapper(Address, addresses)
books = Table(
    'books', BASE.metadata,
    Column('id', Integer, primary_key=True),
    Column('title', Unicode(200), nullable=False),
    Column('user_id', Integer, ForeignKey('user_table.id')),
)

Index("ix_user_title", books.c.user_id, books.c.title)


class Book(object):
    pass


mapper(Book, books, {'user': relation(User, backref='books')})

# Not mapped table
notes = Table(
    'notes', BASE.metadata,
    Column('id', Integer, primary_key=True),
    Column('name', Unicode(200), nullable=False),
    Column('user_id', Integer, ForeignKey('user_table.id')),
)

if __name__ == '__main__':
    import sadisplay
    desc = sadisplay.describe(globals().values())
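    # A sketch of the usual next step (based on sadisplay's documented
    # usage, not part of the original file): render the description to
    # PlantUML and Graphviz text with sadisplay's plantuml() and dot()
    # renderers, ready for diagram generation.
    with open('schema.plantuml', 'w') as f:
        f.write(sadisplay.plantuml(desc))
    with open('schema.dot', 'w') as f:
        f.write(sadisplay.dot(desc))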
def setup_mappers(cls):
    Parent = cls.classes.Parent
    parent = cls.tables.parent
    mapper(Parent, parent)