def includeme(config):
    """Pyramid includeme hook: configure the classic SQLAlchemy mapper
    for ``Folder`` as a polymorphic subclass of ``Content``.
    """
    tables = config.registry['metadata'].tables
    # Base mappers must be configured before the inheriting Folder mapper.
    config.include('amnesia.modules.content.mapper')
    config.include('amnesia.modules.content_type.mapper')
    orm.mapper(
        Folder, tables['folder'],
        inherits=Content,
        polymorphic_identity=get_type_id(config, 'folder'),
        # Joined-table inheritance: folder.content_id -> content.id
        inherit_condition=tables['folder'].c.content_id ==
        tables['content'].c.id,
        properties={
            # The Content row shown as this folder's "index" page.
            # post_update breaks the circular dependency between the
            # two tables during flush.
            'alternate_index': orm.relationship(
                Content,
                primaryjoin=tables['folder'].c.index_content_id ==
                tables['content'].c.id,
                innerjoin=True,
                uselist=False,
                post_update=True,
                backref=orm.backref('indexes')
            ),
            # Content types eligible for polymorphic loading in this folder.
            'polymorphic_children': orm.relationship(
                ContentType,
                secondary=tables['folder_polymorphic_loading']
            )
        }
    )
def test_load_only_path_specific(self):
    """load_only() scoped per relationship path via defaultload():
    each entity in the joined-load graph gets its own column set."""
    User = self.classes.User
    Address = self.classes.Address
    Order = self.classes.Order
    users = self.tables.users
    addresses = self.tables.addresses
    orders = self.tables.orders

    # OrderedDict keeps the joined-eager FROM clause order deterministic
    # so the compiled SQL can be compared as a literal string.
    mapper(User, users, properties=util.OrderedDict([
        ("addresses", relationship(Address, lazy="joined")),
        ("orders", relationship(Order, lazy="joined"))
    ]))
    mapper(Address, addresses)
    mapper(Order, orders)

    sess = create_session()
    q = sess.query(User).options(
        load_only("name").defaultload("addresses").load_only(
            "id", "email_address"),
        defaultload("orders").load_only("id")
    )

    # hmmmm joinedload seems to be forcing users.id into here...
    self.assert_compile(
        q,
        "SELECT users.id AS users_id, users.name AS users_name, "
        "addresses_1.id AS addresses_1_id, "
        "addresses_1.email_address AS addresses_1_email_address, "
        "orders_1.id AS orders_1_id FROM users "
        "LEFT OUTER JOIN addresses AS addresses_1 "
        "ON users.id = addresses_1.user_id "
        "LEFT OUTER JOIN orders AS orders_1 ON users.id = orders_1.user_id"
    )
def test_one(self):
    """Mappers extended after construction via add_property(), forming a
    graph of joined-eager loads; flush/reload must still work."""
    p_m = mapper(Part, parts)
    mapper(InheritedPart, inherited_part, properties=dict(
        part=relationship(Part, lazy='joined')))
    d_m = mapper(Design, design, properties=dict(
        inheritedParts=relationship(InheritedPart,
                                    cascade="all, delete-orphan",
                                    backref="design")))
    mapper(DesignType, design_types)

    # Properties added after the initial mapper() calls.
    d_m.add_property(
        "type", relationship(DesignType, lazy='joined', backref="designs"))
    p_m.add_property(
        "design", relationship(
            Design, lazy='joined',
            backref=backref("parts", cascade="all, delete-orphan")))

    d = Design()
    sess = create_session()
    sess.add(d)
    sess.flush()
    sess.expunge_all()
    x = sess.query(Design).get(1)
    # Attribute access alone is the assertion: it must not raise.
    x.inheritedParts
def test_overlapping_attribute_error(self):
    """Each mapper declares a backref whose name collides with a
    relationship already configured on the target class; mapper
    configuration must fail with ArgumentError."""
    place = self.tables.place
    Transition = self.classes.Transition
    place_input = self.tables.place_input
    Place = self.classes.Place
    transition = self.tables.transition

    mapper(Place, place, properties=dict(
        transitions=relationship(
            Transition, secondary=place_input, backref="places"),
    ))
    mapper(Transition, transition, properties=dict(
        places=relationship(
            Place, secondary=place_input, backref="transitions"),
    ))

    # Both backrefs target names that already exist -> ArgumentError.
    assert_raises_message(
        sa.exc.ArgumentError,
        "property of that name exists",
        sa.orm.configure_mappers,
    )
def test_deep_options(self):
    """undefer() with a dotted multi-hop path ('orders.items.description')
    must undefer the column on the deepest entity only."""
    users, items, order_items, Order, Item, User, orders = (
        self.tables.users,
        self.tables.items,
        self.tables.order_items,
        self.classes.Order,
        self.classes.Item,
        self.classes.User,
        self.tables.orders)

    # Item.description is deferred by default.
    mapper(Item, items, properties=dict(
        description=deferred(items.c.description)))
    mapper(Order, orders, properties=dict(
        items=relationship(Item, secondary=order_items)))
    mapper(User, users, properties=dict(
        orders=relationship(Order, order_by=orders.c.id)))

    sess = create_session()
    q = sess.query(User).order_by(User.id)
    l = q.all()
    item = l[0].orders[1].items[1]

    # Without options, first access to the deferred column emits one
    # statement; subsequent access is free.
    def go():
        eq_(item.description, 'item 4')
    self.sql_count_(1, go)
    eq_(item.description, 'item 4')

    sess.expunge_all()
    # With the path-scoped undefer, the column loads up front: zero
    # statements on attribute access.
    l = q.options(undefer('orders.items.description')).all()
    item = l[0].orders[1].items[1]
    def go():
        eq_(item.description, 'item 4')
    self.sql_count_(0, go)
    eq_(item.description, 'item 4')
def test_path_entity(self):
    """test the legacy *addl_attrs argument."""
    User = self.classes.User
    Order = self.classes.Order
    Item = self.classes.Item
    users = self.tables.users
    orders = self.tables.orders
    items = self.tables.items
    order_items = self.tables.order_items

    mapper(User, users, properties={
        "orders": relationship(Order, lazy="joined")
    })
    mapper(Order, orders, properties={
        "items": relationship(Item, secondary=order_items, lazy="joined")
    })
    mapper(Item, items)

    sess = create_session()
    # Expected SQL: items_1.description is absent because defer() below
    # targets it along the User -> orders -> items path.
    exp = ("SELECT users.id AS users_id, users.name AS users_name, "
           "items_1.id AS items_1_id, orders_1.id AS orders_1_id, "
           "orders_1.user_id AS orders_1_user_id, orders_1.address_id "
           "AS orders_1_address_id, orders_1.description AS "
           "orders_1_description, orders_1.isopen AS orders_1_isopen "
           "FROM users LEFT OUTER JOIN orders AS orders_1 "
           "ON users.id = orders_1.user_id LEFT OUTER JOIN "
           "(order_items AS order_items_1 JOIN items AS items_1 "
           "ON items_1.id = order_items_1.item_id) "
           "ON orders_1.id = order_items_1.order_id")

    # Legacy calling form: defer(entity_attr, entity_attr, column_attr).
    q = sess.query(User).options(defer(User.orders, Order.items,
                                       Item.description))
    self.assert_compile(q, exp)
def create_mapper(rack_tbl, rack_barcoded_location_tbl):
    "Mapper factory."
    # Short alias for the rack <-> barcoded-location association table.
    rbl = rack_barcoded_location_tbl
    m = mapper(Rack, rack_tbl,
               id_attribute='rack_id',
               slug_expression=lambda cls: as_slug_expression(cls.barcode),
               properties=dict(
                   # Case-insensitive comparisons on the label column.
                   label=column_property(
                       rack_tbl.c.label,
                       comparator_factory=CaseInsensitiveComparator
                   ),
                   specs=relationship(RackSpecs, innerjoin=True,
                                      uselist=False),
                   status=relationship(ItemStatus, innerjoin=True,
                                       uselist=False),
                   location_rack=relationship(
                       BarcodedLocationRack,
                       uselist=False,
                       back_populates='rack',
                       cascade='all,delete,delete-orphan'),
                   # Read-only view of the location via the association
                   # table; writes go through location_rack instead.
                   _location=relationship(
                       BarcodedLocation, viewonly=True,
                       uselist=False,
                       secondary=rbl,
                       foreign_keys=(rbl.c.rack_id,
                                     rbl.c.barcoded_location_id),
                       ),
               ),
               # Single-table inheritance discriminator.
               polymorphic_on=rack_tbl.c.rack_type,
               polymorphic_identity=RACK_TYPES.RACK,
               )
    return m
def test_flush_size(self):
    """The unit of work should include only the relationships actually
    touched; size grows once 'foobars' is populated."""
    foobars, nodes = self.tables.foobars, self.tables.nodes

    class Node(fixtures.ComparableEntity):
        pass

    class FooBar(fixtures.ComparableEntity):
        pass

    mapper(Node, nodes, properties={
        'children': relationship(Node),
        'foobars': relationship(FooBar)
    })
    mapper(FooBar, foobars)

    sess = create_session()
    n1 = Node(data='n1')
    n2 = Node(data='n2')
    n1.children.append(n2)
    sess.add(n1)
    # ensure "foobars" doesn't get yanked in here
    self._assert_uow_size(sess, 3)

    n1.foobars.append(FooBar())
    # saveupdateall/deleteall for FooBar added here,
    # plus processstate node.foobars
    # currently the "all" procs stay in pairs
    self._assert_uow_size(sess, 6)

    sess.flush()
def test_limit(self):
    """test limit operations combined with lazy-load relationships."""
    users, items, order_items, orders, Item, User, Address, Order, \
        addresses = (
            self.tables.users,
            self.tables.items,
            self.tables.order_items,
            self.tables.orders,
            self.classes.Item,
            self.classes.User,
            self.classes.Address,
            self.classes.Order,
            self.tables.addresses,
        )

    mapper(Item, items)
    mapper(Order, orders,
           properties={"items": relationship(Item, secondary=order_items,
                                             lazy="select")})
    mapper(
        User,
        users,
        properties={
            "addresses": relationship(mapper(Address, addresses),
                                      lazy="select"),
            "orders": relationship(Order, lazy="select"),
        },
    )

    sess = create_session()
    q = sess.query(User)

    # These backends don't support the LIMIT+OFFSET combination used in
    # the else branch, so only LIMIT is exercised there.
    if testing.against("maxdb", "mssql"):
        l = q.limit(2).all()
        assert self.static.user_all_result[:2] == l
    else:
        l = q.limit(2).offset(1).all()
        assert self.static.user_all_result[1:3] == l
def test_distinct(self):
    """DISTINCT collapses the row multiplication produced by joining
    against a UNION ALL of the users table."""
    users, items, order_items, orders, Item, User, Address, Order, \
        addresses = (
            self.tables.users,
            self.tables.items,
            self.tables.order_items,
            self.tables.orders,
            self.classes.Item,
            self.classes.User,
            self.classes.Address,
            self.classes.Order,
            self.tables.addresses,
        )

    mapper(Item, items)
    mapper(Order, orders,
           properties={"items": relationship(Item, secondary=order_items,
                                             lazy="select")})
    mapper(
        User,
        users,
        properties={
            "addresses": relationship(mapper(Address, addresses),
                                      lazy="select"),
            "orders": relationship(Order, lazy="select"),
        },
    )

    sess = create_session()
    q = sess.query(User)

    # use a union all to get a lot of rows to join against
    u2 = users.alias("u2")
    s = sa.union_all(u2.select(use_labels=True),
                     u2.select(use_labels=True),
                     u2.select(use_labels=True)).alias("u")

    l = q.filter(s.c.u2_id == User.id).order_by(User.id).distinct().all()
    eq_(self.static.user_all_result, l)
def setup_mappers(cls):
    """Map Account, Transaction and Entry.

    Entry carries two many-to-one relationships whose backrefs use
    different default loader strategies ('select' vs. 'joined'), both
    ordered by entry_id.
    """
    Account = cls.classes.Account
    Transaction = cls.classes.Transaction
    transactions = cls.tables.transactions
    accounts = cls.tables.accounts
    entries = cls.tables.entries
    Entry = cls.classes.Entry

    mapper(Account, accounts)
    mapper(Transaction, transactions)
    mapper(Entry, entries, properties={
        "account": relationship(
            Account,
            uselist=False,
            backref=backref("entries", lazy="select",
                            order_by=entries.c.entry_id),
        ),
        "transaction": relationship(
            Transaction,
            uselist=False,
            backref=backref("entries", lazy="joined",
                            order_by=entries.c.entry_id),
        ),
    })
def setup_mappers(cls):
    """Map Left, Middle and Right with bi-directional joined eager loads
    on both of Middle's relationships.

    Fix: removed the stray trailing comma after the final ``mapper()``
    call, which turned the expression statement into a pointless
    one-element tuple.
    """
    Right, Middle, middle, right, left, Left = (
        cls.classes.Right,
        cls.classes.Middle,
        cls.tables.middle,
        cls.tables.right,
        cls.tables.left,
        cls.classes.Left,
    )

    # set up bi-directional eager loads
    mapper(Left, left)
    mapper(Right, right)
    mapper(
        Middle, middle,
        properties=dict(
            left=relationship(
                Left,
                lazy="joined",
                backref=backref("middle", lazy="joined"),
            ),
            right=relationship(
                Right,
                lazy="joined",
                backref=backref("middle", lazy="joined"),
            ),
        ),
    )
def test_orphan(self):
    """test that an entity can have two parent delete-orphan cascades,
    and is detected as an orphan when saved without a parent.

    Fix: ``except orm_exc.FlushError, e:`` is Python-2-only syntax (a
    SyntaxError on Python 3); replaced with the version-agnostic form.
    The bound exception was unused, and the dead ``assert True`` is
    replaced by a failure message on the expected-raise path.
    """
    class Address(_fixtures.Base):
        pass

    class Home(_fixtures.Base):
        pass

    class Business(_fixtures.Base):
        pass

    mapper(Address, addresses)
    # Two possible delete-orphan parents for Address.
    mapper(Home, homes, properties={
        'address': relationship(Address, cascade='all,delete-orphan',
                                single_parent=True)})
    mapper(Business, businesses, properties={
        'address': relationship(Address, cascade='all,delete-orphan',
                                single_parent=True)})

    session = create_session()
    a1 = Address()
    session.add(a1)
    try:
        session.flush()
        assert False, "expected FlushError for parentless Address"
    except orm_exc.FlushError:
        pass
def setup_mappers(cls):
    """Map the T1 -> T2 -> T3 chain.

    Each link is a single-parent delete-orphan cascade; the T2 -> T3
    link also exposes a scalar 't2' backref.
    """
    mapper(T1, t1, properties={
        't2': relationship(T2, cascade='all, delete-orphan',
                           single_parent=True),
    })
    mapper(T2, t2, properties={
        't3': relationship(T3, cascade='all, delete-orphan',
                           single_parent=True,
                           backref=backref('t2', uselist=False)),
    })
    mapper(T3, t3)
def test_pending_expunge(self):
    """A pending delete-orphan child removed from its parent before flush
    is expunged from the session, along with its own cascaded children."""
    class Order(_fixtures.Base):
        pass

    class Item(_fixtures.Base):
        pass

    class Attribute(_fixtures.Base):
        pass

    mapper(Attribute, attributes)
    mapper(Item, items, properties=dict(
        attributes=relationship(Attribute, cascade="all,delete-orphan",
                                backref="item")
    ))
    mapper(Order, orders, properties=dict(
        items=relationship(Item, cascade="all,delete-orphan",
                           backref="order")
    ))

    s = create_session()
    order = Order(name="order1")
    s.add(order)

    attr = Attribute(name="attr1")
    item = Item(name="item1", attributes=[attr])

    order.items.append(item)
    order.items.remove(item)

    # Removing the pending orphan expunges it and, transitively, attr.
    assert item not in s
    assert attr not in s

    s.flush()
    # Only the order row made it to the database.
    assert orders.count().scalar() == 1
    assert items.count().scalar() == 0
    assert attributes.count().scalar() == 0
def test_non_orphan(self):
    """test that an entity can have two parent delete-orphan cascades,
    and persists normally."""
    class Address(_fixtures.Base):
        pass

    class Home(_fixtures.Base):
        pass

    class Business(_fixtures.Base):
        pass

    mapper(Address, addresses)
    # Two possible delete-orphan parents for Address.
    mapper(Home, homes, properties={
        'address': relationship(Address, cascade='all,delete-orphan',
                                single_parent=True)})
    mapper(Business, businesses, properties={
        'address': relationship(Address, cascade='all,delete-orphan',
                                single_parent=True)})

    session = create_session()
    h1 = Home(description='home1', address=Address(street='address1'))
    b1 = Business(description='business1',
                  address=Address(street='address2'))
    session.add_all((h1, b1))
    session.flush()
    session.expunge_all()

    # Both parents and their addresses round-trip intact.
    eq_(session.query(Home).get(h1.id),
        Home(description='home1', address=Address(street='address1')))
    eq_(session.query(Business).get(b1.id),
        Business(description='business1',
                 address=Address(street='address2')))
def test_options_with_descriptors(self):
    """Loader options built from strings and from class-bound descriptors
    must survive a pickle round trip, as must a loaded instance."""
    users, addresses, dingalings = (self.tables.users,
                                    self.tables.addresses,
                                    self.tables.dingalings)
    mapper(User, users, properties={
        'addresses': relationship(Address, backref="user")
    })
    mapper(Address, addresses, properties={
        'dingaling': relationship(Dingaling)
    })
    mapper(Dingaling, dingalings)

    sess = create_session()
    u1 = User(name='ed')
    u1.addresses.append(Address(email_address='*****@*****.**'))
    sess.add(u1)
    sess.flush()
    sess.expunge_all()

    # Each option pickles/unpickles with its key intact.
    for opt in [
        sa.orm.joinedload(User.addresses),
        sa.orm.joinedload("addresses"),
        sa.orm.defer("name"),
        sa.orm.defer(User.name),
        sa.orm.joinedload("addresses", Address.dingaling),
    ]:
        opt2 = pickle.loads(pickle.dumps(opt))
        eq_(opt.key, opt2.key)

    # A query using the (last) option, and the loaded instance itself,
    # must also pickle cleanly.
    u1 = sess.query(User).options(opt).first()
    u2 = pickle.loads(pickle.dumps(u1))
def test_collection_setstate(self):
    """test a particular cycle that requires CollectionAdapter
    to not rely upon InstanceState to deserialize."""
    m = MetaData()
    c1 = Table('c1', m,
               Column('parent_id', String,
                      ForeignKey('p.id'), primary_key=True))
    c2 = Table('c2', m,
               Column('parent_id', String,
                      ForeignKey('p.id'), primary_key=True))
    p = Table('p', m,
              Column('id', String, primary_key=True))

    mapper(Parent, p, properties={
        'children1': relationship(Child1),
        'children2': relationship(Child2)
    })
    mapper(Child1, c1)
    mapper(Child2, c2)

    # Build a reference cycle through the instrumented collections.
    obj = Parent()
    screen1 = Screen(obj)
    screen1.errors = [obj.children1, obj.children2]
    screen2 = Screen(Child2(), screen1)
    # The round trip itself is the assertion: it must not raise.
    pickle.loads(pickle.dumps(screen2))
def create_mapper(tube_location_tbl):
    "Mapper factory."
    m = mapper(TubeLocation, tube_location_tbl,
               properties=dict(
                   position=relationship(RackPosition, uselist=False,
                                         innerjoin=True,
                                         # lazy='joined'
                                         # lazy='subquery'
                                         ),
                   rack=relationship(TubeRack, uselist=False,
                                     innerjoin=True,
                                     # cascade='all',
                                     back_populates='tube_locations'),
                   container=relationship(Tube, uselist=False,
                                          # cascade='all,delete,delete-orphan',
                                          # single_parent=True,
                                          back_populates='location',
                                          innerjoin=True,
                                          # Don't cascade save-update from
                                          # the Tube side of the backref.
                                          cascade_backrefs=False
                                          # lazy='joined'
                                          )
                   # lazy='subquery'),
               ),
               )
    return m
def _upgrade_fixture(self):
    """Map the full User/Order/Item/Keyword/Address graph and return a
    fresh session.  Note the legacy ``lazy=True`` spellings, which are
    equivalent to ``lazy='select'``."""
    users, Keyword, items, order_items, orders, Item, User, \
        Address, keywords, item_keywords, Order, addresses = \
        self.tables.users, self.classes.Keyword, self.tables.items, \
        self.tables.order_items, self.tables.orders, \
        self.classes.Item, self.classes.User, self.classes.Address, \
        self.tables.keywords, self.tables.item_keywords, \
        self.classes.Order, self.tables.addresses

    mapper(Address, addresses)
    mapper(Keyword, keywords)
    mapper(Item, items, properties=dict(
        keywords=relationship(Keyword, secondary=item_keywords,
                              lazy='select',
                              order_by=item_keywords.c.keyword_id)))
    mapper(Order, orders, properties=dict(
        items=relationship(Item, secondary=order_items, lazy=True,
                           order_by=order_items.c.item_id)))
    mapper(User, users, properties=dict(
        addresses=relationship(Address, lazy=True,
                               order_by=addresses.c.id),
        orders=relationship(Order, order_by=orders.c.id)))

    return create_session()
def test_distinct(self):
    """DISTINCT collapses the row multiplication produced by joining
    against a UNION ALL of the users table."""
    users, items, order_items, orders, \
        Item, User, Address, Order, addresses = (
            self.tables.users,
            self.tables.items,
            self.tables.order_items,
            self.tables.orders,
            self.classes.Item,
            self.classes.User,
            self.classes.Address,
            self.classes.Order,
            self.tables.addresses)

    mapper(Item, items)
    mapper(Order, orders, properties={
        'items': relationship(Item, secondary=order_items, lazy='select')
    })
    mapper(User, users, properties={
        'addresses': relationship(
            mapper(Address, addresses), lazy='select'),
        'orders': relationship(Order, lazy='select')
    })

    sess = create_session()
    q = sess.query(User)

    # use a union all to get a lot of rows to join against
    u2 = users.alias('u2')
    s = sa.union_all(
        u2.select(use_labels=True),
        u2.select(use_labels=True),
        u2.select(use_labels=True)).alias('u')

    result = q.filter(s.c.u2_id == User.id).order_by(User.id).distinct() \
        .all()
    eq_(self.static.user_all_result, result)
def _u_ad_fixture(self, populate_user, dont_use_get=False):
    """Fixture producing a flushed Address (optionally with a User).

    ``dont_use_get=True`` installs a custom primaryjoin with an extra
    condition so the many-to-one lazy load cannot take the identity-map
    "get" shortcut and must emit SQL.
    """
    users, Address, addresses, User = (
        self.tables.users,
        self.classes.Address,
        self.tables.addresses,
        self.classes.User)

    mapper(User, users, properties={
        'addresses': relationship(Address, back_populates='user')
    })
    mapper(Address, addresses, properties={
        'user': relationship(
            User,
            # The extraneous != 27 clause defeats the get() optimization.
            primaryjoin=and_(
                users.c.id == addresses.c.user_id,
                users.c.id != 27) if dont_use_get else None,
            back_populates='addresses'
        )
    })

    sess = create_session()
    a1 = Address(email_address='a1')
    sess.add(a1)
    if populate_user:
        a1.user = User(name='ed')
    sess.flush()
    if populate_user:
        sess.expire_all()
    return User, Address, sess, a1
def create_mapper(molecule_design_tbl, molecule_design_structure_tbl,
                  single_supplier_molecule_design_tbl,
                  molecule_design_gene_tbl, refseq_gene_tbl):
    "Mapper factory."
    # Short aliases for the association tables used in the joins below.
    md = molecule_design_tbl
    ssmd = single_supplier_molecule_design_tbl
    mdg = molecule_design_gene_tbl
    rsg = refseq_gene_tbl
    m = mapper(MoleculeDesign, molecule_design_tbl,
               id_attribute='molecule_design_id',
               properties=dict(
                   molecule_type=relationship(MoleculeType),
                   #, lazy='joined'),
                   chemical_structures=relationship(
                       ChemicalStructure,
                       secondary=molecule_design_structure_tbl,
                       back_populates='molecule_designs'),
                   supplier_molecule_designs=
                       relationship(SupplierMoleculeDesign,
                                    secondary=ssmd,
                                    back_populates='molecule_design'),
                   # Read-only view of associated genes; explicit join
                   # conditions because the association table has no FKs
                   # declared.
                   genes=relationship(
                       Gene, viewonly=True,
                       secondary=mdg,
                       primaryjoin=(mdg.c.molecule_design_id ==
                                    md.c.molecule_design_id),
                       secondaryjoin=(mdg.c.gene_id == rsg.c.gene_id),
                       foreign_keys=(mdg.c.molecule_design_id,
                                     mdg.c.gene_id),
                       back_populates='molecule_designs',
                       ),
               ),
               # Inheritance discriminator: the molecule type column.
               polymorphic_on=molecule_design_tbl.c.molecule_type_id,
               polymorphic_identity=MOLECULE_TYPE,
               )
    return m
def test_cycle(self):
    """Flush a graph whose two mappers reference each other through
    mirrored primaryjoins, exercising the circular dependency sort."""
    C2 = self.classes.C2
    C1 = self.classes.C1
    t2 = self.tables.t2
    t1 = self.tables.t1

    mapper(C2, t2, properties={
        'c1s': relationship(C1,
                            primaryjoin=t2.c.c1 == t1.c.c2,
                            uselist=True),
    })
    mapper(C1, t1, properties={
        'c2s': relationship(C2,
                            primaryjoin=t1.c.c1 == t2.c.c2,
                            uselist=True),
    })

    a = C1()
    b = C2()
    c = C1()
    d = C2()
    e = C2()
    f = C2()
    a.c2s.append(b)
    d.c1s.append(c)
    b.c1s.append(c)

    sess = create_session()
    sess.add_all((a, b, c, d, e, f))
    sess.flush()
def test_cycle(self):
    """
    This test has a peculiar aspect in that it doesn't create as many
    dependent relationships as the other tests, and revealed a small
    glitch in the circular dependency sorting.
    """
    person, ball, Ball, Person = (self.tables.person,
                                  self.tables.ball,
                                  self.classes.Ball,
                                  self.classes.Person)

    mapper(Ball, ball)
    # Two relationships to the same target with opposing directions:
    # 'balls' is one-to-many, 'favorite' is many-to-one, creating the
    # cycle person -> ball -> person.
    mapper(Person, person, properties=dict(
        balls=relationship(Ball,
                           primaryjoin=ball.c.person_id == person.c.id,
                           remote_side=ball.c.person_id),
        favorite=relationship(Ball,
                              primaryjoin=person.c.favorite_ball_id ==
                              ball.c.id,
                              remote_side=ball.c.id)))

    b = Ball()
    p = Person()
    p.balls.append(b)

    sess = create_session()
    sess.add(p)
    sess.flush()
def test_limit(self):
    """Limit operations combined with lazy-load relationships."""
    users, items, order_items, orders, Item, User, Address, Order, \
        addresses = (self.tables.users,
                     self.tables.items,
                     self.tables.order_items,
                     self.tables.orders,
                     self.classes.Item,
                     self.classes.User,
                     self.classes.Address,
                     self.classes.Order,
                     self.tables.addresses)

    mapper(Item, items)
    mapper(Order, orders, properties={
        'items': relationship(Item, secondary=order_items,
                              lazy='subquery', order_by=items.c.id)
    })
    mapper(User, users, properties={
        'addresses': relationship(mapper(Address, addresses),
                                  lazy='subquery',
                                  order_by=addresses.c.id),
        'orders': relationship(Order, lazy='select',
                               order_by=orders.c.id)
    })

    sess = create_session()
    q = sess.query(User)

    l = q.order_by(User.id).limit(2).offset(1).all()
    eq_(self.static.user_all_result[1:3], l)

    # Fresh session: same query descending, different window.
    sess = create_session()
    l = q.order_by(sa.desc(User.id)).limit(2).offset(2).all()
    eq_(list(reversed(self.static.user_all_result[0:2])), l)
def setup_mappers(cls):
    """Map Owner/Category/Option/Thing; Thing.owner_option uses a
    composite overlapping join, so Option's copies are viewonly."""
    Category, owners, Option, tests, Thing, Owner, options, categories = (
        cls.classes.Category,
        cls.tables.owners,
        cls.classes.Option,
        cls.tables.tests,
        cls.classes.Thing,
        cls.classes.Owner,
        cls.tables.options,
        cls.tables.categories)

    mapper(Owner, owners)
    mapper(Category, categories)

    # viewonly: these overlap with the writable relationships on Thing,
    # so only one side may flush changes.
    mapper(Option, options, properties=dict(
        owner=relationship(Owner, viewonly=True),
        test=relationship(Thing, viewonly=True)))

    mapper(Thing, tests, properties=dict(
        owner=relationship(Owner, backref='tests'),
        category=relationship(Category),
        # Composite join on (test_id, owner_id); both columns are
        # declared foreign so the direction is unambiguous.
        owner_option=relationship(
            Option,
            primaryjoin=sa.and_(
                tests.c.id == options.c.test_id,
                tests.c.owner_id == options.c.owner_id),
            foreign_keys=[options.c.test_id, options.c.owner_id],
            uselist=False)))
def test_limit(self):
    """test limit operations combined with lazy-load relationships."""
    users, items, order_items, orders, Item, \
        User, Address, Order, addresses = (
            self.tables.users,
            self.tables.items,
            self.tables.order_items,
            self.tables.orders,
            self.classes.Item,
            self.classes.User,
            self.classes.Address,
            self.classes.Order,
            self.tables.addresses)

    mapper(Item, items)
    mapper(Order, orders, properties={
        'items': relationship(Item, secondary=order_items, lazy='select')
    })
    mapper(User, users, properties={
        'addresses': relationship(
            mapper(Address, addresses), lazy='select'),
        'orders': relationship(Order, lazy='select')
    })

    sess = create_session()
    q = sess.query(User)

    # MSSQL doesn't support the OFFSET form used in the else branch.
    if testing.against('mssql'):
        result = q.limit(2).all()
        assert self.static.user_all_result[:2] == result
    else:
        result = q.limit(2).offset(1).all()
        assert self.static.user_all_result[1:3] == result
def upgrade(self):
    """Migrate the database schema from the stored version up to
    ``self._version``, then configure the classic ORM mappers.

    For each version step, an optional ``upgrade_N_DDL`` method runs
    first, the stored schema version is bumped, then an optional
    ``upgrade_N_Schema`` method runs.
    """
    self.loadVersion()
    log.warning('Upgrading Database schema from version {0} to version '
                '{1}'.format(self._schema_version, self._version))
    for version in range(1, self._version + 1):
        log.warning('Upgrading schema version to {0}'.format(version))
        schema_fun = 'upgrade_{0}_Schema'.format(version)
        ddl_fun = 'upgrade_{0}_DDL'.format(version)
        if self._schema_version < version:
            # DDL step first, if this subclass defines one.
            if hasattr(self.__class__, ddl_fun) and \
                    callable(getattr(self.__class__, ddl_fun)):
                log.warning('Executing ddl function: {0}'.format(ddl_fun))
                getattr(self, ddl_fun)()
            self._setSchemaVersion(version)
            # Then the data/schema migration step, if defined.
            if hasattr(self.__class__, schema_fun) and \
                    callable(getattr(self.__class__, schema_fun)):
                log.warning('Executing schema function: {0}'.format(
                    schema_fun))
                getattr(self, schema_fun)()
    # Classic mappings against tables reflected from self._metadata.
    mapper(FirmwareType, Table('firmware_type', self._metadata))
    mapper(Firmware, Table('firmware', self._metadata), properties={
        'type': relationship(FirmwareType)
    })
    mapper(Node, Table('node', self._metadata), properties={
        'firmware': relationship(Firmware)
    })
    mapper(Sensor, Table('sensor', self._metadata), properties={
        'node': relationship(Node)
    })
def fk_rel(cls, attrib='id', nullable=False, index=True, primary_key=False, doc=None, ondelete='CASCADE', backref=None, order_by=None): '''Returns a ForeignKey column and a relationship, while automatically setting the type of the foreign key. Usage:: # A relationship in an Address model pointing to a parent Person: person_id, person = fk_rel(Person, nullable=False, index=True, backref='addresses', ondelete='CASCADE') A backref is created only if you provide its name in the argument. ``nullable`` and ``index`` are usually ommited, because these are the default values and they are good. ``ondelete`` is "CASCADE" by default, but you can set it to "SET NULL", or None which translates to "NO ACTION" (less interesting). If provided, ``order_by`` is used on the backref. You may also pass an ``attrib`` which is the column name for the foreign key. ''' # http://docs.sqlalchemy.org/en/latest/orm/collections.html#passive-deletes if ondelete == 'CASCADE': cascade = CASC passive_deletes = True else: cascade = False # meaning "save-update, merge" passive_deletes = False return (fk(getattr(cls, attrib), nullable=nullable, index=index, primary_key=primary_key, doc=doc, ondelete=ondelete), relationship(cls, backref=_backref( backref, cascade=cascade, passive_deletes=passive_deletes, order_by=order_by)) if backref else relationship(cls))
def created_by(cls):
    """Relationship to the Account referenced by ``created_by_id``."""
    account_fk = [cls.created_by_id]
    return relationship("Account", foreign_keys=account_fk)
class PersonModel(Base):
    """Declarative model for the ``person`` table."""
    __tablename__ = 'person'
    # Primary key; named 'uuid' although typed as Integer.
    uuid = Column(Integer, primary_key=True)
    # NOTE(review): attribute name is not snake_case, but renaming would
    # break callers — kept as-is.
    Articles = relationship("ArticleModel")
class MessageQueue(db.Model):
    """
    Messages that have been received but that cannot be actioned until
    the User's public key has been unlocked (at which point they will be
    deleted).

    Fields:
        id - an integer identifier uniquely identifying the message in
             the queue
        local_id - the User receiving/sending the message
        remote_id - the Contact the message is to/from
        format - the protocol format of the payload
        body - the message payload, in a protocol-specific format
    """
    # MIME-style protocol format markers.
    INCOMING = 'application/x-diaspora-slap'
    PUBLIC_INCOMING = 'application/x-diaspora-public-slap'

    __tablename__ = 'message_queue'
    id = Column(Integer, primary_key=True)
    local_id = Column(Integer, ForeignKey('users.id'), nullable=True)
    remote_id = Column(Integer, ForeignKey('contacts.id'), nullable=True)
    format = Column(String, nullable=False)
    body = Column(LargeBinary, nullable=False)
    created_at = Column(DateTime(timezone=True), nullable=False,
                        default=func.now())
    # When processing last failed/was attempted; None = never attempted.
    last_attempted_at = Column(DateTime(timezone=True), nullable=True)
    # Traceback text of a permanent failure, if any.
    error = Column(LargeBinary, nullable=True)
    local_user = relationship('User', backref='message_queue')

    class Queries:
        # Reusable SQL filter expressions for queue queries.

        @classmethod
        def pending_items_for_user(cls, user):
            # Incoming items for this user that have not been attempted
            # within the last five minutes (back-off window).
            return and_(
                MessageQueue.format == MessageQueue.INCOMING,
                MessageQueue.local_user == user,
                or_(
                    MessageQueue.last_attempted_at == None,
                    MessageQueue.last_attempted_at <=
                    datetime.now() - timedelta(minutes=5)))

        @classmethod
        def pending_public_items(cls):
            # Public incoming items outside the five-minute back-off.
            return and_(
                MessageQueue.format == MessageQueue.PUBLIC_INCOMING,
                or_(
                    MessageQueue.last_attempted_at == None,
                    MessageQueue.last_attempted_at <=
                    datetime.now() - timedelta(minutes=5)))

    @classmethod
    def has_pending_items(cls, user):
        # Oldest pending item for the user, if any.
        first = db.session.query(cls).filter(
            cls.Queries.pending_items_for_user(user)).order_by(
            cls.created_at).first()
        # Just tried
        if first and first.last_attempted_at and \
                first.last_attempted_at > \
                datetime.now() - timedelta(minutes=5):
            return False
        # Items with a recorded error are parked, not pending.
        return bool(first and not first.error)

    @classmethod
    def process_queue(cls, query, user, max_items=None):
        # Process queued items in order, stopping at the first item with
        # a recorded permanent error.
        # NOTE(review): the 'processed > max_items' check permits
        # max_items + 1 items to be processed ('>=' would cap exactly at
        # max_items) — confirm whether the off-by-one is intended.
        processed = 0
        for qi in query:
            if qi.error:
                break
            try:
                if max_items and processed > max_items:
                    return
                qi.process_incoming(user)
            except Exception as e:
                if isinstance(e, TryLater):
                    # Transient failure: retry later, or drop if too old.
                    if qi.too_old_for_retry:
                        db.session.delete(qi)
                    else:
                        qi.last_attempted_at = datetime.now()
                        db.session.add(qi)
                else:
                    # Permanent failure: record the traceback and stop
                    # processing the rest of the queue.
                    err = format_exc()
                    qi.last_attempted_at = datetime.now()
                    qi.error = err.encode('utf-8')
                    current_app.logger.error(err)
                    db.session.add(qi)
                    return
            else:
                # Success: the queued item is consumed.
                db.session.delete(qi)
            finally:
                processed += 1
                # Commit per item so progress (and recorded errors)
                # persist even on the early-return paths above.
                db.session.commit()

    @classmethod
    def process_incoming_queue(cls, user, max_items=None):
        # Fetch this user's pending items oldest-first and process them.
        queue_items = db.session.query(MessageQueue).filter(
            cls.Queries.pending_items_for_user(user)).order_by(
            cls.created_at)
        cls.process_queue(queue_items, user, max_items)

    def process_incoming(self, user=None):
        # Local import avoids a circular dependency at module load time.
        from pyaspora.diaspora.actions import process_incoming_message
        dmp = DiasporaMessageParser(DiasporaContact.get_by_username)
        ret, c_from = dmp.decode(
            self.body.decode('ascii'),
            user._unlocked_key if user else None)
        process_incoming_message(ret, c_from, user)

    @property
    def too_old_for_retry(self):
        if not self.last_attempted_at:
            # We'll always have two tries to ensure later items get
            # looked at
            return False
        # Give up after 24 hours of retrying.
        return self.last_attempted_at > \
            self.created_at + timedelta(hours=24)
class Volume(BASE, CinderBase):
    """Represents a block storage device that can be attached to a vm."""
    __tablename__ = 'volumes'
    __table_args__ = (
        sa.Index('volumes_service_uuid_idx', 'deleted', 'service_uuid'),
        CinderBase.__table_args__,
    )

    id = sa.Column(sa.String(36), primary_key=True)
    _name_id = sa.Column(sa.String(36))  # Don't access/modify this directly!

    # TODO: (Y release) Change nullable to False
    # NOTE(review): bare ``Column`` (no ``sa.`` prefix) unlike every
    # other column here — relies on a direct import elsewhere in the
    # module; confirm.
    use_quota = Column(
        sa.Boolean,
        nullable=True,
        default=True,
        doc='Ignore volume in quota usage',
    )

    @property
    def name_id(self):
        # Prefer the explicit override when one has been set.
        return self.id if not self._name_id else self._name_id

    @name_id.setter
    def name_id(self, value):
        self._name_id = value

    @property
    def name(self):
        # Display name derived from the configured template.
        return CONF.volume_name_template % self.name_id

    ec2_id = sa.Column(sa.String(255))
    user_id = sa.Column(sa.String(255))
    project_id = sa.Column(sa.String(255))

    snapshot_id = sa.Column(sa.String(36))

    cluster_name = sa.Column(sa.String(255), nullable=True)
    host = sa.Column(sa.String(255))  # , sa.ForeignKey('hosts.id'))
    size = sa.Column(sa.Integer)
    availability_zone = sa.Column(sa.String(255))  # TODO(vish): foreign key?
    status = sa.Column(sa.String(255))  # TODO(vish): enum?
    attach_status = sa.Column(sa.String(255))  # TODO(vish): enum
    migration_status = sa.Column(sa.String(255))

    scheduled_at = sa.Column(sa.DateTime)
    launched_at = sa.Column(sa.DateTime)
    terminated_at = sa.Column(sa.DateTime)

    display_name = sa.Column(sa.String(255))
    display_description = sa.Column(sa.String(255))

    # Backend/driver-specific bookkeeping.
    provider_location = sa.Column(sa.String(256))
    provider_auth = sa.Column(sa.String(256))
    provider_geometry = sa.Column(sa.String(255))
    provider_id = sa.Column(sa.String(255))

    volume_type_id = sa.Column(sa.String(36), nullable=False)
    source_volid = sa.Column(sa.String(36))
    encryption_key_id = sa.Column(sa.String(36))

    consistencygroup_id = sa.Column(
        sa.String(36),
        sa.ForeignKey('consistencygroups.id'),
        index=True,
    )
    group_id = sa.Column(
        'group_id',
        sa.String(36),
        sa.ForeignKey('groups.id'),
        index=True,
    )

    bootable = sa.Column(sa.Boolean, default=False)
    multiattach = sa.Column(sa.Boolean, default=False)

    replication_status = sa.Column(sa.String(255))
    replication_extended_status = sa.Column(sa.String(255))
    replication_driver_data = sa.Column(sa.String(255))

    previous_status = sa.Column(sa.String(255))

    consistencygroup = relationship(
        ConsistencyGroup,
        backref="volumes",
        foreign_keys=consistencygroup_id,
        primaryjoin='Volume.consistencygroup_id == ConsistencyGroup.id',
    )
    group = relationship(
        Group,
        backref="volumes",
        foreign_keys=group_id,
        primaryjoin='Volume.group_id == Group.id',
    )

    service_uuid = sa.Column(
        sa.String(36),
        sa.ForeignKey('services.uuid'),
        nullable=True,
    )
    service = relationship(
        Service,
        backref="volumes",
        foreign_keys=service_uuid,
        primaryjoin='Volume.service_uuid == Service.uuid',
    )  # make an FK of service?
    shared_targets = sa.Column(sa.Boolean, default=True)
class User(BaseModel):
    """Application user account, backed one-to-one by a Profile row and
    holding the bcrypt password hash and the friendship graph."""
    profile_id = db.Column(UUID(),
                           db.ForeignKey('profile.id', ondelete='CASCADE'),
                           nullable=False)
    # One-to-one: a Profile has at most one User (uselist=False backref).
    profile = relationship("Profile",
                           backref=backref("user", uselist=False))
    email = db.Column(db.String, unique=True)
    password_hash = db.Column(db.String)
    is_active = db.Column(db.Boolean, default=False)
    admin = db.Column(db.Boolean, default=False)
    last_login = db.Column(db.DateTime(timezone=True),
                           server_default=func.now())
    # Self-referential many-to-many through the friendships table.
    friends = relationship(
        "User",
        secondary=friendships,
        primaryjoin="User.id == friendships.c.user_id",
        secondaryjoin="User.id == friendships.c.friend_id",
        backref="friended_you")

    def __repr__(self):
        return self.email

    @classmethod
    def create(cls, email, password=None):
        """Create a new user (and their profile); raise on duplicates."""
        user = User.query.filter_by(email=email).first()
        if user:
            raise Exception('duplicate user create')

        # Default username is the local part of the email address.
        username = email.split('@')[0]
        profile = Profile(
            username=username
        )
        user = User(
            email=email,
            profile=profile,
        )
        user.set_password(password)
        db.session.add(user)
        db.session.commit()
        return user

    def set_password(self, password):
        """Hash ``password`` with bcrypt and store it on the instance."""
        if not password:
            # Generate a random password for social users.
            print("Generating random password for %s" % self.email)
            password = bcrypt.gensalt().decode('utf-8')
            print('Password length', len(password))
        hashed = bcrypt.hashpw(password.encode('utf-8'), bcrypt.gensalt())
        # This needs to be stored as a string, not bytes.
        self.password_hash = hashed.decode('utf-8')

    def check_password(self, password_attempt):
        """Return True when ``password_attempt`` matches the stored hash."""
        # NOTE(review): plain '==' is not a constant-time comparison;
        # hmac.compare_digest would be preferable — confirm.
        match = bcrypt.hashpw(
            password_attempt.encode('utf-8'),
            self.password_hash.encode('utf-8')) == \
            self.password_hash.encode('utf-8')
        if match:
            return True
        print('Password check failed')
        return False

    # For flask login
    @property
    def is_authenticated(self):
        return True

    def get_id(self):
        # Flask-Login requires a string identifier.
        return str(self.id)

    def get_preferred_timezone(self):
        # TODO should get from profile.
        # local_tz = pytz.timezone(current_user.profile.timezone)
        return pytz.timezone('Pacific/Auckland')
class Query(Model, ExtraJSONMixin):
    """ORM model for SQL query

    Now that SQL Lab support multi-statement execution, an entry in this
    table may represent multiple SQL statements executed sequentially"""

    __tablename__ = 'query'
    id = Column(Integer, primary_key=True)
    # Id generated by the client for tracking the query across requests.
    client_id = Column(String(11), unique=True, nullable=False)
    database_id = Column(Integer, ForeignKey('dbs.id'), nullable=False)
    # Store the tmp table into the DB only if the user asks for it.
    tmp_table_name = Column(String(256))
    user_id = Column(Integer, ForeignKey('ab_user.id'), nullable=True)
    status = Column(String(16), default=QueryStatus.PENDING)
    tab_name = Column(String(256))
    sql_editor_id = Column(String(256))
    schema = Column(String(256))
    sql = Column(Text)
    # Query to retrieve the results,
    # used only in case of select_as_cta_used is true.
    select_sql = Column(Text)
    executed_sql = Column(Text)
    # Could be configured in the superset config.
    limit = Column(Integer)
    limit_used = Column(Boolean, default=False)
    select_as_cta = Column(Boolean)
    select_as_cta_used = Column(Boolean, default=False)
    progress = Column(Integer, default=0)  # 1..100
    # # of rows in the result set or rows modified.
    rows = Column(Integer)
    error_message = Column(Text)
    # key used to store the results in the results backend
    results_key = Column(String(64), index=True)
    # connection_id stores id of an underlying connection to database
    connection_id = Column(Integer, nullable=True)

    # Using Numeric in place of DateTime for sub-second precision
    # stored as seconds since epoch, allowing for milliseconds
    start_time = Column(Numeric(precision=20, scale=6))
    start_running_time = Column(Numeric(precision=20, scale=6))
    end_time = Column(Numeric(precision=20, scale=6))
    end_result_backend_time = Column(Numeric(precision=20, scale=6))
    tracking_url = Column(Text)

    changed_on = Column(
        DateTime,
        default=datetime.utcnow, onupdate=datetime.utcnow, nullable=True)

    database = relationship(
        'Database',
        foreign_keys=[database_id],
        backref=backref('queries', cascade='all, delete-orphan'))
    user = relationship(security_manager.user_model, foreign_keys=[user_id])

    __table_args__ = (
        sqla.Index('ti_user_id_changed_on', user_id, changed_on),
    )

    @property
    def limit_reached(self):
        # Only meaningful when a LIMIT was actually applied to the query.
        return self.rows == self.limit if self.limit_used else False

    def to_dict(self):
        """Serialize the query for the SQL Lab frontend (camelCase keys)."""
        return {
            'changedOn': self.changed_on,
            'changed_on': self.changed_on.isoformat(),
            'dbId': self.database_id,
            'db': self.database.database_name,
            'endDttm': self.end_time,
            'errorMessage': self.error_message,
            'executedSql': self.executed_sql,
            'id': self.client_id,
            'limit': self.limit,
            'progress': self.progress,
            'rows': self.rows,
            'schema': self.schema,
            'ctas': self.select_as_cta,
            'serverId': self.id,
            'sql': self.sql,
            'sqlEditorId': self.sql_editor_id,
            'startDttm': self.start_time,
            'state': self.status.lower(),
            'tab': self.tab_name,
            'tempTable': self.tmp_table_name,
            'userId': self.user_id,
            'user': user_label(self.user),
            'limit_reached': self.limit_reached,
            'resultsKey': self.results_key,
            'trackingUrl': self.tracking_url,
            'extra': self.extra,
        }

    @property
    def name(self):
        """Name property"""
        # Timestamp compacted to e.g. 20240101T120000 (no dashes/colons).
        ts = datetime.now().isoformat()
        ts = ts.replace('-', '').replace(':', '').split('.')[0]
        tab = (self.tab_name.replace(' ', '_').lower()
               if self.tab_name else 'notab')
        tab = re.sub(r'\W+', '', tab)
        return f'sqllab_{tab}_{ts}'

    @property
    def database_name(self):
        return self.database.name

    @property
    def username(self):
        return self.user.username
class DiasporaContact(db.Model):
    """Diaspora-protocol identity attached one-to-one to a local Contact."""

    __tablename__ = 'diaspora_contacts'
    contact_id = Column(Integer, ForeignKey('contacts.id'), primary_key=True)
    guid = Column(String, nullable=False, unique=True)
    username = Column(String, nullable=False, unique=True)
    server = Column(String, nullable=False)
    contact = relationship('Contact', single_parent=True,
                           backref=backref('diasp', uselist=False))

    @classmethod
    def get_for_contact(cls, contact, commit=True):
        """Return (creating if needed) the Diaspora identity for a local contact."""
        if contact.diasp:
            return contact.diasp
        assert (contact.user)
        hostname = urlsplit(request.url)[1]
        server = urlunsplit(list(urlsplit(request.url)[0:2]) + ['/', '', ''])
        diasp = cls(server=server, guid=str(uuid4()),
                    username="******".format(contact.user.id, hostname),
                    contact=contact)
        db.session.add(diasp)
        if commit:
            db.session.commit()
        return diasp

    @classmethod
    def get_by_guid(cls, guid):
        return db.session.query(cls).filter(cls.guid == guid).first()

    @classmethod
    def get_by_username(cls, addr, import_contact=True, commit=True):
        """Look up a Diaspora contact by address, optionally importing it.

        Returns None when no local row exists and importing is disabled
        (or the import fails).
        """
        dcontact = db.session.query(DiasporaContact).filter(
            cls.username == addr).first()
        if dcontact:
            return dcontact
        # BUGFIX: `contact` was previously unbound when import_contact was
        # False, raising UnboundLocalError instead of returning None.
        contact = None
        if import_contact:
            contact = cls.import_contact(addr)
            if commit:
                db.session.commit()
        return contact

    @classmethod
    def import_contact(cls, addr):
        """
        Fetch information about a Diaspora user and import it into the
        Contact provided.
        """
        try:
            wf = WebfingerRequest(addr).fetch()
        except URLError as e:
            current_app.logger.warning(e)
            # NOTE(review): URLError does not always expose readlines();
            # only HTTP error responses carry a body — confirm.
            current_app.logger.warning(e.readlines())
            return None
        if not wf:
            return None
        NS = {'XRD': 'http://docs.oasis-open.org/ns/xri/xrd-1.0'}
        c = Contact()
        pk = wf.xpath('//XRD:Link[@rel="diaspora-public-key"]/@href',
                      namespaces=NS)[0]
        c.public_key = b64decode(pk).decode("ascii")
        hcard_url = wf.xpath(
            '//XRD:Link[@rel="http://microformats.org/profile/hcard"]/@href',
            namespaces=NS)[0]
        req = Request(hcard_url)
        req.add_header('User-Agent', USER_AGENT)
        hcard = html.parse(urlopen(req, timeout=10))
        c.realname = hcard.xpath('//*[@class="fn"]')[0].text
        pod_loc = hcard.xpath('//*[@id="pod_location"]')[0].text
        photo_url = hcard.xpath('//*[@class="entity_photo"]//img/@src')[0]
        if photo_url:
            # Avatar import is best-effort; failures are only logged.
            # BUGFIX: narrowed bare `except:` so KeyboardInterrupt/SystemExit
            # are no longer swallowed.
            try:
                mp = import_url_as_mimepart(urljoin(pod_loc, photo_url))
            except Exception:
                current_app.logger.debug(format_exc())
            else:
                mp.text_preview = u'(picture for {0})'.format(
                    c.realname or '(anonymous)')
                c.avatar = mp
        username = wf.xpath('//XRD:Subject/text()',
                            namespaces=NS)[0].split(':')[1]
        guid = wf.xpath(".//XRD:Link[@rel='http://joindiaspora.com/guid']",
                        namespaces=NS)[0].get("href")
        server = wf.xpath(
            ".//XRD:Link[@rel='http://joindiaspora.com/seed_location']",
            namespaces=NS)[0].get("href")
        d = cls(contact=c, guid=guid, username=username, server=server)
        db.session.add(d)
        db.session.add(c)
        # Best-effort import of the user's public posts (see BUGFIX above).
        try:
            d.import_public_posts()
        except Exception:
            current_app.logger.debug(format_exc())
        return d

    def photo_url(self):
        """
        Diaspora requires all contacts have pictures, even if they
        haven't chosen one. This call returns a default if a picture
        hasn't been uploaded.
        """
        if self.contact.avatar:
            return url_for('contacts.avatar', contact_id=self.contact_id,
                           _external=True)
        else:
            return url_for('static', filename='nophoto.png', _external=True)

    def import_public_posts(self):
        """
        Load the JSON of public posts for this user and create local
        posts from them.
        """
        url = self.server + 'people/{0}'.format(self.guid)
        req = Request(url)
        req.add_header('User-Agent', USER_AGENT)
        req.add_header('Accept', 'application/json')
        entries = json_load(urlopen(req, timeout=10))

        if isinstance(entries, dict):
            return  # Faulty node?

        for entry in entries:
            user_guid = entry['author']['guid']
            username = entry['author']['diaspora_id']
            user = self if self.username == username \
                else DiasporaContact.get_by_username(username, commit=False)
            if not user or user.guid != user_guid:
                continue
            post_guid = entry['guid']
            existing_post = DiasporaPost.get_by_guid(post_guid)
            if existing_post or not entry['public']:
                continue  # Already imported

            if entry.get('root'):
                # Reply: only import when the thread root is known locally.
                root_guid = entry['root']['guid']
                root_post = DiasporaPost.get_by_guid(root_guid)
                if root_post:
                    parent = root_post.post
                else:
                    continue  # Cannot find parent
            else:
                parent = None

            post = Post(author=user.contact, parent=parent)
            db.session.add(post)

            post.created_at = datetime.strptime(entry['created_at'],
                                                '%Y-%m-%dT%H:%M:%SZ')
            post.thread_modified(when=datetime.strptime(
                entry['interacted_at'], '%Y-%m-%dT%H:%M:%SZ'))

            post.add_part(MimePart(type='text/x-markdown',
                                   body=entry['text'].encode('utf-8'),
                                   text_preview=entry['text']),
                          inline=True, order=0)
            post.share_with([user.contact], show_on_wall=True)
            post.diasp = DiasporaPost(guid=post_guid, type='public')
class DiasporaPost(db.Model):
    """Diaspora-protocol metadata attached one-to-one to a local Post."""

    __tablename__ = 'diaspora_posts'
    post_id = Column(Integer, ForeignKey('posts.id'), primary_key=True)
    guid = Column(String, nullable=False, unique=True)
    # Visibility: 'public', 'private' or 'limited'; None until first send.
    type = Column(String, nullable=True)
    post = relationship('Post', single_parent=True,
                        backref=backref('diasp', uselist=False))

    @classmethod
    def get_for_post(cls, post, commit=True):
        """Return (creating if needed) the Diaspora metadata for a local post."""
        if post.diasp:
            return post.diasp
        assert (post.author.user)
        diasp = cls(guid=str(uuid4()), post=post)
        db.session.add(diasp)
        if commit:
            db.session.commit()
        return diasp

    @classmethod
    def get_by_guid(cls, guid):
        return db.session.query(cls).filter(cls.guid == guid).first()

    def as_text(self):
        """Flatten the post's parts into markdown text, appending tags."""
        json = json_post(self.post, children=False)
        text = "\n\n".join([p['body']['text'] for p in json['parts']])
        if self.post.tags:
            text += '\n( ' + ' '.join('#{0}'.format(t.name)
                                      for t in self.post.tags) + ' )'
        return text

    def send_to(self, targets, private=False):
        """Deliver this post to the given target contacts.

        Visibility is sticky: a post already sent keeps its type, replies
        inherit the thread root's type, otherwise it is decided from the
        author's own share and the `private` flag.
        """
        # Imported here to avoid a circular import at module load time.
        from pyaspora.diaspora.actions import PostMessage, PrivateMessage, \
            SubPost, SubPM

        post = self.post
        assert (post.author.user)
        self_share = post.shared_with(post.author)
        assert (self_share)

        if self.type:
            # Sent before, must keep same type
            private = (self.type == 'private')
            public = (self.type == 'public')
        elif post.parent and post.root().diasp and post.root().diasp.type:
            # Reply must be of same type
            root_diasp = post.root().diasp
            private = (root_diasp.type == 'private')
            public = (root_diasp.type == 'public' and self_share.public)
        else:
            # Decide on visibility
            public = self_share.public

        if public:
            private = False
            self.type = 'public'
        elif private:
            self.type = 'private'
        else:
            self.type = 'limited'

        text = self.as_text()

        # Message class depends on visibility and whether this is a reply.
        senders = {
            'private': {
                'parent': PrivateMessage,
                'child': SubPM,
            },
            'public': {
                'parent': PostMessage,
                'child': SubPost,
            }
        }
        sender = senders['private' if private else 'public']
        sender = sender['child' if post.parent else 'parent']

        if public:
            # De-dupe by server
            targets = dict((c.diasp.server, c) for c in targets)
            for target in targets.values():
                sender.send_public(post.author.user, target, post=post,
                                   text=text)
        else:
            # Can only send to followers
            followers = set([c.id for c in post.author.followers()])
            targets = [t for t in targets if t.id in followers]
            for target in targets:
                sender.send(post.author.user, target, post=post, text=text)

    def reshare(self, targets, reshared_post):
        """
        DiasporaPost <self> is a reshare of Post <reshared_post>, and we
        need to notify Contact <targets> that this has been reshared.
        Unfortunately the protocol doesn't permit us to send the
        accompanying message.
        """
        from pyaspora.diaspora.actions import Reshare

        # De-dupe by server
        targets = dict((c.diasp.server, c) for c in targets)
        for target in targets.values():
            Reshare.send_public(self.post.author.user, target,
                                post=self.post, reshare=reshared_post)

    def can_reply_with(self, target):
        """Return whether the given reply target is valid for this post's type."""
        if target.name == 'self':
            return True
        if self.type and self.type == 'public':
            return target.name == 'wall'
        else:
            return target.name == 'existing'
class DeviceTable(Base, BaseMixin):
    """Registered device; one device owns many scans (ScanTable.device)."""
    name = Column(String(80))
    # `klass` (not `class`) to avoid the Python keyword.
    klass = Column(String(80))
    scans = relationship('ScanTable', backref='device')
class Article(db.Model):
    """User-authored article with plain-text content."""
    id = db.Column(db.Integer, primary_key=True)
    author_id = db.Column(db.Integer, db.ForeignKey(User.id), nullable=False)
    author = relationship(User)
    content = db.Column(db.Text)
class Batch(ModelBase):
    """
    Represents a batch of transactions, which can be executed on the
    ledger atomically. Use build_ledger_batch() to build a Batch object
    from the origin_ledger_sdk library.

    Transactions are executed in the order that are added using
    add_transaction().

    Invoke lifecycle hooks to synchronize the database with the batch
    status:

    - Invoke on_begin() immediately after creating the batch, before
      inserting it into the database
    - Invoke on_submitted() once the batch has been submitted to the ledger
    - Invoke on_commit() once/if the batch has been completed on the ledger
    - Invoke on_rollback() once/if the batch has been declined on the ledger
    """
    __tablename__ = 'ledger_batch'

    id = sa.Column(sa.Integer(), primary_key=True, index=True)
    created = sa.Column(sa.DateTime(timezone=True),
                        server_default=sa.func.now())
    state: BatchState = sa.Column(sa.Enum(BatchState), nullable=False)

    # Time when batch was LAST submitted to ledger (if at all)
    submitted = sa.Column(sa.DateTime(timezone=True), nullable=True)

    # Relationships
    user_id = sa.Column(sa.Integer(), sa.ForeignKey('auth_user.id'),
                        index=True, nullable=False)
    user = relationship('User', foreign_keys=[user_id])
    transactions = relationship('Transaction', back_populates='batch',
                                uselist=True,
                                order_by='asc(Transaction.order)')

    # The handle returned by the ledger used to enquiry for status
    handle = sa.Column(sa.String())

    # How many times the ledger has been polled, asking for batch status
    poll_count = sa.Column(sa.Integer(), nullable=False, default=0)

    def add_transaction(self, transaction):
        """
        Append a transaction; its `order` is its position in the batch.

        :param Transaction transaction:
        """
        transaction.order = len(self.transactions)
        self.transactions.append(transaction)

    def add_all_transactions(self, transactions):
        """
        :param collections.abc.Iterable[Transaction] transactions:
        """
        for transaction in transactions:
            self.add_transaction(transaction)

    def on_begin(self):
        # Propagate the lifecycle event to every member transaction.
        self.state = BatchState.PENDING

        for transaction in self.transactions:
            transaction.on_begin()

    def on_submitted(self, handle):
        """
        :param str handle:
        """
        self.state = BatchState.SUBMITTED
        self.handle = handle
        self.submitted = func.now()

    def on_commit(self):
        self.state = BatchState.COMPLETED

        for transaction in self.transactions:
            transaction.on_commit()

    def on_rollback(self):
        self.state = BatchState.DECLINED

        session = Session.object_session(self)

        # Roll back in reverse order so later transactions (which may
        # depend on earlier ones) are undone first.
        for transaction in reversed(self.transactions):
            transaction.on_rollback()
            session.delete(transaction)

    def build_ledger_batch(self):
        """
        :rtype: ols.Batch
        """
        batch = ols.Batch(self.user.key.PrivateKey())

        for transaction in self.transactions:
            batch.add_request(transaction.build_ledger_request())

        return batch
class RetireTransaction(Transaction):
    """
    Retires parent_ggo to the provided measurement of the provided
    meteringpoint. The sum of the target GGOs must be equal to the
    parent_ggo's amount.
    """
    __abstract__ = False
    __tablename__ = 'ledger_transaction'
    __mapper_args__ = {'polymorphic_identity': 'retire'}
    __table_args__ = (
        {'extend_existing': True},
    )

    # The begin of the measurement
    begin = sa.Column(sa.DateTime(timezone=True))

    # The meteringpoint which the measurement were published to
    meteringpoint_id = sa.Column(sa.Integer(),
                                 sa.ForeignKey('accounts_meteringpoint.id'))
    meteringpoint = relationship('MeteringPoint',
                                 foreign_keys=[meteringpoint_id])

    # Ledger address of the measurement to retire GGO to
    measurement_address = sa.Column(sa.String())

    @staticmethod
    def build(ggo, meteringpoint, measurement_address):
        """
        Retires the provided GGO to the measurement at the
        provided address on the provided meteringpoint.

        :param Ggo ggo:
        :param MeteringPoint meteringpoint:
        :param str measurement_address:
        :rtype: RetireTransaction
        """
        # Mark the GGO with where it is being retired to before building
        # the transaction row.
        ggo.retire_gsrn = meteringpoint.gsrn
        ggo.retire_address = measurement_address

        return RetireTransaction(
            parent_ggo=ggo,
            begin=ggo.begin,
            meteringpoint=meteringpoint,
            measurement_address=measurement_address,
        )

    def on_begin(self):
        # Lock the GGO while the retire is in flight on the ledger.
        self.parent_ggo.stored = False
        self.parent_ggo.retired = True
        self.parent_ggo.locked = True
        self.parent_ggo.synchronized = False

    def on_commit(self):
        # Retire confirmed: unlock and mark as synchronized with the ledger.
        self.parent_ggo.stored = False
        self.parent_ggo.retired = True
        self.parent_ggo.locked = False
        self.parent_ggo.synchronized = True

    def on_rollback(self):
        # Retire declined: restore the GGO to its stored, unretired state.
        self.parent_ggo.stored = True  # TODO test this
        self.parent_ggo.retired = False
        self.parent_ggo.locked = False
        self.parent_ggo.synchronized = True
        self.parent_ggo.retire_gsrn = None  # TODO test this
        self.parent_ggo.retire_address = None  # TODO test this

    def build_ledger_request(self):
        """
        :rtype: ols.RetireGGORequest
        """
        measurement_key = KeyGenerator.get_key_for_measurement(
            self.meteringpoint, self.begin)

        settlement_address = ols.generate_address(
            ols.AddressPrefix.SETTLEMENT, measurement_key.PublicKey())

        return ols.RetireGGORequest(
            settlement_address=settlement_address,
            measurement_address=self.measurement_address,
            measurement_private_key=measurement_key.PrivateKey(),
            parts=[
                ols.RetireGGOPart(
                    address=ols.generate_address(
                        ols.AddressPrefix.GGO,
                        self.parent_ggo.key.PublicKey()),
                    private_key=self.parent_ggo.key.PrivateKey(),
                )
            ],
        )
def parent_ggo(cls):
    """Declared relationship to the parent Ggo, joined on ``parent_ggo_id``."""
    rel = relationship('Ggo', foreign_keys=[cls.parent_ggo_id])
    return rel
class SplitTransaction(Transaction):
    """
    Splits parent_ggo into multiple new GGOs. The sum of the target GGOs
    must be equal to the parent_ggo's amount.
    """
    __abstract__ = False
    __tablename__ = 'ledger_transaction'
    __mapper_args__ = {'polymorphic_identity': 'split'}
    __table_args__ = (
        {'extend_existing': True},
    )

    # The target GGOs (children)
    targets = relationship('SplitTarget', back_populates='transaction',
                           uselist=True)

    def add_target(self, ggo, reference=None):
        """
        :param Ggo ggo:
        :param str reference:
        """
        self.targets.append(SplitTarget(
            transaction=self,
            reference=reference,
            ggo=ggo,
        ))

    def on_begin(self):
        # Invariants: targets conserve the parent's amount, and the parent
        # must be stored, unretired, unlocked and in sync with the ledger.
        assert sum(t.ggo.amount for t in self.targets) == \
            self.parent_ggo.amount
        assert self.parent_ggo.stored is True
        assert self.parent_ggo.retired is False
        assert self.parent_ggo.locked is False
        assert self.parent_ggo.synchronized is True

        # Lock parent and children while the split is in flight.
        self.parent_ggo.stored = False
        self.parent_ggo.locked = True
        self.parent_ggo.synchronized = False

        for target in self.targets:
            target.ggo.stored = True
            target.ggo.locked = True
            target.ggo.synchronized = False

    def on_commit(self):
        # Split confirmed: unlock everything and mark it synchronized.
        self.parent_ggo.stored = False
        self.parent_ggo.locked = False
        self.parent_ggo.synchronized = True

        for target in self.targets:
            target.ggo.stored = True
            target.ggo.locked = False
            target.ggo.synchronized = True

    def on_rollback(self):
        # Split declined: restore the parent and discard the child GGOs.
        self.parent_ggo.stored = True
        self.parent_ggo.locked = False
        self.parent_ggo.synchronized = True

        session = Session.object_session(self)

        # NOTE(review): target.ggo is read after session.delete(target);
        # SQLAlchemy keeps the object usable until flush, but confirm the
        # intended delete ordering.
        for target in self.targets:
            session.delete(target)
            session.delete(target.ggo)

    def build_ledger_request(self):
        """
        :rtype: ols.SplitGGORequest
        """
        parts = []

        for target in self.targets:
            parts.append(ols.SplitGGOPart(
                address=target.ggo.address,
                amount=target.ggo.amount,
            ))

        return ols.SplitGGORequest(
            source_private_key=self.parent_ggo.key.PrivateKey(),
            source_address=self.parent_ggo.address,
            parts=parts,
        )
def _setup_stock_mapping(cls): ( Node, composite_pk_table, users, Keyword, items, Dingaling, order_items, item_keywords, Item, User, dingalings, Address, keywords, CompositePk, nodes, Order, orders, addresses, ) = ( cls.classes.Node, cls.tables.composite_pk_table, cls.tables.users, cls.classes.Keyword, cls.tables.items, cls.classes.Dingaling, cls.tables.order_items, cls.tables.item_keywords, cls.classes.Item, cls.classes.User, cls.tables.dingalings, cls.classes.Address, cls.tables.keywords, cls.classes.CompositePk, cls.tables.nodes, cls.classes.Order, cls.tables.orders, cls.tables.addresses, ) # use OrderedDict on this one to support some tests that # assert the order of attributes (e.g. orm/test_inspect) cls.mapper_registry.map_imperatively( User, users, properties=util.OrderedDict([ ( "addresses", relationship(Address, backref="user", order_by=addresses.c.id), ), ( "orders", relationship(Order, backref="user", order_by=orders.c.id), ), # o2m, m2o ]), ) cls.mapper_registry.map_imperatively( Address, addresses, properties={ # o2o "dingaling": relationship(Dingaling, uselist=False, backref="address") }, ) cls.mapper_registry.map_imperatively(Dingaling, dingalings) cls.mapper_registry.map_imperatively( Order, orders, properties={ # m2m "items": relationship(Item, secondary=order_items, order_by=items.c.id), "address": relationship(Address), # m2o }, ) cls.mapper_registry.map_imperatively( Item, items, properties={ "keywords": relationship(Keyword, secondary=item_keywords) # m2m }, ) cls.mapper_registry.map_imperatively(Keyword, keywords) cls.mapper_registry.map_imperatively( Node, nodes, properties={ "children": relationship(Node, backref=backref("parent", remote_side=[nodes.c.id])) }, ) cls.mapper_registry.map_imperatively(CompositePk, composite_pk_table) configure_mappers()
def batch(cls):
    """Declared relationship back to the owning Batch via ``batch_id``."""
    rel = relationship('Batch',
                       foreign_keys=[cls.batch_id],
                       back_populates='transactions')
    return rel
class Student(Base):
    """
    "Students" model.

    Status: in progress.
    """
    __tablename__ = 'univer_students'
    __table_args__ = {
        'implicit_returning': False
    }

    # Identifier
    id = Column('students_id', Integer, primary_key=True)

    # User account
    user_id = Column('user_id', ForeignKey('univer_users.user_id'))
    user = relationship('User')

    # Status
    status = Column('status', Integer)

    # Registration date
    reg_date = Column('student_reg_date', DateTime)

    # Stage of study
    stage_id = Column(ForeignKey('univer_stage.stage_id'))
    stage = relationship('Stage')

    # Education level
    edu_level_id = Column('edu_levels_id',
                          ForeignKey('univer_edu_levels.edu_level_id'))
    edu_level = relationship('EduLevel')

    # Form of education
    education_form_id = Column(
        ForeignKey('univer_education_form.education_form_id'))
    education_form = relationship('EducationForm')

    # Enrollment type
    enrollment_type_id = Column(
        ForeignKey('univer_enrollment_type.enrollment_type_id'))
    enrollment_type = relationship('EnrollmentType')

    # Payment type
    payment_form_id = Column('payment_forms_id',
                             ForeignKey('univer_payment_forms.payment_form_id'))
    payment_form = relationship('PaymentForm')

    # Country
    country_id = Column(ForeignKey('univer_country.country_id'))
    country = relationship('Country')

    # Sex
    sex = Column('students_sex', Integer)

    # Marital status
    marital_status = Column('students_marital_status', Integer)

    # Date of birth
    birth_date = Column('students_birth_date', DateTime)

    # Student's full name
    last_name = Column('students_sname', String(100))
    first_name = Column('students_name', String(100))
    middle_name = Column('students_father_name', String(100))

    # Student's name in the dative case (Russian)
    dative_last_name_ru = Column('students_dative_sname_ru', String(100))
    dative_first_name_ru = Column('students_dative_name_ru', String(100))
    dative_middle_name_ru = Column('students_dative_father_name_ru',
                                   String(100))

    # Student's name in the dative case (Kazakh)
    dative_last_name_kz = Column('students_dative_sname_kz', String(100))
    dative_first_name_kz = Column('students_dative_name_kz', String(100))
    dative_middle_name_kz = Column('students_dative_father_name_kz',
                                   String(100))

    # Student's name in the dative case (English)
    dative_last_name_en = Column('students_dative_sname_en', String(100))
    dative_first_name_en = Column('students_dative_name_en', String(100))
    dative_middle_name_en = Column('students_dative_father_name_en',
                                   String(100))

    # Transliterated surname and first name
    last_name_translit = Column('students_sname_intern', String(100))
    first_name_translit = Column('students_name_intern', String(100))

    # Email address
    email = Column('students_email', String(25))

    # Course (year of study); column name 'curce' is a legacy typo in the DB.
    course = Column('students_curce_number', Integer)

    # Identity document
    document_identity_type_id = Column(
        'students_document_identity_type',
        ForeignKey('univer_document_identity.document_identity_type'))
    document_identity_type = relationship('DocumentIdentity')
    document_identity_number = Column('students_document_identity_number',
                                      String(50))
    document_identity_date = Column('students_document_identity_date',
                                    DateTime)
    document_identity_issued = Column('students_document_identity_issued',
                                      String(100))

    # Student's IIN (individual identification number)
    identify_code = Column('students_identify_code', String(50))

    # Data on the institution completed before entering the university
    graduate_info_id = Column(
        ForeignKey('univer_graduate_info.graduate_info_id'))
    graduate_info = relationship('GraduateInfo')

    # Faculty
    faculty_id = Column(ForeignKey('univer_faculty.faculty_id'))
    faculty = relationship('Faculty')

    # Educational programme (speciality)
    speciality_id = Column(ForeignKey('univer_speciality.speciality_id'))
    speciality = relationship('Speciality')

    # First year the curriculum took effect
    educ_plan_adm_year = Column(Integer)

    @property
    def payment_info_ru(self):
        # Russian wording for the payment basis; returns None for other ids.
        if self.payment_form_id == 2:
            return 'на платной основе'
        elif self.payment_form_id == 5:
            return 'на основе государственного образовательного гранта'

    @property
    def payment_info_kz(self):
        # Kazakh wording for the payment basis; returns None for other ids.
        if self.payment_form_id == 2:
            return 'ақылы негізде'
        elif self.payment_form_id == 5:
            return 'мемлекеттік білім беру гранты негізінде'

    @property
    def edu_level_info_ru(self):
        # Russian wording for the education level; returns None for other ids.
        if self.edu_level_id == 1:
            return '(бакалавриат, 4 года)'
        elif self.edu_level_id == 3:
            return 'по сокращенной образовательной программе на базе среднего профессионального образования'
        elif self.edu_level_id == 2:
            return 'по сокращенной образовательной программе на базе высшего образования'

    @property
    def edu_level_info_kz(self):
        # Kazakh wording for the education level; returns None for other ids.
        if self.edu_level_id == 1:
            return '(бакалавриат, 4 жыл)'
        elif self.edu_level_id == 3:
            return 'орта кәсіптік білім негізінде қысқартылған білім беру бағдарламасы бойынша күндізгі білім беру нысаны'
        elif self.edu_level_id == 2:
            return 'жоғары білім негізінде қысқартылған білім беру бағдарламасы бойынша күндізгі білім беру нысаны'

    @property
    def dative_full_name_ru(self):
        # Fall back to the nominative name parts when a dative form is absent.
        dative_last_name_ru = self.dative_last_name_ru if self.dative_last_name_ru else self.last_name
        dative_first_name_ru = self.dative_first_name_ru if self.dative_first_name_ru else self.first_name
        dative_middle_name_ru = self.dative_middle_name_ru if self.dative_middle_name_ru else self.middle_name
        return ' '.join(filter(None, [dative_last_name_ru, dative_first_name_ru, dative_middle_name_ru]))

    @property
    def dative_full_name_kz(self):
        # Fall back to the nominative name parts when a dative form is absent.
        dative_last_name_kz = self.dative_last_name_kz if self.dative_last_name_kz else self.last_name
        dative_first_name_kz = self.dative_first_name_kz if self.dative_first_name_kz else self.first_name
        dative_middle_name_kz = self.dative_middle_name_kz if self.dative_middle_name_kz else self.middle_name
        return ' '.join(filter(None, [dative_last_name_kz, dative_first_name_kz, dative_middle_name_kz]))

    @property
    def dative_full_name_en(self):
        return ' '.join(filter(None, [self.dative_last_name_en, self.dative_first_name_en, self.dative_middle_name_en]))

    def __repr__(self):
        return '<Student {} (id={} user={} status={})>'.format(self, self.id, self.user_id, self.status)

    def __str__(self):
        return ' '.join(filter(None, [self.last_name, self.first_name, self.middle_name]))
class Comic(Base):
    """Comic book file and its metadata.

    Columns are declared in two variants: with explicit lengths when a
    MySQL backend is active (MySQL requires sized VARCHARs), and unsized
    otherwise (e.g. SQLite).

    Fix in this revision: `credits` used `dict.has_key()`, which was
    removed in Python 3 — replaced with the `in` operator.
    """
    __tablename__ = 'comics'
    __table_args__ = {'sqlite_autoincrement': True,
                      'mysql_engine': 'InnoDB',
                      'mysql_charset': 'utf8'}
    global mysql_active
    if mysql_active:
        path = Column(String(1000), unique=True)
        fingerprint = Column(String(128))
        folder = Column(String(1000))
        file = Column(String(1000))
        series = Column(String(1000))
        issue = Column(String(100))
        comments = Column(Text)
        publisher = Column(String(256))
        title = Column(String(1000))
        imprint = Column(String(1000))
        weblink = Column(String(1000))
        hash = Column(String(1000))
        language = Column(String(100))
        comicbookvine = Column(String(64))
        #thumbnail = Column(LargeBinary(1024*1024*10*10))
        # Deferred so the (potentially large) image blob is only loaded on access.
        thumbnail = deferred(Column(LargeBinary(1024*1024*3)))
        alternateIssue = Column(String(1000))
        alternateseries_raw = relationship('AlternateSeries', secondary=comics_alternateseries_table, cascade="save-update,delete")  #, backref='comics')
        credits_raw = relationship('Credit',
                                   #secondary=credits_,
                                   cascade="all, delete",
                                   )  #, backref='comics')
        characters_raw = relationship('Character', secondary=comics_characters_table, cascade="save-update,delete")  #, backref='comics')
        teams_raw = relationship('Team', secondary=comics_teams_table, cascade="save-update,delete")  #)#, backref='comics')
        locations_raw = relationship('Location', secondary=comics_locations_table, cascade="save-update,delete")  #, backref='comics')
        storyarcs_raw = relationship('StoryArc', secondary=comics_storyarcs_table, cascade="save-update,delete")  #, backref='comics')
        generictags_raw = relationship('GenericTag', secondary=comics_generictags_table, cascade="save-update,delete")  #, backref='comics')
        genres_raw = relationship('Genre', secondary=comics_genres_table, cascade="save-update,delete")  #, backref='comics')
        blacklist_raw = relationship('Blacklist', secondary=comics_blacklist_table, cascade="save-update,delete")  #, backref='comics')
    else:
        path = Column(String, unique=True)
        fingerprint = Column(String)
        folder = Column(String)
        file = Column(String)
        series = Column(String)
        issue = Column(String)
        comments = Column(Text)
        publisher = Column(String)
        title = Column(String)
        imprint = Column(String)
        weblink = Column(String)
        hash = Column(String)
        language = Column(String)
        thumbnail = deferred(Column(LargeBinary))
        alternateIssue = Column(String)
        comicbookvine = Column(String)
        alternateseries_raw = relationship('AlternateSeries', secondary=comics_alternateseries_table, cascade="save-update,delete")  #, backref='comics')
        credits_raw = relationship('Credit',
                                   #secondary=credits_,
                                   cascade="all, delete",
                                   )  #, backref='comics')
        characters_raw = relationship('Character', secondary=comics_characters_table, cascade="save-update,delete")  #, backref='comics')
        teams_raw = relationship('Team', secondary=comics_teams_table, cascade="save-update,delete")  #)#, backref='comics')
        locations_raw = relationship('Location', secondary=comics_locations_table, cascade="save-update,delete")  #, backref='comics')
        storyarcs_raw = relationship('StoryArc', secondary=comics_storyarcs_table, cascade="save-update,delete")  #, backref='comics')
        generictags_raw = relationship('GenericTag', secondary=comics_generictags_table, cascade="save-update,delete")  #, backref='comics')
        genres_raw = relationship('Genre', secondary=comics_genres_table, cascade="save-update,delete")  #, backref='comics')
        blacklist_raw = relationship('Blacklist', secondary=comics_blacklist_table, cascade="save-update,delete")  #, backref='comics')

    # Read-only views over the Credit association rows.
    persons_raw = relationship("Person",
                               secondary="join(Credit, Person, Credit.person_id == Person.id)",
                               primaryjoin="and_(Comic.id == Credit.comic_id)",
                               #passive_updates=False,
                               viewonly=True
                               )
    roles_raw = relationship("Role",
                             secondary="join(Credit, Role, Credit.role_id == Role.id)",
                             primaryjoin="and_(Comic.id == Credit.comic_id)",
                             #passive_updates=False,
                             viewonly=True
                             )
    filesize = Column(BigInteger)
    id = Column(Integer, primary_key=True)
    issue_num = Column(Float)
    date = Column(DateTime)  # will be a composite of month,year,day for sorting/filtering
    day = Column(Integer)
    month = Column(Integer)
    year = Column(Integer)
    volume = Column(Integer)
    page_count = Column(Integer)
    deleted_ts = Column(DateTime)
    lastread_ts = Column(DateTime)
    lastread_page = Column(Integer)
    alternateNumber = Column(Float)
    #hash = Column(String)
    added_ts = Column(DateTime, default=datetime.utcnow)  # when the comic was added to the DB
    mod_ts = Column(DateTime)  # the last modified date of the file

    """
    # chanhef to all instead of save-update
    alternateseries_raw = relationship('AlternateSeries', secondary=comics_alternateseries_table, cascade="save-update,delete", backref='comics')
    credits_raw = relationship('Credit',secondary=credits,cascade="save-update, delete", backref='comics')
    characters_raw = relationship('Character', secondary=comics_characters_table,cascade="save-update ,delete", backref='comics')
    teams_raw = relationship('Team', secondary=comics_teams_table,cascade="save-update ,delete", backref='comics')
    locations_raw = relationship('Location', secondary=comics_locations_table,cascade="save-update ,delete", backref='comics')
    storyarcs_raw = relationship('StoryArc', secondary=comics_storyarcs_table,cascade="save-update ,delete", backref='comics')
    generictags_raw = relationship('GenericTag', secondary=comics_generictags_table,cascade="save-update, delete", backref='comics')
    genres_raw = relationship('Genre', secondary=comics_genres_table,cascade="save-update, delete", backref='comics')
    """

    # Convenience name-only views over the *_raw relationships.
    #credits = association_proxy('credits_raw', 'person_role_dict')
    alternateseries = association_proxy('alternateseries_raw', 'name')
    characters = association_proxy('characters_raw', 'name')
    teams = association_proxy('teams_raw', 'name')
    locations = association_proxy('locations_raw', 'name')
    storyarcs = association_proxy('storyarcs_raw', 'name')
    generictags = association_proxy('generictags_raw', 'name')
    persons = association_proxy('persons_raw', 'name')
    roles = association_proxy('roles_raw', 'name')
    genres = association_proxy('genres_raw', 'name')
    blacklist = association_proxy('blacklist_raw', 'hash')
    #blacklist = relationship("Blacklist", cascade="save-update,delete")  #uselist=False,

    def __repr__(self):
        out = u"<Comic(id={0}, path={1},\n series={2}, issue={3}, year={4} pages={5}\n{6}".format(
            self.id, self.folder+self.file, self.series, self.issue,
            self.year, self.page_count, self.characters)
        return out

    @property
    def credits(self):
        """Merge credits together into a dict with role name as key, and
        lists of persons"""
        out_dict = {}
        # iterate over the list of credits mini dicts:
        for c in self.credits_raw:
            if c.role and c.person:
                # BUGFIX: dict.has_key() was removed in Python 3;
                # use the `in` operator instead.
                if c.role.name not in out_dict:
                    out_dict[c.role.name] = []
                out_dict[c.role.name].append(c.person.name)
        return out_dict
class Observation(Base):
    """A single weather observation reported by a Station."""
    __tablename__ = 'observation'

    id = Column(Integer, autoincrement=True, primary_key=True)
    timestamp = Column(Integer)
    station_id = Column(Integer, ForeignKey('station.id'))
    # Temperature
    air_temperature_2m = Column(Float)
    air_temperature_2m_minimum_over_10min = Column(Float)
    air_temperature_2m_minimum_over_6h = Column(Float)
    air_temperature_2m_minimum_over_12h = Column(Float)
    air_temperature_2m_minimum_over_14h = Column(Float)
    air_temperature_10cm_minimum_over_10min = Column(Float)
    air_temperature_10cm_minimum_over_6h = Column(Float)
    air_temperature_10cm_minimum_over_12h = Column(Float)
    air_temperature_10cm_minimum_over_14h = Column(Float)
    air_temperature_2m_maximum_over_10min = Column(Float)
    air_temperature_2m_maximum_over_6h = Column(Float)
    air_temperature_2m_maximum_over_12h = Column(Float)
    air_temperature_2m_maximum_over_24h = Column(Float)
    # Humidity
    dew_point = Column(Float)
    relative_humidity = Column(Float)
    # Wind
    wind_speed = Column(Float)
    wind_direction = Column(Float)
    wind_gust = Column(Float)
    # Pressure
    air_pressure_at_sea_level = Column(Float)
    # Precipitation
    rain_duration_past_1h = Column(Float)
    rain_amount_past_1h = Column(Float)
    rain_amount_past_6h = Column(Float)
    rain_amount_past_12h = Column(Float)
    rain_amount_past_24h = Column(Float)
    precipitation_duration_past_10min_rain_gauge = Column(Float)
    precipitation_duration_past_10min_pws = Column(Float)
    precipitation_intensity_past_10min_rain_gauge = Column(Float)
    precipitation_intensity_past_10min_pws = Column(Float)
    # Clouds
    cloud_base_height = Column(Float)
    cloud_base_height_layer_1 = Column(Float)
    cloud_base_height_layer_2 = Column(Float)
    cloud_base_height_layer_3 = Column(Float)
    cloud_cover_total = Column(Float)
    cloud_cover_layer_1 = Column(Float)
    cloud_cover_layer_2 = Column(Float)
    cloud_cover_layer_3 = Column(Float)
    # Radiation
    global_solar_radiation_past_10min = Column(Float)
    sunshine_duration = Column(Float)
    # Weather
    visibility = Column(Float)
    weather_code = Column(Integer)
    weather_code_past_10min = Column(Integer)
    present_weather = Column(Integer)

    station = relationship('Station')

    def to_dict(self) -> dict:
        """Return a plain-dict view of this observation with the related
        station expanded via its own ``to_dict()``.

        FIX: the original returned ``self.__dict__`` directly, which mutated
        live instance state — it clobbered the ``station`` relationship
        attribute with a dict and popped 'id'/'_sa_instance_state' off the
        instance itself (a second call would raise KeyError). Work on a
        shallow copy instead.
        """
        result = dict(self.__dict__)
        result['station'] = self.station.to_dict()
        # remove unneeded keys
        result.pop('id')
        result.pop('_sa_instance_state')
        return result
class Proyecto(Base):
    """Project model with persistence helpers and JSON serializers."""
    __tablename__ = "proyecto"

    idProyecto = Column(Integer, primary_key=True, autoincrement=True)
    nombreProyecto = Column(String(50))
    descProyecto = Column(String(150))
    modulos = relationship("Modulo", back_populates="proyecto", lazy=True)

    @classmethod
    def create(cls, idProyecto, nombreProyecto, descProyecto):
        """Build and persist a new Proyecto; return it, or False on failure."""
        proyecto = Proyecto(idProyecto=idProyecto,
                            nombreProyecto=nombreProyecto,
                            descProyecto=descProyecto)
        return proyecto.save()

    def save(self):
        """Commit this instance. Returns self on success, False on failure.

        FIX: the bare ``except:`` also trapped KeyboardInterrupt/SystemExit;
        narrowed to Exception while keeping the rollback/False contract.
        """
        try:
            db_session.add(self)
            db_session.commit()
            return self
        except Exception:
            db_session.rollback()
            return False

    def delete(self):
        """Delete this instance. Returns True on success, False on failure."""
        try:
            db_session.delete(self)
            db_session.commit()
            return True
        except Exception:
            db_session.rollback()
            return False

    def toJson(self):
        """Serialize only the project's own fields."""
        return {
            "idProyecto": self.idProyecto,
            "nombreProyecto": self.nombreProyecto,
            "descProyecto": self.descProyecto,
        }

    def toJsonWithModules(self):
        """Serialize the project plus its modules (one level deep)."""
        return {
            "idProyecto": self.idProyecto,
            "nombreProyecto": self.nombreProyecto,
            "descProyecto": self.descProyecto,
            "modulos": [{
                "idModulo": m.idModulo,
                "nombreModulo": m.nombreModulo,
                "descModulo": m.descModulo,
                "proyecto_id": m.proyecto_id,
            } for m in self.modulos]
        }

    def toJsonWithModulesAndFunctions(self):
        """Serialize the project, its modules, and each module's functions."""
        return {
            "idProyecto": self.idProyecto,
            "nombreProyecto": self.nombreProyecto,
            "descProyecto": self.descProyecto,
            "modulos": [
                {
                    "idModulo": m.idModulo,
                    "nombreModulo": m.nombreModulo,
                    "descModulo": m.descModulo,
                    "proyecto_id": m.proyecto_id,
                    "funciones": [
                        {
                            "idFuncion": f.idFuncion,
                            "nombreFuncion": f.nombreFuncion,
                            "numCampos": f.numCampos,
                            "numObjetos": f.numObjetos,
                            "complejidad": f.complejidad,
                            "modulo_id": f.modulo_id
                        } for f in m.funciones
                    ]
                } for m in self.modulos
            ]
        }
class Person(db.Model):
    """Main Table"""
    __tablename__ = 'corona__person'

    # Stable external identifier, generated server-side by Postgres.
    uuid = db.Column(UUID(as_uuid=True), unique=True,
                     server_default=text("uuid_generate_v4()"))
    id = db.Column(db.Integer, index=True, primary_key=True)
    # ---- Basic Person details ---- #
    name = db.Column(db.String(64), index=True, nullable=False)
    gender = db.Column(db.String(10), index=True, nullable=False)
    age = db.Column(db.Integer, nullable=False)
    address = db.Column(db.String(128), nullable=False)
    town = db.Column(db.String(40), nullable=False)
    phone = db.Column(db.Unicode(20), nullable=False)
    location = db.Column(db.String(64), nullable=False)
    coordinates = db.Column(JSON, nullable=False)
    # Discriminates 'suspect' vs other roles; drives complete_json() below.
    type_of_person = db.Column(db.String(15), index=True, nullable=False)
    # ---- Meta data ---- #
    created_at = db.Column(db.DateTime, index=True, server_default=func.now())
    updated_at = db.Column(
        db.DateTime, index=True,
        server_default=func.now())  # ToDo: fix auto updation
    # ---- Relationships ---- #
    # Two views on Association: rows where this person is the suspect vs.
    # rows where this person is the patient.
    interaction_from = relationship('Association',
                                    backref='suspect__interaction',
                                    primaryjoin=(id == Association.suspect_id))
    interaction_to = relationship('Association',
                                  backref='patient__interaction',
                                  primaryjoin=(id == Association.patient_id))

    def to_json(self):
        # Minimal public representation (id, name, timestamps only).
        json_person = {
            'id': 'Pta / cov / {}'.format(self.id),
            'name': self.name,
            'created_at': self.created_at,
            'updated_at': self.updated_at
        }
        return json_person

    def complete_json(self):
        # Full representation; suspects additionally get fields pulled from
        # their first interaction record.
        json_person = {
            'id': 'Pta / cov / {}'.format(self.id),
            'name': self.name,
            'gender': self.gender,
            'age': self.age,
            'address': self.address,
            'town': self.town,
            'phone': self.phone,
            'location': {
                'value': self.location,
                'coordinates': self.coordinates
            },
            'type_of_person': self.type_of_person,
            'created_at': self.created_at,
            'updated_at': self.updated_at
        }
        if self.type_of_person == 'suspect' and len(
                self.interaction_from) != 0:
            """self.interaction_from is an array"""
            json_person['category_of_suspect'] = self.interaction_from[
                0].category_of_suspect
            json_person['severity'] = self.interaction_from[0].severity
        return json_person

    # method tells Python how to print objects of this class
    def __repr__(self):
        return '<Person {}>'.format(self.id)
class Asset(base.Base, mixins.IdentityMixin, mixins.CreatedUpdatedMixin):
    """An asset record tying a project/sequence/shot to naming and variant
    lookup tables; exposes its filesystem path via ``path``."""
    __tablename__ = 'asset'
    __mapper_args__ = {
        'polymorphic_identity': 'Asset',
    }

    # Required foreign keys (cascade on update/delete).
    project_id = Column(
        'project_id', Integer,
        ForeignKey('project.id', onupdate='CASCADE', ondelete='CASCADE'),
        nullable=False, unique=False,
    )
    sequence_id = Column(
        'sequence_id', Integer,
        ForeignKey('sequence.id', onupdate='CASCADE', ondelete='CASCADE'),
        nullable=False, unique=False,
    )
    shot_id = Column(
        'shot_id', Integer,
        ForeignKey('shot.id', onupdate='CASCADE', ondelete='CASCADE'),
        nullable=False, unique=False,
    )
    name_id = Column(
        'name_id', Integer,
        ForeignKey('name.id', onupdate='CASCADE', ondelete='CASCADE'),
        nullable=False, unique=False,
    )
    # Optional foreign keys (nullable).
    subname_id = Column(
        'subname_id', Integer,
        ForeignKey('subname.id', onupdate='CASCADE', ondelete='CASCADE'),
        nullable=True, unique=False,
    )
    variant_id = Column(
        'variant_id', Integer,
        ForeignKey('variant.id', onupdate='CASCADE', ondelete='CASCADE'),
        nullable=True, unique=False,
    )
    type_id = Column(
        'type_id', Integer,
        ForeignKey('type.id', onupdate='CASCADE', ondelete='CASCADE'),
        nullable=True, unique=False,
    )
    resolution_id = Column(
        'resolution_id', Integer,
        ForeignKey('resolution.id', onupdate='CASCADE', ondelete='CASCADE'),
        nullable=True, unique=False,
    )
    instance_id = Column(
        'instance_id', Integer,
        ForeignKey('instance.id', onupdate='CASCADE', ondelete='CASCADE'),
        nullable=True, unique=False,
    )

    project = relationship('Project')
    sequence = relationship('Sequence')
    shot = relationship('Shot')
    name = relationship('Name')
    subname = relationship('Subname')
    variant = relationship('Variant')
    type = relationship('Type')
    resolution = relationship('Resolution')
    instance = relationship('Instance')
    asset_versions = relationship('AssetVersion')
    tags = relationship(
        'Tag',
        secondary='asset_tag',
    )
    key_values = relationship(
        'AssetKeyValue',
        secondary='asset_key_value',
        secondaryjoin='AssetKeyValue.asset_id==Asset.id',
    )

    def __init__(self, **kwargs):
        # Delegates field assignment to the mixin-provided keyword setter.
        super(Asset, self).__init__()
        self._setKeywordFields(**kwargs)

    def get_path(self):
        """Build the asset's absolute path from the names of its related
        records, in the fixed order project/sequence/shot/name/subname/
        instance/variant/resolution/type.

        NOTE(review): subname/variant/type/resolution/instance are nullable
        FKs; if any relation is unset, ``.name`` raises AttributeError —
        presumably callers only use this on fully-populated assets; confirm.
        """
        project = self.project.name
        sequence = self.sequence.name
        shot = self.shot.name
        name = self.name.name
        subname = self.subname.name
        variant = self.variant.name
        instance = self.instance.name
        resolution = self.resolution.name
        type = self.type.name
        path = os.path.join(os.path.sep, project, sequence, shot, name,
                            subname, instance, variant, resolution, type)
        return path

    path = property(get_path)
class ArtistAlbum(Base):
    """An album row linked to its Artist; inverse of ``Artist.albums``.

    NOTE(review): no ``__tablename__`` or primary key is visible here —
    presumably supplied by the ``Base`` class; confirm.
    """
    artist_id = Column(Integer, ForeignKey("Artist.id"))
    artists = relationship("Artist", back_populates="albums")
    name = Column(String())
class Photo(db.Model):
    """A geotagged photo, optionally attached to a Location and Collection."""
    __tablename__ = 'photos'

    id = db.Column(db.Integer, primary_key = True)
    pic = db.Column(db.String(50))
    title = db.Column(db.String(50))
    taken_str = db.Column(db.String(25))
    taken = db.Column(db.DateTime)
    lat = db.Column(db.Float)
    lon = db.Column(db.Float)
    taken_lat = db.Column(db.Float)
    taken_lon = db.Column(db.Float)
    location_id = db.Column(db.ForeignKey(Location.id))
    location = relationship("Location", back_populates="photos")
    collection_id = db.Column(db.ForeignKey(Collection.id))
    collection = relationship("Collection", back_populates="c_photos")

    def _apply(self, pic, taken, coords, taken_coords, loc, title):
        """Copy the shared field set onto this instance (no commit).

        Extracted because __init__ and update previously duplicated these
        nine assignments line for line.
        """
        self.location = loc
        self.pic = pic
        self.taken = taken
        self.lat = coords[0]
        self.lon = coords[1]
        self.taken_lat = taken_coords[0]
        self.taken_lon = taken_coords[1]
        self.title = title
        if loc:
            self.location_id = loc.id

    def __init__(self, pic, taken, coords, taken_coords, loc, title):
        """Create and immediately persist a photo."""
        self._apply(pic, taken, coords, taken_coords, loc, title)
        db.session.add(self)
        db.session.commit()

    def update(self, pic, taken, coords, taken_coords, loc, title):
        """Overwrite all fields and commit."""
        self._apply(pic, taken, coords, taken_coords, loc, title)
        db.session.commit()

    # FIX: defined without `self` and without @staticmethod, so calling it on
    # an instance raised TypeError; @staticmethod keeps Photo.fetch_by_id(...)
    # working and makes instance calls valid too.
    @staticmethod
    def fetch_by_id(id):
        """Return the Photo with the given primary key (or None)."""
        return db.session.query(Photo).get(id)

    def remove_collection(self):
        """Detach this photo from its collection, if it has one."""
        if self.collection_id:
            self.collection_id = None
            db.session.commit()

    @staticmethod
    def fetch_all():
        """Return every photo as a JSON-ready dict for the map UI."""
        photos = db.session.query(Photo).all()
        photos_json = []
        for photo in photos:
            taken = photo.taken.strftime("%Y")
            photos_json.append({"lat": photo.lat,
                                "lng": photo.lon,
                                "taken": taken,
                                "title": photo.title,
                                "url": "https://res.cloudinary.com/dixpjmvss/image/upload/" + photo.pic,
                                "thumbnail": "https://res.cloudinary.com/dixpjmvss/image/upload/" + photo.pic,
                                "name": "please work"})
        # (earlier GeoJSON Feature-based variant kept for reference)
        # for photo in photos:
        #     geometry = { "type": "Point", "coordinates": [photo.lon, photo.lat]}
        #     photos_json.append(Feature(properties={'title':photo.pic, 'name':"NAME"},
        #                                geometry=geometry))
        return photos_json

    def __repr__(self):
        return '<Photo %r>' % (self.id)
class Tag(MailSyncBase, HasRevisions):
    """Tags represent extra data associated with threads.

    A note about the schema. The 'public_id' of a tag is immutable. For
    reserved tags such as the inbox or starred tag, the public_id is a fixed
    human-readable string. For other tags, the public_id is an autogenerated
    uid similar to a normal public id, but stored as a string for
    compatibility.

    The name of a tag is allowed to be mutable, to allow for the eventuality
    that users wish to change the name of user-created labels, or that we
    someday expose localized names ('DAS INBOX'), or that we somehow manage
    to sync renamed gmail labels, etc.
    """
    # Both joins filter out soft-deleted rows via deleted_at.is_(None).
    namespace = relationship(
        Namespace,
        backref=backref(
            'tags',
            primaryjoin='and_(Tag.namespace_id == Namespace.id, '
                        'Tag.deleted_at.is_(None))',
            collection_class=attribute_mapped_collection('public_id')),
        primaryjoin='and_(Tag.namespace_id==Namespace.id, '
                    'Namespace.deleted_at.is_(None))',
        load_on_pending=True)
    # (Because this class inherits from HasRevisions, we need
    # load_on_pending=True here so that setting Transaction.namespace in
    # Transaction.set_extra_attrs() doesn't raise an IntegrityError.)

    namespace_id = Column(Integer,
                          ForeignKey('namespace.id', ondelete='CASCADE'),
                          nullable=False)
    public_id = Column(String(MAX_INDEXABLE_LENGTH), nullable=False,
                       default=generate_public_id)
    name = Column(String(MAX_INDEXABLE_LENGTH), nullable=False)
    user_created = Column(Boolean, server_default=false(), nullable=False)

    # Names a user-created tag may not start with (provider clash).
    RESERVED_PROVIDER_NAMES = [
        'gmail', 'outlook', 'yahoo', 'exchange', 'inbox', 'icloud', 'aol'
    ]
    # Tags guaranteed to exist per namespace (see create_canonical_tags).
    CANONICAL_TAG_NAMES = [
        'inbox', 'archive', 'drafts', 'sending', 'sent', 'spam', 'starred',
        'trash', 'unread', 'unseen', 'attachment'
    ]
    # Additional names users may not claim.
    RESERVED_TAG_NAMES = [
        'all', 'archive', 'drafts', 'send', 'replied', 'file', 'attachment',
        'unseen'
    ]
    # Tags that are allowed to be both added and removed via the API.
    USER_MUTABLE_TAGS = [
        'unread', 'starred', 'spam', 'trash', 'inbox', 'archive'
    ]

    @property
    def user_removable(self):
        # The 'unseen' tag can only be removed.
        return (self.user_created or
                self.public_id in self.USER_MUTABLE_TAGS or
                self.public_id == 'unseen')

    @classmethod
    def create_canonical_tags(cls, namespace, db_session):
        """If they don't already exist yet, create tags that should always
        exist."""
        existing_canonical_tags = db_session.query(Tag).filter(
            Tag.namespace_id == namespace.id,
            Tag.public_id.in_(cls.CANONICAL_TAG_NAMES)).all()
        missing_canonical_names = set(cls.CANONICAL_TAG_NAMES).difference(
            {tag.public_id for tag in existing_canonical_tags})
        for canonical_name in missing_canonical_names:
            tag = Tag(namespace=namespace,
                      public_id=canonical_name,
                      name=canonical_name)
            db_session.add(tag)

    @classmethod
    def name_available(cls, name, namespace_id, db_session):
        """Return True iff `name` is free to use within the namespace:
        not a provider prefix, not reserved/canonical, not already taken."""
        if any(name.lower().startswith(provider) for provider in
               cls.RESERVED_PROVIDER_NAMES):
            return False
        if name in cls.RESERVED_TAG_NAMES or name in cls.CANONICAL_TAG_NAMES:
            return False
        # Query yields (name,) tuples, hence the tuple membership test.
        if (name,) in db_session.query(Tag.name). \
                filter(Tag.namespace_id == namespace_id).all():
            return False
        return True

    @property
    def user_addable(self):
        return (self.user_created or
                self.public_id in self.USER_MUTABLE_TAGS)

    __table_args__ = (UniqueConstraint('namespace_id', 'name'),
                      UniqueConstraint('namespace_id', 'public_id'))
class ParentDir(Base):
    """A directory containing songs; one-to-many to Song.

    NOTE(review): primary key / __tablename__ are not visible here —
    presumably provided by ``Base``; confirm.
    """
    path = Column(String)
    children = relationship("Song")
class ArticlesCache(db.Model):
    """
    The ArticlesCache is used to increase the speed of retrieving articles
    for certain content filtering configurations.

    The calculate_hash method calculates a hash, consisting of ids of the
    content selection, and this is stored with the articles that belong to
    this. This way the correct articles can be retrieved with a dramatic
    increase of speed.
    """

    __table_args__ = {"mysql_collate": "utf8_bin"}

    id = Column(Integer, primary_key=True)

    from zeeguu.core.model.article import Article

    article_id = Column(Integer, ForeignKey(Article.id))
    article = relationship(Article)
    content_hash = Column(String(256))

    def __init__(self, article, hash):
        self.article = article
        self.content_hash = hash

    def __repr__(self):
        return f"<Hash {self.content_hash}>"

    @staticmethod
    def calculate_hash(user, topics, filters, searches, search_filters,
                       user_languages):
        """
        This method is to calculate the hash with all the content filters.
        It simply adds a letter for the type of content and the sorted ids
        of all the content that has been added.
        :return:
        """
        # FIX: this docstring used to sit after the inner def, where it was a
        # no-op string statement; moved to the proper docstring position.

        def _join_ids(a_list: list):
            return ",".join([str(l.id) for l in a_list])

        result = "lan: "

        from zeeguu.core.model import User

        for each in user_languages:
            result += f"{each.code} " + str(User.levels_for(user, each))

        return (
            result
            + " top: "
            + _join_ids(topics)
            + " sear: "
            + _join_ids(searches)
            + " filt: "
            + _join_ids(filters)
            + " sear-filt: "
            + _join_ids(search_filters)
        )

    @classmethod
    def get_articles_for_hash(cls, hash, limit):
        """Return up to `limit` cached articles for `hash`, or None on error."""
        try:
            result = cls.query.filter(cls.content_hash == hash).limit(limit)
            # FIX: removed the dead `if result is None` guard — a Query
            # object is never None; emptiness shows up as an empty list.
            return [row.article for row in result]
        except Exception as e:
            from sentry_sdk import capture_exception

            capture_exception(e)
            return None

    @classmethod
    def check_if_hash_exists(cls, hash):
        """True iff at least one cache row exists for `hash`."""
        # Simplified from an if/else returning True/False explicitly.
        return cls.query.filter(cls.content_hash == hash).first() is not None
class Artist(Base):
    """An artist; ``albums`` is the inverse of ``ArtistAlbum.artists``.

    NOTE(review): primary key / __tablename__ are not visible here —
    presumably provided by ``Base``; confirm.
    """
    name = Column(String)
    albums = relationship("ArtistAlbum", back_populates="artists")
class Snapshot(BASE, CinderBase):
    """Represents a snapshot of volume."""
    __tablename__ = 'snapshots'
    id = sa.Column(sa.String(36), primary_key=True)

    # TODO: (Y release) Change nullable to False
    use_quota = Column(
        sa.Boolean,
        nullable=True,
        default=True,
        doc='Ignore volume in quota usage',
    )

    @property
    def name(self):
        # Display name derived from the configured template and this id.
        return CONF.snapshot_name_template % self.id

    @property
    def volume_name(self):
        # Delegates to the parent volume's name property.
        return self.volume.name  # pylint: disable=E1101

    user_id = sa.Column(sa.String(255))
    project_id = sa.Column(sa.String(255))

    # Parent volume (required); cg/group snapshot links are optional.
    volume_id = sa.Column(
        sa.String(36),
        sa.ForeignKey('volumes.id', name='snapshots_volume_id_fkey'),
        nullable=False,
        index=True,
    )
    cgsnapshot_id = sa.Column(
        sa.String(36),
        sa.ForeignKey('cgsnapshots.id'),
        index=True,
    )
    group_snapshot_id = sa.Column(
        sa.String(36),
        sa.ForeignKey('group_snapshots.id'),
        index=True,
    )
    status = sa.Column(sa.String(255))
    progress = sa.Column(sa.String(255))
    volume_size = sa.Column(sa.Integer)
    scheduled_at = sa.Column(sa.DateTime)

    display_name = sa.Column(sa.String(255))
    display_description = sa.Column(sa.String(255))

    encryption_key_id = sa.Column(sa.String(36))
    volume_type_id = sa.Column(sa.String(36), nullable=False)

    provider_location = sa.Column(sa.String(255))
    provider_id = sa.Column(sa.String(255))
    provider_auth = sa.Column(sa.String(255))

    volume = relationship(
        Volume,
        backref="snapshots",
        foreign_keys=volume_id,
        primaryjoin='Snapshot.volume_id == Volume.id',
    )
    cgsnapshot = relationship(
        CGSnapshot,
        backref="snapshots",
        foreign_keys=cgsnapshot_id,
        primaryjoin='Snapshot.cgsnapshot_id == CGSnapshot.id',
    )
    group_snapshot = relationship(
        GroupSnapshot,
        backref="snapshots",
        foreign_keys=group_snapshot_id,
        primaryjoin='Snapshot.group_snapshot_id == GroupSnapshot.id',
    )
class Order(Base):
    """An order: a collection of OrderGood lines with cached money totals
    (wanted/paid/refund) and a status driven by its goods' statuses."""
    __tablename__ = 'order_'
    id = Column(
        sqlalchemy.Integer,
        primary_key=True,
        info={
            'colanderalchemy': {
                'title': 'Id заказа',
                # 'widget': deform.widget.TextInputWidget(readonly=True)
            }
        })
    # state = Column(sqlalchemy.Enum(*EnumOrderState.get_values(), name='EnumOrderState', native_enum=False),
    #                # TODO readonly select
    #                info={'colanderalchemy': {
    #                    'title': 'Состояние заказа',
    #                    'widget': deform.widget.TextInputWidget(readonly=True)
    #                }}
    #                )
    # TODO state as enum http://techspot.zzzeek.org/2011/01/14/the-enum-recipe/
    wanted_total = Column(sqlalchemy.Numeric(12, 2), default=0.0,
                          info={
                              'colanderalchemy': {
                                  'title': 'Общая сумма заказа',
                                  'widget': deform.widget.TextInputWidget(readonly=True)
                              }
                          })
    paid_amount = Column(sqlalchemy.Numeric(12, 2), default=0.0,
                         info={
                             'colanderalchemy': {
                                 'title': 'Оплачено',
                                 'widget': deform.widget.TextInputWidget(readonly=True)
                             }
                         })
    refund_amount = Column(sqlalchemy.Numeric(12, 2), default=0.0,
                           info={
                               'colanderalchemy': {
                                   'title': 'Сумма возврата',
                                   'widget': deform.widget.TextInputWidget(readonly=True)
                               }
                           })
    status = Column(
        sqlalchemy.Enum(*EnumOrderStatus.get_values(),
                        name='enum_order_status', native_enum=False),
        nullable=False,
        default=EnumOrderStatus.cart,
        # TODO readonly select
        info={
            'colanderalchemy': {
                'title': 'Состояние',
                'widget': deform.widget.TextInputWidget(readonly=True)
            }
        })
    user_id = Column(sqlalchemy.Integer, ForeignKey('user_.id'),
                     info={
                         'colanderalchemy': {
                             'title': 'Пользователь',
                             'widget': deform.widget.TextInputWidget(readonly=True)
                         }
                     })
    # relations
    order_goods = relationship('OrderGood', back_populates='order')
    """ :type OrderGood[] """  # TODO how to doc type?

    @staticmethod
    def by_id(id_: int):
        """ :return Order """
        return DBSession.query(Order).filter(Order.id == id_).first()

    def recount_wanted_total(self):
        """ :var order_good: OrderGood """
        # Recompute the cached wanted total from all order lines.
        self.wanted_total = sum(
            [order_good.wanted_total for order_good in self.order_goods])

    def recount_refund_amount(self):
        # Recompute the cached refund total from all order lines.
        self.refund_amount = sum(
            [order_good.refund_amount for order_good in self.order_goods])

    def recount_paid_amount(self):
        # Recompute the cached paid total from all order lines.
        self.paid_amount = sum(
            [order_good.paid_amount for order_good in self.order_goods])

    def recount_totals(self):
        # Refresh all three cached totals at once.
        self.recount_wanted_total()
        self.recount_refund_amount()
        self.recount_paid_amount()

    def get_order_good(self, good_id, price=None):
        """Return the order line for `good_id` whose price equals `price`;
        create (and flush) a new line if no such match exists.

        NOTE(review): when `price` is None, an existing line only matches if
        its price is also None — presumably intentional, so a price change
        creates a separate line; confirm.
        """
        for order_good in self.order_goods:
            if order_good.good_id == good_id:
                if price == order_good.price:
                    return order_good
        # not found, creating new
        good = Good.by_id(good_id)
        new_order_good = OrderGood(good_id=good.id, user_id=self.user_id)
        self.order_goods.append(new_order_good)
        DBSession.flush()
        new_order_good.set_price()
        return new_order_good

    def alter_wanted_good_count(self, good_id, delta_count=1.0, price=None):
        # Adjust a line's count by delta_count and refresh the wanted total.
        order_good = self.get_order_good(good_id, price)
        order_good.alter_count(delta_count)
        self.recount_wanted_total()

    def remove_good(self, order_good_id):
        raise NotImplementedError()

    def get_amount_to_pay(self):
        # Outstanding amount = wanted + refund - already paid.
        return self.wanted_total + self.refund_amount - self.paid_amount

    @staticmethod
    def to_order_status(order_good_status: str):
        """Map an OrderGood status onto the corresponding Order status;
        returns None for statuses with no order-level counterpart."""
        status_match = {
            EnumOrderGoodStatus.payment_began: EnumOrderStatus.payment_began,
            EnumOrderGoodStatus.paid: EnumOrderStatus.paid,
            EnumOrderGoodStatus.payment_failed: EnumOrderStatus.payment_failed,
        }
        return status_match.get(order_good_status)

    def append_goods_status(
            self,
            status: str,
            transaction: Union[int, MoneyTransaction] = None,
            # transaction_status: Union[int, MoneyTransactionStatus]=None
    ):
        # Propagate the status to every order line, then promote the order's
        # own status if this good-status has an order-level mapping.
        for order_good in self.order_goods:
            """:type order_good OrderGood"""
            order_good.append_status(status, transaction)
        self_new_status = self.to_order_status(status)
        if self_new_status is not None:
            self.status = self_new_status