def test_funky_ordering(self):
    """Exercise ordering_list with custom ordering_func implementations."""

    class Pos(object):
        def __init__(self):
            self.position = None

    # Step numbering: positions advance by a fixed step of 2 (0, 2, 4, ...).
    stepped = ordering_list("position", ordering_func=step_numbering(2))()
    for _ in range(4):
        stepped.append(Pos())
    for idx, expected in enumerate([0, 2, 4, 6]):
        self.assert_(stepped[idx].position == expected)

    # Fibonacci numbering: positions follow the Fibonacci sequence.
    fibbed = ordering_list(
        "position", ordering_func=fibonacci_numbering("position")
    )()
    for _ in range(5):
        fibbed.append(Pos())
    for idx, expected in enumerate([1, 2, 3, 5, 8]):
        self.assert_(fibbed[idx].position == expected)

    # Inserting renumbers the tail, continuing the sequence.
    fibbed.insert(2, Pos())
    fibbed.insert(4, Pos())
    fibbed.insert(6, Pos())
    for idx, expected in enumerate([1, 2, 3, 5, 8, 13, 21, 34]):
        self.assert_(fibbed[idx].position == expected)

    # Alphabetic ordering: positions are letters 'A', 'B', 'C', ...
    alpha = ordering_list("position", ordering_func=alpha_ordering)()
    alpha.append(Pos())
    alpha.append(Pos())
    alpha.append(Pos())
    alpha.insert(1, Pos())
    for idx, expected in enumerate(["A", "B", "C", "D"]):
        self.assert_(alpha[idx].position == expected)
def test_replace(self):
    """Replacing an element by index assigns the new element the old slot."""
    self._setup(ordering_list("position"))

    s1 = Slide("Slide #1")
    s1.bullets = [Bullet("1"), Bullet("2"), Bullet("3")]
    self.assert_(len(s1.bullets) == 3)
    self.assert_(s1.bullets[2].position == 2)

    session = create_session()
    session.add(s1)
    session.flush()

    new_bullet = Bullet("new 2")
    self.assert_(new_bullet.position is None)

    # mark existing bullet as db-deleted before replacement.
    # session.delete(s1.bullets[1])
    s1.bullets[1] = new_bullet
    self.assert_(new_bullet.position == 1)
    self.assert_(len(s1.bullets) == 3)

    id_ = s1.id
    session.flush()
    session.expunge_all()

    reloaded = session.query(Slide).get(id_)
    self.assert_(reloaded.bullets)
    self.assert_(len(reloaded.bullets) == 3)
    self.assert_(reloaded.bullets[1].text == "new 2")
    self.assert_(reloaded.bullets[2].text == "3")
def Doc(models):
    """Model-factory body for a document; ``models`` is the per-language
    model registry (the function's locals become the class namespace)."""
    if models.lang == 'ru':
        id = Column(Integer, primary_key=True)
    else:
        # Translated docs share the primary key of the Russian original.
        id = Column(Integer, ForeignKey(models.DocRu.id),
                    primary_key=True, autoincrement=False)
    date = Column(DateTime, nullable=False, default=datetime.now, index=True)
    title = Column(Html(String(1000)), nullable=False, default='')
    summary = Column(Html(Text), nullable=False, default='')
    body = deferred(Column(ExpandableHtml(MediumText), nullable=False,
                           default=ExpandableMarkup('')))
    # Ordered photo relations: (raw relation, editable list, filtered view).
    _photos, photos_edit, photos = editable_ordered_relation(
        models.Doc_Photo, 'photo', use_property=False)
    _photo_sets, photo_sets_edit, photo_sets = editable_ordered_relation(
        models.Doc_PhotoSet, 'photo_set', use_property=False)
    # Link blocks keep their order via the 'order_position' column.
    link_blocks_edit = relationship(
        models.DocLinkBlock,
        order_by=[models.DocLinkBlock.order_position],
        collection_class=ordering_list('order_position'),
        cascade='all, delete-orphan')
    # do not display blocks without links
    link_blocks = FilteredProperty('link_blocks_edit', has_links=True)
    sections = relationship(
        models.Section, secondary=models.Doc_Section.__table__)
    # Newest documents first by default.
    __mapper_args__ = {'order_by': desc(date)}

    def __unicode__(self):
        if self.id is None:
            return u'Новый материал'
        return u'Материал: {}'.format(self.title)

    @cached_property
    def index_photo(self):
        # Prefer a direct photo; fall back to the first photo set's cover.
        if self.photos:
            return self.photos[0]
        elif self.photo_sets:
            return self.photo_sets[0].index_photo
        else:
            return None

    @cached_property
    def all_photos(self):
        # Photos from all sets plus direct photos, de-duplicated while
        # preserving order.
        photos = sum([x.photos for x in self.photo_sets], []) + self.photos
        return list(collections.OrderedDict.fromkeys(photos))

    @cached_property
    def links_count(self):
        # Total displayable links across all blocks.
        return sum([len(x.links) for x in self.link_blocks])

    @cached_property
    def date_formatted(self):
        return format_datetime(self.date, locale=self.models.lang)
def test_append_no_reorder(self):
    """With reorder_on_append=False an explicit position survives appends."""
    self._setup(
        ordering_list("position", count_from=1, reorder_on_append=False)
    )

    s1 = Slide("Slide #1")
    self.assert_(not s1.bullets)
    self.assert_(len(s1.bullets) == 0)

    s1.bullets.append(Bullet("s1/b1"))
    self.assert_(s1.bullets)
    self.assert_(len(s1.bullets) == 1)
    self.assert_(s1.bullets[0].position == 1)

    s1.bullets.append(Bullet("s1/b2"))
    self.assert_(len(s1.bullets) == 2)
    for idx, pos in enumerate([1, 2]):
        self.assert_(s1.bullets[idx].position == pos)

    # A pre-positioned bullet keeps its position on append.
    bul = Bullet("s1/b100")
    bul.position = 100
    s1.bullets.append(bul)
    for idx, pos in enumerate([1, 2, 100]):
        self.assert_(s1.bullets[idx].position == pos)

    # The next append numbers from the list index, not from 100.
    s1.bullets.append(Bullet("s1/b4"))
    for idx, pos in enumerate([1, 2, 100, 4]):
        self.assert_(s1.bullets[idx].position == pos)

    # An explicit reorder renumbers everything contiguously.
    s1.bullets._reorder()
    for idx, pos in enumerate([1, 2, 3, 4]):
        self.assert_(s1.bullets[idx].position == pos)

    session = create_session()
    session.add(s1)
    session.flush()

    id_ = s1.id
    session.expunge_all()
    del s1

    srt = session.query(Slide).get(id_)
    self.assert_(srt.bullets)
    self.assert_(len(srt.bullets) == 4)

    titles = ["s1/b1", "s1/b2", "s1/b100", "s1/b4"]
    found = [b.text for b in srt.bullets]
    self.assert_(titles == found)
def test_replace_two(self):
    """test #3191"""
    self._setup(ordering_list("position", reorder_on_append=True))

    s1 = Slide("Slide #1")
    b1, b2, b3, b4 = Bullet("1"), Bullet("2"), Bullet("3"), Bullet("4")

    # Initial assignment numbers the bullets 0..2.
    s1.bullets = [b1, b2, b3]
    eq_([b.position for b in s1.bullets], [0, 1, 2])

    # Bulk replacement renumbers from scratch, reusing surviving bullets.
    s1.bullets = [b4, b2, b1]
    eq_([b.position for b in s1.bullets], [0, 1, 2])
def get_dict(self, models, factory=None):
    """Return the mapping ``{self.name: relation}`` for this field,
    configuring ordering, cascade and foreign keys as declared."""
    remote_cls = getattr(models, self.get_remote_cls_name(factory))

    kwargs = {}
    if self.ordered:
        # Keep list order persistent via the 'order' column.
        kwargs['collection_class'] = ordering_list('order')
        kwargs['order_by'] = [remote_cls.order]
    if self.cascade:
        kwargs['cascade'] = self.cascade
    if self.remote_foreign_key:
        kwargs['foreign_keys'] = self.get_foreign_keys(models, factory)

    return {self.name: relation(remote_cls, **kwargs)}
def test_slice(self):
    """Slice assignment and slice deletion renumber the affected entries."""
    self._setup(ordering_list("position"))

    bullets = [Bullet(str(n)) for n in range(1, 7)]
    s1 = Slide("Slide #1")

    # 1, 2, 3
    s1.bullets[0:3] = bullets[0:3]
    for i in range(3):
        self.assert_(s1.bullets[i].position == i)
        self.assert_(s1.bullets[i] == bullets[i])

    # 1, 4, 5, 6, 3
    s1.bullets[1:2] = bullets[3:6]
    for li, bi in [(0, 0), (1, 3), (2, 4), (3, 5), (4, 2)]:
        self.assert_(s1.bullets[li].position == li)
        self.assert_(s1.bullets[li] == bullets[bi])

    # 1, 6, 3
    del s1.bullets[1:3]
    for li, bi in [(0, 0), (1, 5), (2, 2)]:
        self.assert_(s1.bullets[li].position == li)
        self.assert_(s1.bullets[li] == bullets[bi])

    session = create_session()
    session.add(s1)
    session.flush()

    id_ = s1.id
    session.expunge_all()
    del s1

    srt = session.query(Slide).get(id_)
    self.assert_(srt.bullets)
    self.assert_(len(srt.bullets) == 3)

    for i, text in enumerate(["1", "6", "3"]):
        self.assert_(srt.bullets[i].position == i)
        self.assert_(srt.bullets[i].text == text)
def test_insert(self):
    """insert() renumbers trailing entries; out-of-range index appends."""
    self._setup(ordering_list("position"))

    s1 = Slide("Slide #1")
    for text in "1", "2", "3", "4":
        s1.bullets.append(Bullet(text))
    for i in range(4):
        self.assert_(s1.bullets[i].position == i)

    # Insertion shifts everything at and after index 2.
    s1.bullets.insert(2, Bullet("insert_at_2"))
    for i in range(5):
        self.assert_(s1.bullets[i].position == i)
    self.assert_(s1.bullets[1].text == "2")
    self.assert_(s1.bullets[2].text == "insert_at_2")
    self.assert_(s1.bullets[3].text == "3")

    # A wildly out-of-range index behaves like an append.
    s1.bullets.insert(999, Bullet("999"))
    self.assert_(len(s1.bullets) == 6)
    self.assert_(s1.bullets[5].position == 5)

    session = create_session()
    session.add(s1)
    session.flush()

    id_ = s1.id
    session.expunge_all()
    del s1

    srt = session.query(Slide).get(id_)
    self.assert_(srt.bullets)
    self.assert_(len(srt.bullets) == 6)

    texts = ["1", "2", "insert_at_2", "3", "4", "999"]
    found = [b.text for b in srt.bullets]
    self.assert_(texts == found)
def test_insert(self):
    """insert() renumbers trailing entries; out-of-range index appends."""
    self._setup(ordering_list('position'))

    s1 = Slide('Slide #1')
    for text in ('1', '2', '3', '4'):
        s1.bullets.append(Bullet(text))
    for i in range(4):
        self.assert_(s1.bullets[i].position == i)

    # Insertion shifts everything at and after index 2.
    s1.bullets.insert(2, Bullet('insert_at_2'))
    for i in range(5):
        self.assert_(s1.bullets[i].position == i)
    self.assert_(s1.bullets[1].text == '2')
    self.assert_(s1.bullets[2].text == 'insert_at_2')
    self.assert_(s1.bullets[3].text == '3')

    # A wildly out-of-range index behaves like an append.
    s1.bullets.insert(999, Bullet('999'))
    self.assert_(len(s1.bullets) == 6)
    self.assert_(s1.bullets[5].position == 5)

    session = create_session()
    session.add(s1)
    session.flush()

    slide_id = s1.id
    session.expunge_all()
    del s1

    srt = session.query(Slide).get(slide_id)
    self.assert_(srt.bullets)
    self.assert_(len(srt.bullets) == 6)

    texts = ['1', '2', 'insert_at_2', '3', '4', '999']
    found = [b.text for b in srt.bullets]
    self.assert_(texts == found)
def create_collection_class(owner, member, column, *,
                            sorted=True, duplicates=True):
    """
    Creates a class for holding the values of a collection in given *owner*
    class. The given *owner* class will be updated to have a new *member*
    with given name, which is a list containing elements as described by
    *column*:

    >>> create_collection_class(Group, 'permissions',
    ...     Column(PermissionEnum.db_type(), nullable=False))

    Group objects will now have a member called 'permissions', which contain
    a sorted list of PermissionEnum values.

    See :func:`.create_relationship_class` for the description of the
    keyword arguments.
    """
    # Association class name, e.g. Group + 'permissions' -> GroupPermissions.
    name = owner.__name__ + tbl2cls(member)
    if sorted:
        # Keep list order persistent via an 'index' column managed by
        # ordering_list.
        bref = backref(member + '_wrapper',
                       order_by='%s.index' % name,
                       collection_class=ordering_list('index'))
    else:
        bref = backref(member + '_wrapper')
    members = {
        '__score_db__': {
            'inheritance': None
        },
        'owner_id': Column(IdType,
                           ForeignKey('%s.id' % owner.__tablename__),
                           nullable=False),
        'owner': relationship(owner, backref=bref),
        'value': column,
    }
    if sorted:
        members['index'] = Column(Integer, nullable=False)
    if not duplicates:
        # Enforce one value per owner at the database level.
        members['__table_args__'] = (
            UniqueConstraint(members['owner_id'], column),
        )
    cls = type(name, (owner.__score_db__['base'],), members)
    # Expose the raw values directly on the owner via an association proxy.
    proxy = association_proxy(member + '_wrapper', 'value',
                              creator=lambda v: cls(value=v))
    setattr(owner, member, proxy)
    return cls
def test_slice(self):
    """Slice assignment and slice deletion renumber the affected entries."""
    self._setup(ordering_list('position'))

    bullets = [Bullet(str(n)) for n in range(1, 7)]
    s1 = Slide('Slide #1')

    # 1, 2, 3
    s1.bullets[0:3] = bullets[0:3]
    for i in range(3):
        self.assert_(s1.bullets[i].position == i)
        self.assert_(s1.bullets[i] == bullets[i])

    # 1, 4, 5, 6, 3
    s1.bullets[1:2] = bullets[3:6]
    for li, bi in [(0, 0), (1, 3), (2, 4), (3, 5), (4, 2)]:
        self.assert_(s1.bullets[li].position == li)
        self.assert_(s1.bullets[li] == bullets[bi])

    # 1, 6, 3
    del s1.bullets[1:3]
    for li, bi in [(0, 0), (1, 5), (2, 2)]:
        self.assert_(s1.bullets[li].position == li)
        self.assert_(s1.bullets[li] == bullets[bi])

    session = create_session()
    session.add(s1)
    session.flush()

    slide_id = s1.id
    session.expunge_all()
    del s1

    srt = session.query(Slide).get(slide_id)
    self.assert_(srt.bullets)
    self.assert_(len(srt.bullets) == 3)

    for i, text in enumerate(['1', '6', '3']):
        self.assert_(srt.bullets[i].position == i)
        self.assert_(srt.bullets[i].text == text)
def test_replace_two(self):
    """test #3191"""
    self._setup(ordering_list('position', reorder_on_append=True))

    s1 = Slide('Slide #1')
    b1, b2, b3, b4 = Bullet('1'), Bullet('2'), Bullet('3'), Bullet('4')

    # Initial assignment numbers the bullets 0..2.
    s1.bullets = [b1, b2, b3]
    eq_([b.position for b in s1.bullets], [0, 1, 2])

    # Bulk replacement renumbers from scratch, reusing surviving bullets.
    s1.bullets = [b4, b2, b1]
    eq_([b.position for b in s1.bullets], [0, 1, 2])
def InitMapper( cls, metadata, Board ):
    """Create the table and classic mapper for a board message class.

    Messages belong to a Board and keep a 1-based 'slot' position that is
    maintained by ordering_list on the board's 'messages' backref.
    """
    cls.__table__ = Table( cls.__tablename__, metadata,
            Column('id',       Integer,     index = True, primary_key = True ),
            Column('board_id', ForeignKey( Board.id ), index = True, nullable = False ),
            Column('slot',     Integer,     index = True, nullable = False ),
            Column('subject',  String(255), nullable = False ),
            Column('body',     Text,        nullable = False ),
            Column('turn',     Integer,     nullable = False ),
            # mtime tracks last modification via DB-side timestamps.
            Column('mtime',    DateTime,    nullable = False,
                   onupdate = func.current_timestamp(),
                   default = func.current_timestamp()))

    cols = cls.__table__.c

    # Composite index for lookups by message id within a board.
    Index('ix_%s_id_board' % cls.__tablename__, cols.id, cols.board_id)

    mapper( cls, cls.__table__, properties = {
        'board': relation( Board,
            uselist = False,
            backref = backref( 'messages',
                collection_class = ordering_list('slot', count_from = 1),
                order_by = [ cols.slot ] ))
        })
def DocLinkBlock(models):
    """Model-factory body: a titled, ordered block of links attached to a
    Doc (the function's locals become the class namespace)."""
    id = Column(Integer, primary_key=True)
    title = Column(String(250), nullable=False, default='')
    doc_id = Column(Integer,
                    ForeignKey(models.Doc.id, ondelete='CASCADE'),
                    nullable=False)
    doc = relationship(models.Doc)
    # Links keep their order via the 'order_position' column.
    links_edit = relationship(models.DocLink,
                              collection_class=ordering_list('order_position'),
                              order_by=[models.DocLink.order_position],
                              cascade='all, delete-orphan')
    # do not display links which can not be transformed to urls,
    # for example links to unpublished documents
    links = FilteredProperty('links_edit', has_url=True)
    order_position = Column(Integer, nullable=False, default=0)

    @property
    def has_links(self):
        # True when at least one displayable link exists.
        return bool(self.links)

    def __unicode__(self):
        return u"Блок ссылок: {}".format(self.title)
def InitMapper( cls, metadata, OrderType, Object, Player ):
    """Create the table and polymorphic classic mapper for an order class.

    Orders are polymorphic on type_id; each game Object keeps a 1-based
    ordered queue of its orders maintained by ordering_list on 'slot'.
    """
    cls.__table__ = Table( cls.__tablename__, metadata,
            Column('id',        Integer, index = True, primary_key = True),
            Column('slot',      Integer, nullable = False),
            Column('type_id',   ForeignKey( OrderType.id ), nullable = False),
            Column('object_id', ForeignKey( Object.id ), nullable = True),
            Column('owner_id',  ForeignKey( Player.id ), index = True, nullable = True),
            Column('eta',       Integer, nullable = False, default = 0),
            # mtime tracks last modification via DB-side timestamps.
            Column('mtime',     DateTime, nullable = False,
                   onupdate = func.current_timestamp(),
                   default = func.current_timestamp()))

    cols = cls.__table__.c

    mapper( cls, cls.__table__, polymorphic_on = cols.type_id, properties = {
        'type':   relation( OrderType, uselist = False ),
        'object': relation( Object,
            uselist = False,
            backref = backref( 'orders',
                collection_class = ordering_list('slot', count_from = 1),
                order_by = [ cols.slot ] )),
        'owner':  relation( Player, uselist = False )
        })
def get_dict(self, models, factory=None):
    """Return the mapping of relation name and proxy name to their
    configured SQLAlchemy properties for this association field."""
    rel_name = "%s_relation" % self.name
    rel_cls_name = self.get_rel_cls_name(factory)
    rel_cls = getattr(models, rel_cls_name)
    remote_cls_name = self.get_remote_cls_name(factory)

    kwargs = {}
    if self.ordered:
        # Keep association rows ordered via the 'order' column.
        kwargs['order_by'] = rel_cls.order
        kwargs['collection_class'] = ordering_list('order')
    rel = relation(rel_cls, passive_deletes=None,
                   cascade='all, delete-orphan', **kwargs)

    def creator(item):
        # Resolve the association class lazily, at creation time.
        assoc_cls = getattr(models, rel_cls_name)
        return assoc_cls(**{remote_cls_name.lower(): item})

    field = association_proxy(rel_name, remote_cls_name.lower(),
                              creator=creator)
    return {
        rel_name: rel,
        self.name: field,
    }
def ordered_relation(Model):
    """Build a relation to *Model* ordered by its ``order_position`` column
    and kept contiguously renumbered via ordering_list."""
    options = dict(
        order_by=Model.order_position,
        collection_class=ordering_list('order_position'),
        cascade='all, delete-orphan',
    )
    return relation(Model, **options)
## Mappers meta.mapper( Resource, resource_table, properties={ 'package': orm.relation( Package, # all resources including deleted # formally package_resources_all backref=orm.backref( 'resources_all', collection_class=ordering_list('position'), cascade='all, delete', order_by=resource_table.c.position, ), ) }, extension=[extension.PluginMapperExtension()], ) def resource_identifier(obj): return obj.id class DictProxy(object): def __init__(self, target_key, target_dict, data_type=text_type):
class TaskLineGroup(DBBASE, GroupCompute):
    """
    Group of lines
    """
    __table_args__ = default_table_args
    id = Column(Integer, primary_key=True)
    task_id = Column(
        Integer,
        ForeignKey('task.id', ondelete="cascade"),
        info={'colanderalchemy': {
            'title': u"Identifiant du document",
        }})
    description = Column(Text(), default="")
    title = Column(String(255), default="")
    order = Column(Integer, default=1)
    task = relationship(
        "Task",
        primaryjoin="TaskLineGroup.task_id==Task.id",
        info={'colanderalchemy': {
            'exclude': True
        }})
    # Lines are kept contiguously numbered via ordering_list on 'order'.
    lines = relationship(
        "TaskLine",
        order_by='TaskLine.order',
        cascade="all, delete-orphan",
        back_populates='group',
        collection_class=ordering_list('order'),
        info={'colanderalchemy': {
            'title': u"Prestations",
        }})

    def __json__(self, request):
        # Serializable representation used by the JSON renderer.
        return dict(
            id=self.id,
            title=self.title,
            description=self.description,
            task_id=self.task_id,
            order=self.order,
            lines=[line.__json__(request) for line in self.lines])

    def duplicate(self):
        # Deep copy of the group, duplicating each contained line.
        group = TaskLineGroup(
            title=self.title,
            description=self.description,
            task_id=self.task_id,
            lines=[line.duplicate() for line in self.lines],
            order=self.order,
        )
        return group

    def gen_cancelinvoice_group(self):
        # Duplicate with negated costs, used to generate a cancel-invoice.
        res = self.duplicate()
        for line in res.lines:
            line.cost = -1 * line.cost
        return res

    @classmethod
    def from_sale_product_group(cls, sale_product_group):
        """
        Build an instance based on the given sale_product_group

        :param obj sale_product_group: A SaleProductGroup instance
        :returns: A TaskLineGroup instance
        """
        result = cls()
        result.title = sale_product_group.title
        result.description = sale_product_group.label
        for product in sale_product_group.products:
            result.lines.append(TaskLine.from_sale_product(product))
        return result
class WebMapItem(Base):
    """Tree node of a web map: a 'root', a 'group' or a 'layer' item.

    Children of a node are kept ordered by `position` via ordering_list.
    """
    __tablename__ = 'webmap_item'

    id = db.Column(db.Integer, primary_key=True)
    parent_id = db.Column(db.Integer, db.ForeignKey('webmap_item.id'))
    item_type = db.Column(db.Enum('root', 'group', 'layer'), nullable=False)
    position = db.Column(db.Integer, nullable=True)
    display_name = db.Column(db.Unicode, nullable=True)
    group_expanded = db.Column(db.Boolean, nullable=True)
    layer_style_id = db.Column(db.ForeignKey(Resource.id), nullable=True)
    layer_enabled = db.Column(db.Boolean, nullable=True)
    layer_transparency = db.Column(db.Float, nullable=True)
    layer_min_scale_denom = db.Column(db.Float, nullable=True)
    layer_max_scale_denom = db.Column(db.Float, nullable=True)
    layer_adapter = db.Column(db.Unicode, nullable=True)
    draw_order_position = db.Column(db.Integer, nullable=True)

    parent = db.relationship(
        'WebMapItem', remote_side=id,
        backref=db.backref(
            'children',
            order_by=position,
            cascade='all, delete-orphan',
            collection_class=ordering_list('position')))

    style = db.relationship(
        'Resource',
        # Temporary solution that allows to automatically
        # remove web-map elements when style is removed
        backref=db.backref('webmap_items', cascade='all'))

    def to_dict(self):
        """Serialize this item (and, for containers, its subtree) to a dict."""
        if self.item_type in ('root', 'group'):
            children = list(self.children)
            # FIX: the original called sorted(children, key=...) and discarded
            # the returned list; sort in place so serialization order actually
            # follows `position`.
            children.sort(key=lambda c: c.position)

            if self.item_type == 'root':
                return dict(
                    item_type=self.item_type,
                    children=[i.to_dict() for i in children],
                )
            elif self.item_type == 'group':
                return dict(
                    item_type=self.item_type,
                    display_name=self.display_name,
                    group_expanded=self.group_expanded,
                    children=[i.to_dict() for i in children],
                )
        elif self.item_type == 'layer':
            # Parent of the style resource, if any, for client-side grouping.
            style_parent_id = None
            if self.style and self.style.parent:
                style_parent_id = self.style.parent.id

            return dict(
                item_type=self.item_type,
                display_name=self.display_name,
                layer_enabled=self.layer_enabled,
                layer_transparency=self.layer_transparency,
                layer_style_id=self.layer_style_id,
                style_parent_id=style_parent_id,
                layer_min_scale_denom=self.layer_min_scale_denom,
                layer_max_scale_denom=self.layer_max_scale_denom,
                layer_adapter=self.layer_adapter,
                draw_order_position=self.draw_order_position,
            )

    def from_dict(self, data):
        """Populate this item (and recursively its children) from a dict
        shaped like the output of :meth:`to_dict`."""
        assert data['item_type'] == self.item_type

        if data['item_type'] in ('root', 'group') and 'children' in data:
            # Rebuild the subtree from scratch; delete-orphan cascade
            # disposes of the previous children.
            self.children = []
            for i in data['children']:
                child = WebMapItem(parent=self, item_type=i['item_type'])
                child.from_dict(i)
                self.children.append(child)

        for a in ('display_name', 'group_expanded', 'layer_enabled',
                  'layer_adapter', 'layer_style_id', 'layer_transparency',
                  'layer_min_scale_denom', 'layer_max_scale_denom',
                  'draw_order_position'):
            if a in data:
                setattr(self, a, data[a])
class CampaignAction(BaseScopedNameMixin, db.Model):
    """
    Actions available to a user in a campaign
    """
    __tablename__ = 'campaign_action'
    #: Campaign
    campaign_id = db.Column(None, db.ForeignKey('campaign.id'), nullable=False)
    # Actions are kept ordered per campaign via ordering_list on 'seq'.
    campaign = db.relationship(Campaign, backref=db.backref(
        'actions',
        cascade='all, delete-orphan',
        order_by='CampaignAction.seq',
        collection_class=ordering_list('seq')))
    parent = db.synonym('campaign')
    #: Sequence number
    seq = db.Column(db.Integer, nullable=False, default=0)
    #: Is this action live?
    public = db.Column(db.Boolean, nullable=False, default=False)
    #: Action type
    # NOTE(review): default=CAMPAIGN_ACTION passes the whole enum container
    # as the column default rather than a specific member — confirm against
    # CAMPAIGN_ACTION's definition whether this is intended.
    type = db.Column(db.Enum(*CAMPAIGN_ACTION.keys(),
                             name='campaign_action_type_enum'),
                     nullable=False,
                     default=CAMPAIGN_ACTION)
    # type = db.Column(db.Char(1),
    #     db.CheckConstraint('type IN (%s)' % ', '.join(["'%s'" % k for k in CAMPAIGN_ACTION.keys()])),
    #     nullable=False,
    #     default=CAMPAIGN_ACTION)
    #: Action category (for buttons)
    category = db.Column(db.Unicode(20), nullable=False, default=u'default')
    #: Icon to accompany text
    icon = db.Column(db.Unicode(20), nullable=True)
    #: Group (for RSVP buttons)
    group = db.Column(db.Unicode(20), nullable=True)
    #: Target link (for follow link actions; blank = ?)
    link = deferred(db.Column(db.Unicode(250), nullable=True))
    #: Form
    form = deferred(db.Column(JsonDict, nullable=False, server_default='{}'))
    #: Post action message
    message = db.Column(db.UnicodeText, nullable=False, default=u'')

    # Action names are unique within a campaign.
    __table_args__ = (db.UniqueConstraint('campaign_id', 'name'), )

    @classmethod
    def get(cls, campaign, name):
        # Unique per (campaign, name) thanks to the table constraint.
        return cls.query.filter_by(campaign=campaign, name=name).one_or_none()

    def url_for(self, action='view', _external=False, **kwargs):
        # Admin routes for this action; returns None for unrecognised
        # `action` values (including the default 'view').
        if action == 'edit':
            return url_for('campaign_action_edit',
                           campaign=self.campaign.name, action=self.name,
                           _external=_external, **kwargs)
        elif action == 'delete':
            return url_for('campaign_action_delete',
                           campaign=self.campaign.name, action=self.name,
                           _external=_external, **kwargs)
        elif action == 'csv':
            return url_for('campaign_action_csv',
                           campaign=self.campaign.name, action=self.name,
                           _external=_external, **kwargs)
class Item(BaseScopedNameMixin, db.Model):
    """A sellable item within an item collection, with prices, discounts
    and availability accounting."""
    __tablename__ = 'item'
    __uuid_primary_key__ = True
    # Item names are unique within an item collection.
    __table_args__ = (db.UniqueConstraint('item_collection_id', 'name'), )

    description = MarkdownColumn('description', default='', nullable=False)
    seq = db.Column(db.Integer, nullable=False)
    item_collection_id = db.Column(None, db.ForeignKey('item_collection.id'),
                                   nullable=False)
    # Items are kept 1-based ordered per collection via ordering_list.
    item_collection = db.relationship(
        'ItemCollection',
        backref=db.backref(
            'items',
            cascade='all, delete-orphan',
            order_by=seq,
            collection_class=ordering_list('seq', count_from=1),
        ),
    )
    parent = db.synonym('item_collection')
    category_id = db.Column(None, db.ForeignKey('category.id'), nullable=False)
    category = db.relationship(Category, backref=db.backref(
        'items', cascade='all, delete-orphan'))
    quantity_total = db.Column(db.Integer, default=0, nullable=False)
    discount_policies = db.relationship(
        'DiscountPolicy',
        secondary=item_discount_policy,  # type: ignore[has-type]
        backref='items',
        lazy='dynamic',
    )
    assignee_details = db.Column(JsonDict, server_default='{}', nullable=False)
    event_date = db.Column(db.Date, nullable=True)
    cancellable_until = db.Column(db.TIMESTAMP(timezone=True), nullable=True)
    transferable_until = db.Column(db.TIMESTAMP(timezone=True), nullable=True)
    restricted_entry = db.Column(db.Boolean, default=False, nullable=False)
    # ISO 3166-2 code. Eg: KA for Karnataka
    place_supply_state_code = db.Column(db.Unicode(3), nullable=True)
    # ISO country code
    place_supply_country_code = db.Column(db.Unicode(2), nullable=True)

    # Attributes readable by users holding the 'item_owner' role.
    __roles__ = {
        'item_owner': {
            'read': {
                'id',
                'title',
                'description_text',
                'description_html',
                'quantity_total',
                'quantity_available',
                'active_price',
                'assignee_details',
            }
        }
    }

    def roles_for(self, actor=None, anchors=()):
        # Owners of the collection's organization get the 'item_owner' role.
        roles = super(Item, self).roles_for(actor, anchors)
        if self.item_collection.organization.userid in \
                actor.organizations_owned_ids():
            roles.add('item_owner')
        return roles

    def current_price(self):
        """Return the current price object for an item."""
        return self.price_at(utcnow())

    def has_higher_price(self, current_price):
        """Check if item has a higher price than the given current price."""
        return Price.query.filter(
            Price.end_at > current_price.end_at,
            Price.item == self,
            Price.discount_policy_id.is_(None),
        ).notempty()

    def discounted_price(self, discount_policy):
        """Return the discounted price for an item."""
        return Price.query.filter(
            Price.item == self,
            Price.discount_policy == discount_policy).one_or_none()

    def standard_prices(self):
        # Undiscounted prices, most recent start first.
        return Price.query.filter(
            Price.item == self,
            Price.discount_policy_id.is_(None)).order_by(
                Price.start_at.desc())

    def price_at(self, timestamp):
        """Return the price object for an item at a given time."""
        return (Price.query.filter(
            Price.item == self,
            Price.start_at <= timestamp,
            Price.end_at > timestamp,
            Price.discount_policy_id.is_(None),
        ).order_by(Price.created_at.desc()).first())

    @classmethod
    def get_by_category(cls, category):
        # Items in a category, in collection order.
        return cls.query.filter(Item.category == category).order_by(cls.seq)

    @hybrid_property
    def quantity_available(self):
        # Total minus confirmed sales, floored at zero.
        available_count = self.quantity_total - \
            self.confirmed_line_items.count()
        return available_count if available_count > 0 else 0

    @property
    def is_available(self):
        """
        Check if an item is available.

        Test: has a current price object and has a positive
        quantity_available
        """
        return bool(self.current_price() and self.quantity_available > 0)

    def is_cancellable(self):
        # Always cancellable when no deadline is set.
        return utcnow() < self.cancellable_until \
            if self.cancellable_until else True

    @property
    def active_price(self):
        # Amount of the current price, or None when no price is active.
        current_price = self.current_price()
        return current_price.amount if current_price else None

    @property
    def confirmed_line_items(self):
        """Return a query object preset with an item's confirmed line items."""
        return self.line_items.filter(
            LineItem.status == LINE_ITEM_STATUS.CONFIRMED)

    @with_roles(call={'item_owner'})
    def sold_count(self):
        # Confirmed, paid line items.
        return self.confirmed_line_items.filter(
            LineItem.final_amount > 0).count()

    @with_roles(call={'item_owner'})
    def free_count(self):
        # Confirmed, zero-amount line items.
        return self.confirmed_line_items.filter(
            LineItem.final_amount == 0).count()

    @with_roles(call={'item_owner'})
    def cancelled_count(self):
        return self.line_items.filter(
            LineItem.status == LINE_ITEM_STATUS.CANCELLED).count()

    @with_roles(call={'item_owner'})
    def net_sales(self):
        # Sum of confirmed line-item final amounts for this item.
        return (db.session.query(db.func.sum(LineItem.final_amount)).filter(
            LineItem.item == self,
            LineItem.status == LINE_ITEM_STATUS.CONFIRMED).first()[0])

    @classmethod
    def get_availability(cls, item_ids):
        """
        Return an availability dict.

        {'item_id': ('item title', 'quantity_total', 'line_item_count')}
        """
        items_dict = {}
        item_tups = (db.session.query(
            cls.id, cls.title, cls.quantity_total,
            db.func.count(cls.id)).join(LineItem).filter(
                LineItem.item_id.in_(item_ids),
                LineItem.status == LINE_ITEM_STATUS.CONFIRMED,
            ).group_by(cls.id).all())
        for item_tup in item_tups:
            items_dict[str(item_tup[0])] = item_tup[1:]
        return items_dict

    def demand_curve(self):
        # Distribution of paid, confirmed line items by final_amount.
        query = (db.session.query(
            db.column('final_amount'), db.column('count')).from_statement(
                db.text(''' SELECT final_amount, count(*) FROM line_item WHERE item_id = :item_id AND final_amount > 0 AND status = :status GROUP BY final_amount ORDER BY final_amount; ''')).params(
                    item_id=self.id, status=LINE_ITEM_STATUS.CONFIRMED))
        return db.session.execute(query).fetchall()
        # NOTE(review): this chunk begins mid-class; the columns below belong
        # to ServiceInstanceServer, whose header is outside this view.
        primary_key=True)
    position = Column(Integer, nullable=False)
    creation_date = deferred(Column(DateTime, default=datetime.now,
                                    nullable=False))
    comments = deferred(Column(String(255), nullable=True))
    service_instance = relation(ServiceInstance)
    system = relation(System, uselist=False, backref='sislist')

    def __str__(self):
        return str(self.system.fqdn)

    def __repr__(self):
        return self.__class__.__name__ + " " + str(self.system.fqdn)


service_instance_server = ServiceInstanceServer.__table__
# Give the primary key an explicit, stable constraint name.
service_instance_server.primary_key.name = 'service_instance_server_pk'

table = service_instance_server

#TODO: would we like this mapped in service_instance.py instead?
# Servers on an instance are kept ordered by 'position' via ordering_list.
ServiceInstance.servers = relation(
    ServiceInstanceServer,
    collection_class=ordering_list('position'),
    order_by=[ServiceInstanceServer.__table__.c.position])
class Task(Base):
    """Class to store a task.

    """
    __tablename__ = 'tasks'
    __table_args__ = (
        UniqueConstraint('contest_id', 'num'),
        UniqueConstraint('contest_id', 'name'),
        ForeignKeyConstraint(
            ("id", "active_dataset_id"),
            ("datasets.task_id", "datasets.id"),
            onupdate="SET NULL", ondelete="SET NULL",
            # Use an ALTER query to set this foreign key after
            # both tables have been CREATEd, to avoid circular
            # dependencies.
            use_alter=True,
            name="fk_active_dataset_id"),
        CheckConstraint("token_gen_initial <= token_gen_max"),
    )

    # Auto increment primary key.
    id = Column(
        Integer,
        primary_key=True,
        # Needed to enable autoincrement on integer primary keys that
        # are referenced by a foreign key defined on this table.
        autoincrement='ignore_fk')

    # Number of the task for sorting.
    num = Column(Integer, nullable=False)

    # Contest (id and object) owning the task.
    contest_id = Column(Integer,
                        ForeignKey(Contest.id,
                                   onupdate="CASCADE", ondelete="CASCADE"),
                        nullable=False,
                        index=True)
    contest = relationship(Contest,
                           backref=backref(
                               'tasks',
                               collection_class=ordering_list('num'),
                               order_by=[num],
                               cascade="all, delete-orphan",
                               passive_deletes=True))

    # Short name and long human readable title of the task.
    name = Column(Unicode, nullable=False)
    title = Column(Unicode, nullable=False)

    # A JSON-encoded lists of strings: the language codes of the
    # statements that will be highlighted to all users for this task.
    primary_statements = Column(String, nullable=False, default="[]")

    # The parameters that control task-tokens follow. Note that their
    # effect during the contest depends on the interaction with the
    # parameters that control contest-tokens, defined on the Contest.

    # The "kind" of token rules that will be active during the contest.
    # - disabled: The user will never be able to use any token.
    # - finite: The user has a finite amount of tokens and can choose
    #   when to use them, subject to some limitations. Tokens may not
    #   be all available at start, but given periodically during the
    #   contest instead.
    # - infinite: The user will always be able to use a token.
    token_mode = Column(Enum("disabled", "finite", "infinite",
                             name="token_mode"),
                        nullable=False,
                        default="disabled")

    # The maximum number of tokens a contestant is allowed to use
    # during the whole contest (on this tasks).
    token_max_number = Column(Integer,
                              CheckConstraint("token_max_number > 0"),
                              nullable=True)

    # The minimum interval between two successive uses of tokens for
    # the same user (on this task).
    token_min_interval = Column(
        Interval,
        CheckConstraint("token_min_interval >= '0 seconds'"),
        nullable=False,
        default=timedelta())

    # The parameters that control generation (if mode is "finite"):
    # the user starts with "initial" tokens and receives "number" more
    # every "interval", but their total number is capped to "max".
    token_gen_initial = Column(Integer,
                               CheckConstraint("token_gen_initial >= 0"),
                               nullable=False,
                               default=2)
    token_gen_number = Column(Integer,
                              CheckConstraint("token_gen_number >= 0"),
                              nullable=False,
                              default=2)
    token_gen_interval = Column(
        Interval,
        CheckConstraint("token_gen_interval > '0 seconds'"),
        nullable=False,
        default=timedelta(minutes=30))
    token_gen_max = Column(Integer,
                           CheckConstraint("token_gen_max > 0"),
                           nullable=True)

    # Maximum number of submissions or user_tests allowed for each user
    # on this task during the whole contest or None to not enforce
    # this limitation.
    max_submission_number = Column(
        Integer,
        CheckConstraint("max_submission_number > 0"),
        nullable=True)
    max_user_test_number = Column(Integer,
                                  CheckConstraint("max_user_test_number > 0"),
                                  nullable=True)

    # Minimum interval between two submissions or user_tests for this
    # task, or None to not enforce this limitation.
    min_submission_interval = Column(
        Interval,
        CheckConstraint("min_submission_interval > '0 seconds'"),
        nullable=True)
    min_user_test_interval = Column(
        Interval,
        CheckConstraint("min_user_test_interval > '0 seconds'"),
        nullable=True)

    # The scores for this task will be rounded to this number of
    # decimal places.
    score_precision = Column(Integer,
                             CheckConstraint("score_precision >= 0"),
                             nullable=False,
                             default=0)

    # Score mode for the task.
    score_mode = Column(Enum(SCORE_MODE_MAX_TOKENED_LAST, SCORE_MODE_MAX,
                             name="score_mode"),
                        nullable=False,
                        default=SCORE_MODE_MAX_TOKENED_LAST)

    # Active Dataset (id and object) currently being used for scoring.
    # The ForeignKeyConstraint for this column is set at table-level.
    active_dataset_id = Column(Integer, nullable=True)
    active_dataset = relationship(
        'Dataset',
        foreign_keys=[active_dataset_id],
        # XXX In SQLAlchemy 0.8 we could remove this:
        primaryjoin='Task.active_dataset_id == Dataset.id',
        # Use an UPDATE query *after* an INSERT query (and *before* a
        # DELETE query) to set (and unset) the column associated to
        # this relationship.
        post_update=True)
class Recipe(db.Model):
    """Recipe model for storing recipes.

    A recipe belongs to a user (``username``), may be a remix of another
    recipe (self-referential ``parent_id`` / ``remixes``), and owns ordered
    collections of ingredients and steps plus a set of images.
    """
    __tablename__ = "recipe"

    id = db.Column(db.Integer, primary_key=True, autoincrement=True)
    title = db.Column(db.String(255), nullable=False)
    created_on = db.Column(db.DateTime, nullable=False)
    # Times are stored as free-form strings, not intervals.
    cooktime = db.Column(db.String(255), nullable=False)
    preptime = db.Column(db.String(255), nullable=False)
    totaltime = db.Column(db.String(255), nullable=False)
    username = db.Column(db.String(50), db.ForeignKey('user.username'),
                         nullable=False)
    # Self-referential remix tree: children are exposed as ``remixes``,
    # the originating recipe as the ``parent`` backref.
    remixes = db.relationship("Recipe",
                              backref=db.backref('parent', remote_side=[id]))
    parent_id = db.Column(db.Integer, db.ForeignKey('recipe.id'))
    public = db.Column(db.Boolean)
    difficulty = db.Column(db.Integer, nullable=True)
    servings = db.Column(db.String(255), nullable=False)
    source = db.Column(db.String(255), nullable=False)
    calories = db.Column(db.Integer, nullable=True)
    cost = db.Column(db.Float, nullable=True)
    description = db.Column(db.Text)
    # Index into ``images`` selecting the featured image (None = none set).
    f_image = db.Column(db.Integer, nullable=True)
    images = db.relationship('Image',
                             order_by='Image.id',
                             backref='recipe',
                             cascade='all,delete,delete-orphan')
    # ordering_list keeps Ingredient.number / Step.number contiguous,
    # counting from 1, as the collections are mutated.
    ingredients = db.relationship(
        'Ingredient',
        order_by='Ingredient.number',
        collection_class=ordering_list('number', count_from=1),
        backref='recipe',
        lazy=True,
        cascade='all,delete,delete-orphan')
    steps = db.relationship(
        'Step',
        order_by='Step.number',
        collection_class=ordering_list('number', count_from=1),
        backref='recipe',
        lazy=True,
        cascade='all,delete,delete-orphan')

    @hybrid_property
    def remix_count(self):
        """Number of direct remixes of this recipe."""
        return len(self.remixes)

    @remix_count.expression
    def _remix_count_expression(cls):
        # SQL form: correlated COUNT of child rows referencing this recipe.
        q = db.select([db.func.count(Recipe.parent_id)]).\
            where(Recipe.parent_id == cls.id).\
            label("remix_count")
        return q

    @hybrid_property
    def featured_image(self):
        """The image selected by ``f_image``, or None when not set."""
        if self.f_image is not None:
            return self.images[self.f_image]
        return None

    @hybrid_property
    def community_images(self):
        """Images from other users plus the featured images of remixes."""
        return (list(
            filter(lambda img: img.username != self.username, self.images)) + [
                r.featured_image
                for r in self.remixes if r.featured_image is not None
            ])

    @hybrid_property
    def owner_images(self):
        """Images uploaded by the recipe's own author."""
        return list(
            filter(lambda img: img.username == self.username, self.images))

    @hybrid_property
    def likes_count(self):
        """Number of users who liked this recipe."""
        return len(self.likers)

    #TODO make has_liked work when nested, as in viewing other user's recipes
    @hybrid_method
    def has_liked(self, username):
        """Return True when ``username`` has liked this recipe."""
        # Fix: compare against None with ``is not None`` rather than
        # ``!= None`` (PEP 8); behavior is unchanged.
        return db.session.query(likes).filter(
            likes.c.username == username).filter(
                likes.c.recipe_id == self.id).first() is not None

    @likes_count.expression
    def _likes_count_expression(cls):
        return (db.select([
            db.func.count(likes.c.username).label("likes_count")
        ]).where(likes.c.recipe_id == cls.id).label("sum_likes"))

    def __repr__(self):
        return "<Recipe '{}'>".format(self.title)
class Dimension(SurveyBase):
    """A questionnaire dimension grouping an ordered list of questions.

    Concrete subclasses own their data; shadow subclasses mirror a concrete
    original. Mutating methods therefore refuse to run on shadow instances
    and raise BusinessRuleViolation.
    """
    id = db.Column(db.Integer, db.ForeignKey(SurveyBase.id), primary_key=True)

    __tablename__ = 'dimension'
    __mapper_args__ = {'polymorphic_identity': __tablename__}

    # columns
    position = db.Column(db.Integer)
    randomize_question_order = db.Column(db.Boolean, nullable=False,
                                         default=False)

    # foreign keys
    questionnaire_id = db.Column(db.Integer, db.ForeignKey('questionnaire.id'))

    # relationships
    # ordering_list keeps Question.position in sync with list order.
    questions = db.relationship(
        'Question',
        backref='dimension',
        cascade='all, delete-orphan',
        foreign_keys=[Question.dimension_id],
        order_by='Question.position',
        collection_class=ordering_list('position')
    )

    tracker_args = {
        __('name'): TrackingType.TranslationHybrid,
        __('randomize_question_order'): TrackingType.Primitive
    }

    @property
    @abstractmethod
    def name(self) -> str:
        raise NotImplementedError

    @property
    @abstractmethod
    def name_translations(self) -> Dict[str, str]:
        raise NotImplementedError

    @property
    @abstractmethod
    def shadow(self) -> bool:
        raise NotImplementedError

    @property
    @abstractmethod
    def original_language(self) -> BabelLanguage:
        raise NotImplementedError

    def new_question(self, text: str, range_start_label, range_end_label,
                     **kwargs) -> ConcreteQuestion:
        """Create a new ConcreteQuestion and mirror it into all copies.

        Returns the new concrete question; raises BusinessRuleViolation
        when called on a shadow dimension.
        """
        if not isinstance(self, ConcreteDimension):
            raise BusinessRuleViolation("Can't modify shadow instances!")

        question = ConcreteQuestion(text, range_start_label, range_end_label,
                                    **kwargs)
        self.questions.append(question)

        for copy in self.copies:
            s_question = ShadowQuestion(question)
            s_question.owners = copy.owners
            # Bug fix: append the shadow question, not the concrete one.
            # Previously ``question`` was appended, so the configured
            # ShadowQuestion was discarded and the same concrete instance
            # was attached to several dimensions.
            copy.questions.append(s_question)

        item_added.send(self, added_item=question)
        return question

    def add_shadow_question(self, concrete_question: ConcreteQuestion)\
            -> ShadowQuestion:
        """Attach a ShadowQuestion mirroring ``concrete_question``.

        Raises BusinessRuleViolation when called on a shadow dimension.
        """
        if not isinstance(self, ConcreteDimension):
            raise BusinessRuleViolation("Can't modify shadow instances!")

        question = ShadowQuestion(concrete_question)
        self.questions.append(question)
        item_added.send(self, added_item=question)
        return question

    def remove_question(self, question):
        """Remove ``question`` and delete all of its shadow copies.

        Raises BusinessRuleViolation on shadow dimensions and KeyError when
        the question does not belong to this dimension.
        """
        if not isinstance(self, ConcreteDimension):
            raise BusinessRuleViolation("Can't modify shadow instances!")

        if question not in self.questions:
            raise KeyError("Question not in Dimension.")

        copies = []
        if question.shadow:
            # Collect shadows of the same concrete question living in
            # copies of this dimension.
            for dimension_copy in self.copies:
                copies += list(filter(
                    lambda q: q.concrete_id == question.concrete_id
                    if q.shadow else False,
                    dimension_copy.questions
                ))
        else:
            copies = question.copies

        for copy in copies:
            db.session.delete(copy)

        item_removed.send(self, removed_item_name=question.text)
        self.questions.remove(question)
class Transaction(Base):
    """A transaction row with ordered inputs and outputs.

    Converts to/from the wire-protocol representation via
    ``from_protocol`` / ``to_protocol``.
    """
    __tablename__ = 'transaction'

    id = Column(Integer,
                Sequence('transaction_id'),
                primary_key=True,
                nullable=False,
                unique=True,
                index=True)
    # Raw 32-byte transaction hash.
    tx_hash = Column(BINARY(32), nullable=False)  # , unique=True?
    version = Column(BigInteger, nullable=False)
    lock_time = Column(BigInteger, nullable=False)
    # ordering_list keeps TxIn/TxOut.transaction_index in list order.
    tx_inputs = relationship(
        'TxIn',
        order_by='TxIn.transaction_index',
        collection_class=ordering_list('transaction_index'))
    tx_outputs = relationship(
        'TxOut',
        order_by='TxOut.transaction_index',
        collection_class=ordering_list('transaction_index'))
    block_id = Column(Integer, ForeignKey('block.id'), index=True)
    # Position of this transaction within its containing block.
    block_index = Column(Integer, nullable=False)

    # block = relationship('Block')

    __table_args__ = (
        Index(
            'ix_transaction_tx_hash',
            tx_hash,
            # We only ever do == against tx_hash so a hash index will be
            # more efficient
            postgresql_using='hash',
        ),
    )

    @classmethod
    def from_protocol(cls, in_tx):
        """Build a Transaction row from a protocol-level transaction."""
        out_tx = cls()
        out_tx.tx_hash = in_tx.tx_hash
        out_tx.version = in_tx.version
        out_tx.lock_time = in_tx.lock_time
        out_tx.tx_inputs = [TxIn.from_protocol(txin) for txin in in_tx.tx_in]
        out_tx.tx_outputs = [
            TxOut.from_protocol(txout) for txout in in_tx.tx_out
        ]
        return out_tx

    def to_protocol(self):
        """Convert this row back into a ``protocol.Transaction``."""
        return protocol.Transaction(
            self.version, [txin.to_protocol() for txin in self.tx_inputs],
            [txout.to_protocol() for txout in self.tx_outputs],
            self.lock_time)

    @classmethod
    def copy_obj(cls, obj):
        """Copy ``obj`` (including its inputs/outputs) into a new instance."""
        ret = cls()
        ret.id = obj.id
        ret.tx_hash = obj.tx_hash
        ret.version = obj.version
        ret.lock_time = obj.lock_time
        ret.tx_inputs = [TxIn.copy_obj(txin) for txin in obj.tx_inputs]
        ret.tx_outputs = [TxOut.copy_obj(txout) for txout in obj.tx_outputs]
        ret.block_id = obj.block_id
        return ret

    def __repr__(self):
        return 'Transaction(%s, %r, [%s], [%s], %r)' % (binascii.hexlify(
            self.tx_hash), self.version, ', '.join(
                repr(tx) for tx in self.tx_inputs), ', '.join(
                    repr(tx) for tx in self.tx_outputs), self.lock_time)
class Block(Base):
    """A block row with its transactions and chain-position metadata.

    ``prev_block_id`` / ``depth`` are filled in lazily by
    ``update_chain_metadata`` once the parent block is known.
    """
    __tablename__ = 'block'

    id = Column(Integer,
                Sequence('block_id'),
                primary_key=True,
                nullable=False,
                unique=True,
                index=True)
    # Raw 32-byte block hash.
    block_hash = Column(BINARY(32), nullable=False)  # , unique=True?
    version = Column(BigInteger, nullable=False)
    prev_block_hash = Column(BINARY(32), nullable=False)
    merkle_root = Column(BINARY(32), nullable=False)
    timestamp = Column(BigInteger, nullable=False)
    bits = Column(BigInteger, nullable=False)
    nonce = Column(BigInteger, nullable=False)
    # ordering_list keeps Transaction.block_index in list order.
    transactions = relationship('Transaction',
                                order_by='Transaction.block_index',
                                collection_class=ordering_list('block_index'))
    prev_block_id = Column(Integer, nullable=True, index=True)
    depth = Column(Integer, nullable=True)

    __table_args__ = (
        Index(
            'ix_block_block_hash',
            block_hash,
            # We only ever do == against block_hash so a hash index will be
            # more efficient
            postgresql_using='hash',
        ),
        Index(
            'ix_block_prev_block_hash',
            prev_block_hash,
            postgresql_where=prev_block_id.is_(None),
            # We only ever do == against prev_block_hash so a hash index
            # will be more efficient
            postgresql_using='hash',
        ),
    )

    # Class-level (shared across instances) queue of blocks whose parent
    # was unknown when they were inserted; retried by
    # update_chain_metadata whenever new chain metadata arrives.
    pending_meta_updates = []

    def bulk_insert(self, session):
        """Insert this block and its transactions/txins/txouts using
        multi-row INSERTs on the raw connection (bypassing the ORM unit
        of work for speed)."""
        conn = session.connection()
        data = row_to_dict(self)
        data.pop('id')
        ret = conn.execute(self.__table__.insert().values(data))
        self.id = ret.inserted_primary_key[0]

        start = datetime.datetime.now()
        data = []
        for txn in self.transactions:
            txn.block_id = self.id
            # Pull the next sequence value up front so child rows can
            # reference txn.id before the bulk insert happens.
            (txn.id, ) = session.execute(
                txn.__table__.columns['id'].default.next_value()).fetchone()
            row = row_to_dict(txn)
            data.append(row)
        # NOTE(review): reuses the loop variable ``txn`` after the loop;
        # a block with zero transactions would raise NameError here —
        # presumably empty blocks never reach this point, confirm.
        conn.execute(txn.__table__.insert().values(data))
        log.info('Processing transactions took %s',
                 datetime.datetime.now() - start)

        start = datetime.datetime.now()
        data = []
        for txn in self.transactions:
            for txin in txn.tx_inputs:
                txin.transaction_id = txn.id
                row = row_to_dict(txin)
                # id comes from the table default on insert.
                row.pop('id')
                data.append(row)
        # NOTE(review): same loop-variable reuse pattern as above (txin).
        conn.execute(txin.__table__.insert().values(data))
        log.info('Processing txins took %s', datetime.datetime.now() - start)

        start = datetime.datetime.now()
        data = []
        for txn in self.transactions:
            for txout in txn.tx_outputs:
                txout.transaction_id = txn.id
                row = row_to_dict(txout)
                row.pop('id')
                data.append(row)
        conn.execute(txout.__table__.insert().values(data))
        log.info('Processing txouts took %s', datetime.datetime.now() - start)

    def update_chain_metadata(self, session, _update_pending=True):
        """Resolve this block's parent and depth; retry queued blocks.

        Returns True on success, False when the parent block (or its
        depth) is not yet known — in which case the block is queued on
        the shared ``pending_meta_updates`` list for a later retry.
        """
        # TODO: This needs to be rewritten to keep an index of blocks not
        # in the known block chain and properly choose the right ones when
        # updating pending metadata
        # TODO: update IOLoop.max_height
        res = session.query(
            Block.depth,
            Block.id).filter(Block.block_hash == self.prev_block_hash).first()
        if res is None:
            # NOTE(review): log.warn is a deprecated alias of log.warning.
            log.warn('Previous block not found, queueing metadata update (%s)',
                     binascii.hexlify(self.block_hash))
            self.pending_meta_updates.append(self)
            return False
        else:
            depth, self.prev_block_id = res
            # Depth is parent depth + 1, or unknown if the parent's depth
            # is itself unknown.
            self.depth = depth + 1 if depth is not None else None
            session.query(Block).filter(Block.id == self.id).update(
                values={
                    'depth': self.depth,
                    'prev_block_id': self.prev_block_id
                })
            session.commit()
            if self.depth is None:
                log.warn(
                    'Previous block found but depth is null, queueing metadata update (%s)',
                    binascii.hexlify(self.block_hash))
                self.pending_meta_updates.append(self)
                return False
        if _update_pending and self.pending_meta_updates:
            # Keep sweeping the pending queue until a full pass makes no
            # progress; _update_pending=False prevents recursion.
            success = True
            while success:
                success = False
                for to_update in self.pending_meta_updates[:]:
                    log.info('Running pending meta update for %s',
                             binascii.hexlify(to_update.block_hash))
                    if to_update.update_chain_metadata(
                            session, _update_pending=False):
                        success = True
                        log.info(
                            'Pending metadata update succeeded for %s',
                            binascii.hexlify(to_update.block_hash))
                        self.pending_meta_updates.remove(to_update)
                    else:
                        log.error('Pending metadata update failed for %s',
                                  binascii.hexlify(to_update.block_hash))
        return True

    def update_metadata(self, session, _update_pending=True):
        """Match txins to the txouts they spend, prune the unmatched /
        unspent bookkeeping tables, then update chain metadata."""
        ## TODO: This does not take into account branching block chains
        log.info('Matching txin to previous txout')
        start = datetime.datetime.now()
        # INSERT..FROM SELECT joining unmatched txins to the txouts they
        # reference (by previous tx hash + output index), returning the
        # matched id pairs.
        inserted = session.execute(
            insert(TxIn_TxOut).returning(
                TxIn_TxOut.txin_id, TxIn_TxOut.txout_id).from_select(
                    [TxIn_TxOut.txin_id, TxIn_TxOut.txout_id],
                    session.query(
                        TxInUnmatched.txin_id.label('txin_id'),
                        TxOut.id.label('txout_id')).join(
                            Transaction, Transaction.tx_hash == TxInUnmatched.
                            previous_output_transaction_hash).join(
                                TxOut,
                                (Transaction.id == TxOut.transaction_id)
                                & (TxOut.transaction_index == TxInUnmatched.
                                   previous_output_index)))).fetchall()
        log.info('...%i rows, %s', len(inserted),
                 datetime.datetime.now() - start)
        txin_ids = [i[0] for i in inserted]
        txout_ids = [i[1] for i in inserted]

        log.info('Removing outdated txin_unmatched records')
        start = datetime.datetime.now()
        res = session.query(TxInUnmatched).filter(
            TxInUnmatched.txin_id.in_(txin_ids)).delete(
                synchronize_session=False)
        log.info('...%i rows, %s', res, datetime.datetime.now() - start)
        start = datetime.datetime.now()

        log.info('Removing outdated txout_unspent records')
        start = datetime.datetime.now()
        res = session.query(TxOutUnspent).filter(
            TxOutUnspent.txout_id.in_(txout_ids)).delete(
                synchronize_session=False)
        log.info('...%i rows, %s', res, datetime.datetime.now() - start)

        # Bulk deletes with synchronize_session=False leave the session
        # stale; expire everything before continuing.
        session.expire_all()
        return self.update_chain_metadata(session)

    @classmethod
    def from_protocol(cls, block):
        """Build a Block row from a protocol-level block."""
        out_block = cls()
        out_block.block_hash = block.block_hash
        out_block.version = block.version
        out_block.prev_block_hash = block.prev_block_hash
        out_block.merkle_root = block.merkle_root
        out_block.timestamp = block.timestamp
        out_block.bits = block.bits
        out_block.nonce = block.nonce
        out_block.transactions = [
            Transaction.from_protocol(tx) for tx in block.txns
        ]
        return out_block

    def to_protocol(self):
        """Convert this row back into a ``protocol.Block``."""
        return protocol.Block(self.version, self.prev_block_hash,
                              self.merkle_root, self.timestamp, self.bits,
                              self.nonce,
                              [tx.to_protocol() for tx in self.transactions])

    @classmethod
    def copy_obj(cls, obj):
        """Copy ``obj`` (including its transactions) into a new instance."""
        ret = cls()
        ret.id = obj.id
        ret.block_hash = obj.block_hash
        ret.version = obj.version
        ret.prev_block_hash = obj.prev_block_hash
        ret.merkle_root = obj.merkle_root
        ret.timestamp = obj.timestamp
        ret.bits = obj.bits
        ret.nonce = obj.nonce
        ret.transactions = [
            Transaction.copy_obj(tx) for tx in obj.transactions
        ]
        return ret

    def __repr__(self):
        return 'Block(%r, %s, %s, %r, %r, %r, %r)' % (
            self.version, binascii.hexlify(
                self.prev_block_hash), binascii.hexlify(self.merkle_root),
            self.timestamp, self.bits, self.nonce, self.transactions)
class EndpointSIP(Base):
    """A PJSIP endpoint and its per-section option lists.

    Each PJSIP configuration section (aor, auth, endpoint, registration,
    registration_outbound_auth, identify, outbound_auth) is stored in its
    own one-to-one ``*Section`` row; the ``*_section_options`` hybrid
    properties expose each section as a list of [key, value] pairs and
    create/delete the section row on assignment. Endpoints can inherit
    options from parent templates (``templates``); ``combined_*`` merges
    inherited options with the endpoint's own.
    """
    __tablename__ = 'endpoint_sip'
    __table_args__ = (UniqueConstraint('name'), )

    uuid = Column(UUID(as_uuid=True),
                  server_default=text('uuid_generate_v4()'),
                  primary_key=True)
    label = Column(Text)
    name = Column(Text, nullable=False)
    asterisk_id = Column(Text)
    tenant_uuid = Column(String(36),
                         ForeignKey('tenant.uuid', ondelete='CASCADE'),
                         nullable=False)
    transport_uuid = Column(UUID(as_uuid=True),
                            ForeignKey('pjsip_transport.uuid'))
    # True when this row is itself a template other endpoints inherit from.
    template = Column(Boolean, server_default=text('false'))

    transport = relationship('PJSIPTransport')
    # Parent-template links, kept ordered by priority via ordering_list.
    template_relations = relationship(
        'EndpointSIPTemplate',
        primaryjoin='EndpointSIP.uuid == EndpointSIPTemplate.child_uuid',
        cascade='all, delete-orphan',
        order_by='EndpointSIPTemplate.priority',
        collection_class=ordering_list('priority'),
    )
    # Convenience view of template_relations as the parent endpoints.
    templates = association_proxy(
        'template_relations',
        'parent',
        creator=lambda _sip: EndpointSIPTemplate(parent=_sip),
    )
    # All options of this endpoint, read from a database view.
    _options = column_property(
        select([column('options')]).where(column('root') == uuid).select_from(
            table('endpoint_sip_options_view')).as_scalar())
    _aor_section = relationship(
        'AORSection',
        uselist=False,
        cascade="all, delete-orphan",
        passive_deletes=True,
    )
    _auth_section = relationship(
        'AuthSection',
        uselist=False,
        cascade="all, delete-orphan",
        passive_deletes=True,
    )
    _endpoint_section = relationship(
        'EndpointSection',
        uselist=False,
        cascade="all, delete-orphan",
        passive_deletes=True,
    )
    _registration_section = relationship('RegistrationSection',
                                         uselist=False,
                                         cascade="all, delete-orphan",
                                         passive_deletes=True)
    _registration_outbound_auth_section = relationship(
        'RegistrationOutboundAuthSection',
        uselist=False,
        cascade="all, delete-orphan",
        passive_deletes=True,
    )
    _identify_section = relationship(
        'IdentifySection',
        uselist=False,
        cascade="all, delete-orphan",
        passive_deletes=True,
    )
    _outbound_auth_section = relationship(
        'OutboundAuthSection',
        uselist=False,
        cascade="all, delete-orphan",
        passive_deletes=True,
    )

    def __init__(self,
                 aor_section_options=None,
                 auth_section_options=None,
                 endpoint_section_options=None,
                 registration_section_options=None,
                 registration_outbound_auth_section_options=None,
                 identify_section_options=None,
                 outbound_auth_section_options=None,
                 caller_id=None,
                 *args,
                 **kwargs):
        # Each non-empty option list is turned into its section row and
        # handed to the declarative constructor via kwargs.
        if aor_section_options:
            kwargs['_aor_section'] = AORSection(options=aor_section_options, )
        if auth_section_options:
            kwargs['_auth_section'] = AuthSection(
                options=auth_section_options, )
        if endpoint_section_options:
            kwargs['_endpoint_section'] = EndpointSection(
                options=endpoint_section_options, )
        if registration_section_options:
            kwargs['_registration_section'] = RegistrationSection(
                options=registration_section_options, )
        if registration_outbound_auth_section_options:
            kwargs[
                '_registration_outbound_auth_section'] = RegistrationOutboundAuthSection(
                    options=registration_outbound_auth_section_options, )
        if identify_section_options:
            kwargs['_identify_section'] = IdentifySection(
                options=identify_section_options, )
        if outbound_auth_section_options:
            kwargs['_outbound_auth_section'] = OutboundAuthSection(
                options=outbound_auth_section_options, )
        super(EndpointSIP, self).__init__(*args, **kwargs)
        # caller_id is applied last: the setter needs _endpoint_section.
        if caller_id:
            self.caller_id = caller_id

    def __repr__(self):
        return 'EndpointSIP(label={})'.format(self.label)

    @hybrid_property
    def aor_section_options(self):
        """AOR section options as a list of pairs ([] when absent)."""
        if not self._aor_section:
            return []
        return self._aor_section.options

    @aor_section_options.setter
    def aor_section_options(self, options):
        # Create the section on first assignment, update it when options
        # are given, delete it when assigned an empty value.
        if not self._aor_section:
            self._aor_section = AORSection(options=options)
        elif options:
            self._aor_section.options = options
        else:
            self._aor_section = None

    @hybrid_property
    def auth_section_options(self):
        """Auth section options as a list of pairs ([] when absent)."""
        if not self._auth_section:
            return []
        return self._auth_section.options

    @auth_section_options.setter
    def auth_section_options(self, options):
        if not self._auth_section:
            self._auth_section = AuthSection(options=options)
        elif options:
            self._auth_section.options = options
        else:
            self._auth_section = None

    @hybrid_property
    def endpoint_section_options(self):
        """Endpoint section options as a list of pairs ([] when absent)."""
        if not self._endpoint_section:
            return []
        return self._endpoint_section.options

    @endpoint_section_options.setter
    def endpoint_section_options(self, options):
        if not self._endpoint_section:
            self._endpoint_section = EndpointSection(options=options)
        elif options:
            self._endpoint_section.options = options
        else:
            self._endpoint_section = None

    def _get_combined_section_options(self, section_name):
        """Inherited template options followed by this endpoint's own."""
        inherited_options = getattr(
            self, 'inherited_{}_section_options'.format(section_name))
        endpoint_options = getattr(self,
                                   '{}_section_options'.format(section_name))
        return inherited_options + endpoint_options

    @hybrid_property
    def combined_aor_section_options(self):
        return self._get_combined_section_options('aor')

    @hybrid_property
    def combined_auth_section_options(self):
        return self._get_combined_section_options('auth')

    @hybrid_property
    def combined_endpoint_section_options(self):
        return self._get_combined_section_options('endpoint')

    @hybrid_property
    def combined_registration_section_options(self):
        return self._get_combined_section_options('registration')

    @hybrid_property
    def combined_registration_outbound_auth_section_options(self):
        return self._get_combined_section_options('registration_outbound_auth')

    @hybrid_property
    def combined_identify_section_options(self):
        return self._get_combined_section_options('identify')

    @hybrid_property
    def combined_outbound_auth_section_options(self):
        return self._get_combined_section_options('outbound_auth')

    def _get_inherited_section_options(self, section_name):
        """Flatten the combined options of every parent template, in
        template priority order (recursive through ``combined_*``)."""
        if not self.templates:
            return []

        options = []
        for template in self.templates:
            template_options = getattr(
                template,
                'combined_{}_section_options'.format(section_name),
            )
            for k, v in template_options:
                options.append([k, v])
        return options

    @hybrid_property
    def inherited_aor_section_options(self):
        return self._get_inherited_section_options('aor')

    @hybrid_property
    def inherited_auth_section_options(self):
        return self._get_inherited_section_options('auth')

    @hybrid_property
    def inherited_endpoint_section_options(self):
        return self._get_inherited_section_options('endpoint')

    @hybrid_property
    def inherited_registration_section_options(self):
        return self._get_inherited_section_options('registration')

    @hybrid_property
    def inherited_registration_outbound_auth_section_options(self):
        return self._get_inherited_section_options(
            'registration_outbound_auth')

    @hybrid_property
    def inherited_identify_section_options(self):
        return self._get_inherited_section_options('identify')

    @hybrid_property
    def inherited_outbound_auth_section_options(self):
        return self._get_inherited_section_options('outbound_auth')

    @hybrid_property
    def registration_section_options(self):
        """Registration section options ([] when absent)."""
        if not self._registration_section:
            return []
        return self._registration_section.options

    @registration_section_options.setter
    def registration_section_options(self, options):
        if not self._registration_section:
            self._registration_section = RegistrationSection(options=options)
        elif options:
            self._registration_section.options = options
        else:
            self._registration_section = None

    @hybrid_property
    def registration_outbound_auth_section_options(self):
        """Registration outbound-auth section options ([] when absent)."""
        if not self._registration_outbound_auth_section:
            return []
        return self._registration_outbound_auth_section.options

    @registration_outbound_auth_section_options.setter
    def registration_outbound_auth_section_options(self, options):
        if not self._registration_outbound_auth_section:
            self._registration_outbound_auth_section = RegistrationOutboundAuthSection(
                options=options, )
        elif options:
            self._registration_outbound_auth_section.options = options
        else:
            self._registration_outbound_auth_section = None

    @hybrid_property
    def identify_section_options(self):
        """Identify section options ([] when absent)."""
        if not self._identify_section:
            return []
        return self._identify_section.options

    @identify_section_options.setter
    def identify_section_options(self, options):
        if not self._identify_section:
            self._identify_section = IdentifySection(options=options)
        elif options:
            self._identify_section.options = options
        else:
            self._identify_section = None

    @hybrid_property
    def outbound_auth_section_options(self):
        """Outbound-auth section options ([] when absent)."""
        if not self._outbound_auth_section:
            return []
        return self._outbound_auth_section.options

    @outbound_auth_section_options.setter
    def outbound_auth_section_options(self, options):
        if not self._outbound_auth_section:
            self._outbound_auth_section = OutboundAuthSection(options=options)
        elif options:
            self._outbound_auth_section.options = options
        else:
            self._outbound_auth_section = None

    line = relationship('LineFeatures', uselist=False)
    trunk = relationship('TrunkFeatures', uselist=False)

    @hybrid_property
    def caller_id(self):
        """First 'callerid' value from the endpoint section, or None."""
        if not self._endpoint_section:
            return

        matching_options = self._endpoint_section.find('callerid')
        # Return the first match (loop exits on first iteration).
        for key, value in matching_options:
            return value

    @caller_id.expression
    def caller_id(cls):
        return cls._query_option_value('callerid')

    @caller_id.setter
    def caller_id(self, caller_id):
        if not self._endpoint_section:
            self._endpoint_section = EndpointSection()
        self._endpoint_section.add_or_replace('callerid', caller_id)

    def update_caller_id(self, user, extension=None):
        """Set caller_id as '"name" <num>' derived from the user/extension."""
        # Copied from usersip
        name, num = user.extrapolate_caller_id(extension)
        caller_id = u'"{}"'.format(name)
        if num:
            caller_id += u" <{}>".format(num)
        self.caller_id = caller_id

    def endpoint_protocol(self):
        return 'sip'

    @hybrid_property
    def username(self):
        """First 'username' value from the auth section, or None."""
        return self._find_first_value(self._auth_section, 'username')

    @username.expression
    def username(cls):
        return select([EndpointSIPSectionOption.value]).where(
            and_(
                cls.uuid == EndpointSIPSection.endpoint_sip_uuid,
                EndpointSIPSection.type == 'auth',
                EndpointSIPSectionOption.endpoint_sip_section_uuid ==
                EndpointSIPSection.uuid,
                EndpointSIPSectionOption.key == 'username',
            )).as_scalar()

    @hybrid_property
    def password(self):
        """First 'password' value from the auth section, or None."""
        return self._find_first_value(self._auth_section, 'password')

    @password.expression
    def password(cls):
        return select([EndpointSIPSectionOption.value]).where(
            and_(
                cls.uuid == EndpointSIPSection.endpoint_sip_uuid,
                EndpointSIPSection.type == 'auth',
                EndpointSIPSectionOption.endpoint_sip_section_uuid ==
                EndpointSIPSection.uuid,
                EndpointSIPSectionOption.key == 'password',
            )).as_scalar()

    def _find_first_value(self, section, key):
        """Return the first value for ``key`` in ``section``, or None."""
        if not section:
            return
        matching_options = section.find(key)
        for _, value in matching_options:
            return value

    def get_option_value(self, option):
        """Look up ``option`` in the aggregated options view, or None."""
        if not self._options:
            return None
        return self._options.get(option, None)

    @classmethod
    def _query_option_value(cls, option):
        # SQL-side option lookup into the JSON options column property.
        if option is None:
            return None

        return cls._options.remote_attr[option].astext
class Post(db.Model):
    """A knowledge post, mirroring a post in the knowledge repository.

    Holds the post's metadata (authors, tags, groups, status) plus
    aggregated view/vote/comment statistics exposed as hybrid properties.
    """
    __tablename__ = 'posts'

    id = db.Column(db.Integer, primary_key=True)
    uuid = db.Column(db.String(100), unique=True)
    path = db.Column(db.String(512), unique=True)
    project = db.Column(db.String(512), nullable=True)  # DEPRECATED
    repository = db.Column(db.String(512))
    revision = db.Column(db.Integer())

    title = db.Column(db.Text())
    tldr = db.Column(db.Text)
    keywords = db.Column(db.Text)
    thumbnail = db.Column(db.Text())
    private = db.Column(db.Integer())

    created_at = db.Column(db.DateTime, default=func.now())
    updated_at = db.Column(db.DateTime, default=func.now())

    # Author links, kept in display order via ordering_list on 'order'.
    _authors_assoc = db.relationship("PostAuthorAssoc",
                                     order_by='PostAuthorAssoc.order',
                                     collection_class=ordering_list('order'),
                                     cascade="all, delete-orphan")
    _authors = association_proxy(
        '_authors_assoc',
        'author',
        creator=lambda author: PostAuthorAssoc(author=author),
    )

    @hybrid_property
    def authors(self):
        return self._authors

    @authors.setter
    def authors(self, authors):
        """
        Sets the authors of the post to the users given, creating User
        objects from bare username strings where necessary
        """
        user_objs = []
        for author in authors:
            if not isinstance(author, User):
                author = author.strip()
                author = User(username=author)
            user_objs.append(author)
        self._authors = user_objs

    @hybrid_property
    def authors_string(self):
        """Comma-separated formatted author names."""
        return ', '.join([author.format_name for author in self.authors])

    @authors_string.expression
    def authors_string(self):
        # No SQL equivalent is provided; fail loudly if queried.
        raise NotImplementedError

    _tags = db.relationship("Tag",
                            secondary=assoc_post_tag,
                            backref='posts',
                            lazy='subquery')

    @hybrid_property
    def tags(self):
        return self._tags

    @tags.setter
    def tags(self, tags):
        """
        Sets the tags of the post to the tags given in
        comma delimited string form in tags_string
        """
        tag_objs = []
        for tag in tags:
            if not isinstance(tag, Tag):
                tag = tag.strip()
                # Strip a leading '#' from hashtag-style tags.
                if tag[0] == "#":
                    tag = tag[1:]
                tag = Tag(name=tag)
            tag_objs.append(tag)
        self._tags = tag_objs

    @property
    def contains_excluded_tag(self):
        """True when any tag is in the app's EXCLUDED_TAGS config list."""
        excluded_tags = current_app.config.get('EXCLUDED_TAGS', [])
        return any([tag.name in excluded_tags for tag in self.tags])

    _groups = db.relationship("Group",
                              secondary=assoc_post_group,
                              backref='posts',
                              lazy='subquery')

    @hybrid_property
    def groups(self):
        return self._groups

    @groups.setter
    def groups(self, groups):
        # given a list of group_names, we add it.
        group_objs = []
        for group in groups:
            if not isinstance(group, Group):
                group = Group(name=group.strip())
            group_objs.append(group)
        # create an implicit group, group_post.id, to add
        # single users to
        group = Group(name=":post_group_" + str(self.id))
        # this created group should have the author associated with it
        # so they can add people to the post
        group.users = self.authors
        group_objs.append(group)
        self._groups = group_objs

    # Raw status integer; exposed as a PostStatus enum via the hybrid below.
    _status = db.Column('status', db.Integer(), default=0)

    @hybrid_property
    def status(self):
        # Missing status is treated as 0 (the default status).
        return current_repo.PostStatus(self._status or 0)

    @status.expression
    def status(self):
        return func.coalesce(self._status, 0)

    @status.setter
    def status(self, status):
        if status is None:
            self._status = None
        else:
            assert isinstance(
                status, KnowledgeRepository.PostStatus
            ), "Status must be an instance of KnowledgeRepository.PostStatus.Status or None"
            self._status = status.value

    @hybrid_property
    def is_published(self):
        return self.status == current_repo.PostStatus.PUBLISHED

    @is_published.expression
    def is_published(self):
        return func.coalesce(self._status,
                             0) == current_repo.PostStatus.PUBLISHED.value

    # Page views of this post (dynamic query, filtered to 'view' actions).
    _views = db.relationship(
        "PageView",
        lazy='dynamic',
        primaryjoin="and_(foreign(PageView.object_id)==Post.id, "
        "PageView.object_type=='post',"
        "PageView.object_action=='view')")

    @hybrid_property
    def views(self):
        return self._views.all()

    @hybrid_property
    def view_count(self):
        return self._views.count()

    @view_count.expression
    def view_count(self):
        return (select([func.count(
            PageView.id)]).where(PageView.object_id == self.id).where(
                PageView.object_type == 'post').label("view_count"))

    @hybrid_property
    def view_user_count(self):
        """Number of distinct users who viewed this post."""
        return (db.session.query(func.count(distinct(
            PageView.user_id))).filter(PageView.object_id == self.id).filter(
                PageView.object_type == 'post').scalar())

    @view_user_count.expression
    def view_user_count(self):
        return (select([func.count(distinct(
            PageView.user_id))]).where(PageView.object_id == self.id).where(
                PageView.object_type == 'post').label("view_user_count"))

    _votes = db.relationship(
        "Vote",
        lazy='dynamic',
        primaryjoin="and_(foreign(Vote.object_id)==Post.id, "
        "Vote.object_type=='post')")

    @hybrid_property
    def votes(self):
        return self._votes.all()

    @hybrid_property
    def vote_count(self):
        """ Given the path of a post, return the total likes """
        return self._votes.count()

    @vote_count.expression
    def vote_count(self):
        return (select([func.count(Vote.id)
                        ]).where(Vote.object_id == self.id).where(
                            Vote.object_type == 'post').label("vote_count"))

    def vote_counted_for_user(self, user_id):
        """True when ``user_id`` has already voted on this post."""
        return (db_session.query(Vote).filter(
            and_(Vote.object_id == self.id, Vote.object_type == 'post',
                 Vote.user_id == user_id)).first()) is not None

    _comments = db.relationship(
        "Comment",
        lazy="dynamic",
        primaryjoin="and_(foreign(Comment.post_id)==Post.id, "
        "Comment.type=='post')")

    @hybrid_property
    def comments(self):
        return self._comments.all()

    @hybrid_property
    def comment_count(self):
        """ Given the path of the a post, return the total comments """
        return self._comments.count()

    @comment_count.expression
    def comment_count(self):
        # NOTE(review): this filters on Comment.object_type while the
        # relationship primaryjoin above uses Comment.type — confirm
        # Comment has both attributes with the same semantics.
        return (select([func.count(
            Comment.id)]).where(Comment.post_id == self.id).where(
                Comment.object_type == 'post').label("comments_count"))

    @property
    def kp(self):
        """The underlying KnowledgePost from the current repository."""
        return current_repo.post(self.path)

    @property
    def text(self):
        return self.kp.read()

    def update_metadata_from_kp(self, kp):
        """
        Copy metadata from a knowledge post into this model row.

        :param kp: The knowledge post to copy fields from
        :type kp: KnowledgePost
        :return: None
        :rtype: None
        """
        headers = kp.headers
        self.uuid = kp.uuid
        self.path = kp.path
        self.project = headers.get('project')
        self.repository = kp.repository_uri
        self.revision = kp.revision
        self.title = headers['title']
        self.tldr = headers['tldr']
        self.authors = headers.get('authors', [])
        self.tags = headers.get('tags', [])
        self.keywords = get_keywords(self)
        self.thumbnail = kp.thumbnail_uri
        self.created_at = headers['created_at']
        self.updated_at = headers['updated_at']
        # updated_at must never precede created_at.
        if self.created_at > self.updated_at:
            self.updated_at = self.created_at
        self.status = kp.status
        self.private = 0
        # we do this check so that no header (None) and False are treated
        # the same
        if headers.get('private', ''):
            self.private = 1
            self.groups = headers.get('allowed_groups', [])
""" Returns the non-aggregated results of the season, ordered by tournament """ # See above method for the rationale about SQL performance season_tournaments = config.orm.query(Tournament).options( joinedload(Tournament.results) #@UndefinedVariable ).join(Tournament.season).filter(Season.id == self.id).order_by(Tournament.id).all() #@UndefinedVariable return [result for tournament in season_tournaments for result in tournament.results if result.rank is not None] @property def players(self): """ Returns the players who attended at least one of the games of the season """ return config.orm.query(User).join(Result.user).join(Result.tournament).join(Tournament.season).filter(#@UndefinedVariable Season.id == self.id ).filter(Result.rank != None).all() def reorder_tournaments(self): """ Reorders (i.e. enforce position) the tournaments by date. Useful when a tournament was appended in the end of the collection for example """ self.tournaments.sort(key=lambda tournament: tournament.tournament_dt) self.tournaments.reorder() def __repr__(self) : return "<Season(%s,%s)>" % (self.start_year, self.end_year) mapper(Season, seasons_table, properties={ "tournaments": relationship(Tournament, lazy="joined", backref="season", collection_class=ordering_list("position", count_from=1), order_by=Tournament.position) #@UndefinedVariable }) web.debug("[MODEL] Successfully mapped Season class")
return cls.extra_columns ## Mappers meta.mapper( Resource, resource_table, properties={ "resource_group": orm.relation( ResourceGroup, # all resources including deleted # formally package_resources_all backref=orm.backref( "resources_all", collection_class=ordering_list("position"), cascade="all, delete", order_by=resource_table.c.position, ), ) }, order_by=[resource_table.c.resource_group_id], extension=[vdm.sqlalchemy.Revisioner(resource_revision_table), extension.PluginMapperExtension()], ) meta.mapper( ResourceGroup, resource_group_table, properties={ "package": orm.relation(
class User(Serializable, db.Model):
    """Application user with a role and an ordered queue of papers."""

    __tablename__ = 'user'
    id = Column(Integer, Sequence('user_id'), primary_key=True)
    role = relationship("Role", backref='users_of_role')
    role_id = Column(Integer, ForeignKey('role.id'))
    username = Column(Text)
    password = Column(Text)  # passlib hash, never the plain text
    email = Column(Text)
    created_at = Column(DateTime, default=now)
    updated_at = Column(DateTime, onupdate=now)
    # Ordered reading queue; ordering_list keeps UserHasPaper.position in
    # sync with each row's place in the list.
    paper_queue = relationship(
        "UserHasPaper",
        order_by="UserHasPaper.position",
        collection_class=ordering_list('position'),
    )
    # papers = relationship('UserHasPaper', backref='users_of_paper',lazy='dynamic')
    _dict_fields = ['id', 'role_id', 'username', 'email']

    def __init__(self, username, password, role_id, email=''):
        self.username = username
        self.password = self.hash_password(password)
        self.role_id = role_id
        self.email = email

    def hash_password(self, password):
        """Return a salted hash of *password*."""
        return custom_app_context.encrypt(password)

    def verify_password(self, password):
        """Check *password* against the stored hash."""
        return custom_app_context.verify(password, self.password)

    def generate_auth_token(self, expiration=36000):
        """Return a signed, expiring token embedding user and role ids."""
        s = Serializer(SECRET_KEY, expires_in=expiration)
        return s.dumps({'id': self.id, 'role_id': self.role_id})

    @staticmethod
    def get_by_primary_key(id):
        """Return the user with primary key *id*; raise if not found."""
        user = User.query.filter_by(id=id).first()
        if user:
            return user
        else:
            raise Exception('no user with id %r' % (id,))

    def add_paper(self, paper_id):
        """Append the paper with *paper_id* to this user's queue.

        Returns the UserHasPaper association row, or False when no such
        paper exists. Re-adding an already-queued paper resets its
        ``finished`` flag instead of creating a new row.
        """
        paper = Paper.query.filter_by(id=paper_id).first()
        if paper:
            # BUG FIX: the association class is module-level, not an
            # attribute of User -- ``self.UserHasPaper`` raised
            # AttributeError. Also filter on user_id so we do not pick up
            # another user's association row.
            has_paper = UserHasPaper.query.filter_by(
                paper_id=paper_id, user_id=self.id).first()
            if has_paper is None:
                has_paper = UserHasPaper()
                has_paper.paper = paper
            else:
                has_paper.finished = False
            self.paper_queue.append(has_paper)
            db.session.commit()
            return has_paper
        return False


class UserHasPaper(Serializable, db.Model):
    """Association row linking a user to one queued paper."""

    __tablename__ = 'userhaspaper'
    paper_id = Column(String(128), ForeignKey('paper.id'), primary_key=True)
    user_id = Column(Integer, ForeignKey('user.id'), primary_key=True)
    finished = Column(db.Boolean, default=False)
    created_at = Column(DateTime, default=now)
    paper = relationship("Paper")
    # Maintained by ordering_list on User.paper_queue.
    position = Column(Integer)
    # BUG FIX: a second, duplicate ``created_at = Column(...)`` definition
    # was removed; it silently shadowed the one above.
    _dict_fields = ['paper', 'finished', 'created_at', 'position']
class Listener(base_models.BASE, base_models.IdMixin,
               base_models.ProjectMixin, models.TimestampMixin,
               base_models.NameMixin, base_models.TagMixin):
    """Listener model: one front-end protocol/port of a load balancer."""

    __data_model__ = data_models.Listener

    __tablename__ = "listener"

    __v2_wsme__ = listener.ListenerResponse

    # A load balancer may expose each protocol port at most once.
    __table_args__ = (sa.UniqueConstraint(
        'load_balancer_id', 'protocol_port',
        name='uq_listener_load_balancer_id_protocol_port'),
    )

    description = sa.Column(sa.String(255), nullable=True)
    protocol = sa.Column(sa.String(16),
                         sa.ForeignKey("protocol.name",
                                       name="fk_listener_protocol_name"),
                         nullable=False)
    protocol_port = sa.Column(sa.Integer(), nullable=False)
    connection_limit = sa.Column(sa.Integer, nullable=True)
    load_balancer_id = sa.Column(
        sa.String(36),
        sa.ForeignKey("load_balancer.id",
                      name="fk_listener_load_balancer_id"),
        nullable=True)
    tls_certificate_id = sa.Column(sa.String(36), nullable=True)
    default_pool_id = sa.Column(
        sa.String(36),
        sa.ForeignKey("pool.id", name="fk_listener_pool_id"),
        nullable=True)
    provisioning_status = sa.Column(
        sa.String(16),
        sa.ForeignKey("provisioning_status.name",
                      name="fk_listener_provisioning_status_name"),
        nullable=False)
    operating_status = sa.Column(
        sa.String(16),
        sa.ForeignKey("operating_status.name",
                      name="fk_listener_operating_status_name"),
        nullable=False)
    enabled = sa.Column(sa.Boolean(), nullable=False)

    load_balancer = orm.relationship("LoadBalancer", uselist=False,
                                     back_populates="listeners")
    default_pool = orm.relationship("Pool", uselist=False,
                                    back_populates="_default_listeners")
    sni_containers = orm.relationship(
        'SNI', cascade='delete', uselist=True,
        backref=orm.backref('listener', uselist=False))
    # ordering_list keeps L7Policy.position contiguous, starting at 1.
    l7policies = orm.relationship(
        'L7Policy', uselist=True, order_by='L7Policy.position',
        collection_class=orderinglist.ordering_list(
            'position', count_from=1),
        cascade='delete', back_populates='listener')

    peer_port = sa.Column(sa.Integer(), nullable=True)
    insert_headers = sa.Column(sa.PickleType())
    timeout_client_data = sa.Column(sa.Integer, nullable=True)
    timeout_member_connect = sa.Column(sa.Integer, nullable=True)
    timeout_member_data = sa.Column(sa.Integer, nullable=True)
    timeout_tcp_inspect = sa.Column(sa.Integer, nullable=True)

    _tags = orm.relationship(
        'Tags',
        single_parent=True,
        lazy='subquery',
        cascade='all,delete-orphan',
        primaryjoin='and_(foreign(Tags.resource_id)==Listener.id)')

    # This property should be a unique list of the default_pool and anything
    # referenced by enabled L7Policies with at least one rule that also
    # reference this listener. The intent is that listener.pools should be a
    # unique list of pools this listener is *actually* using.
    @property
    def pools(self):
        _pools = []
        _p_ids = []  # ids already collected, to keep the result unique
        if self.default_pool:
            _pools.append(self.default_pool)
            _p_ids.append(self.default_pool.id)
        # Pools reachable via enabled policies that actually have rules.
        l7_pools = [
            p.redirect_pool for p in self.l7policies
            if p.redirect_pool is not None and len(p.l7rules) > 0
            and p.enabled is True
        ]
        for p in l7_pools:
            if p.id not in _p_ids:
                _pools.append(p)
                _p_ids.append(p.id)
        return _pools
class Participant(UserMixin, Base, db.Model):
    """A study participant: owns the branch stack, status and metadata."""

    id = db.Column(db.Integer, primary_key=True)
    # Stack of Branch objects; ordering_list keeps Branch.index aligned with
    # each branch's position in the stack.
    branch_stack = db.relationship('Branch', backref='part',
                                   order_by='Branch.index',
                                   collection_class=ordering_list('index'),
                                   foreign_keys='Branch._part_id')
    current_branch = db.relationship('Branch', uselist=False,
                                     foreign_keys='Branch._part_head_id')

    @property
    def current_page(self):
        # Page the participant is on: head page of the current branch.
        return self.current_branch.current_page

    @property
    def pages(self):
        # All pages across the branch stack, in stack order.
        return [p for b in self.branch_stack for p in b.pages]

    @property
    def questions(self):
        # All questions across the stack, ordered by creation (id).
        questions = [q for b in self.branch_stack for q in b.questions]
        questions.sort(key=lambda q: q.id)
        return questions

    _page_htmls = db.relationship('PageHtml', backref='part', lazy='dynamic')

    g = db.Column(MutableDictType, default={})
    _completed = db.Column(db.Boolean, default=False)
    end_time = db.Column(db.DateTime)
    _meta = db.Column(MutableDictType, default={})
    previous_status = db.Column(db.String(16))
    updated = db.Column(db.Boolean, default=True)
    start_time = db.Column(db.DateTime)
    _time_expired = db.Column(db.Boolean, default=False)

    @property
    def completed(self):
        return self._completed

    @completed.setter
    @send_data
    def completed(self, completed):
        self._completed = completed

    @property
    def time_expired(self):
        return self._time_expired

    @time_expired.setter
    @send_data
    def time_expired(self, time_expired):
        self._time_expired = time_expired

    @property
    def status(self):
        # 'completed' takes precedence over 'timed_out' if both flags are set.
        if self.completed:
            return 'completed'
        if self.time_expired:
            return 'timed_out'
        return 'in_progress'

    def __init__(self, start_navigation, meta={}):
        """Initialize Participant

        Sets up the global dictionary g and metadata. Then initializes the
        root branch.
        """
        # NOTE(review): ``meta={}`` is a shared mutable default and the
        # caller's dict is handed to ``ds.meta.append`` un-copied -- confirm
        # DataStore never mutates it.
        ds = DataStore.query.first()
        ds.meta.append(meta)
        ds.update_status(self)
        self.end_time = self.start_time = datetime.utcnow()
        self.meta = meta.copy()
        self.current_branch = root = start_navigation()
        self.branch_stack.append(root)
        root.current_page = root.start_page
        root._isroot = True
        super().__init__()

    def update_end_time(self):
        self.end_time = datetime.utcnow()

    """Data packaging"""

    @property
    def meta(self):
        # Refresh bookkeeping keys before handing the dict out.
        self._meta['ID'] = self.id
        self._meta['end_time'] = self.end_time
        self._meta['start_time'] = self.start_time
        self._meta['status'] = self.status
        return self._meta

    @meta.setter
    def meta(self, meta):
        self._meta = meta

    @property
    def data(self):
        """Participant data

        Note that Questions are added to the dataframe in the order in which
        they were created (i.e. by id). This is not necessarily the order in
        which they appeared to the Participant.
        """
        self.set_order_all()
        questions = self.questions
        df = DataFrame()
        df.add(data=self.meta, all_rows=True)
        [df.add(data=q.pack_data(), all_rows=q.all_rows) for q in questions]
        df.pad()
        return df

    def set_order_all(self):
        """Set the order for all Questions

        A Question's order is the order in which it appeared to the
        Participant relative to other Questions of the same variable. These
        functions walk through the survey and sets the Question order.

        Note that a Branch's embedded data Questions are set before its
        Pages' Questions. A Page's timer is set before its Questions.
        """
        var_count = {}
        self.set_order_branch(self.branch_stack[0], var_count)

    def set_order_branch(self, branch, var_count):
        """Set the order for Questions belonging to a given Branch"""
        [self.set_order_question(q, var_count) for q in branch.embedded]
        [self.set_order_page(p, var_count) for p in branch.pages]
        if branch.next_branch in self.branch_stack:
            self.set_order_branch(branch.next_branch, var_count)

    def set_order_page(self, page, var_count):
        """Set the order for Questions belonging to a given Page"""
        # The page timer is ordered before the page's own questions.
        questions = [page.timer] + page.questions
        [self.set_order_question(q, var_count) for q in questions]
        if page.next_branch in self.branch_stack:
            self.set_order_branch(page.next_branch, var_count)

    def set_order_question(self, question, var_count):
        """Set the order for a given Question"""
        var = question.var
        if var is None:
            return
        if var not in var_count:
            var_count[var] = 0
        question.order = var_count[var]
        var_count[var] += 1

    """Forward navigation"""

    def _forward(self, forward_to=None):
        """Advance forward to specified Page"""
        if forward_to is None:
            return self._forward_one()
        while self.current_page.id != forward_to.id:
            self._forward_one()

    def _forward_one(self):
        """Advance forward one page"""
        if self.current_page._eligible_to_insert_branch():
            self._insert_branch(self.current_page)
        else:
            self.current_branch._forward()
        self._forward_recurse()

    def _insert_branch(self, origin):
        """Grow and insert new Branch to branch_stack"""
        next_branch = origin._grow_branch()
        self.branch_stack.insert(self.current_branch.index + 1, next_branch)
        self._increment_head()

    def _forward_recurse(self):
        """Recursive forward function

        Advance forward until the next Page is found (i.e. is not None).
        """
        if self.current_page is not None:
            return
        if self.current_branch._eligible_to_insert_branch():
            self._insert_branch(self.current_branch)
        else:
            self._decrement_head()
            self.current_branch._forward()
        self._forward_recurse()

    """Backward navigation"""

    def _back(self, back_to=None):
        """Navigate backward to specified Page"""
        if back_to is None:
            return self._back_one()
        while self.current_page.id != back_to.id:
            self._back_one()

    def _back_one(self):
        """Navigate backward one Page"""
        if self.current_page == self.current_branch.start_page:
            self._remove_branch()
        else:
            self.current_branch._back()
        self._back_recurse()

    def _remove_branch(self):
        """Remove current branch from the branch stack"""
        self._decrement_head()
        self.branch_stack.pop(self.current_branch.index + 1)

    def _back_recurse(self):
        """Recursive back function

        Navigate backward until previous Page is found.
        """
        if self._found_previous_page():
            return
        if self.current_page is None:
            if self.current_branch.next_branch in self.branch_stack:
                self._increment_head()
            elif not self.current_branch.pages:
                self._remove_branch()
            else:
                self.current_branch._back()
        else:
            self._increment_head()
        self._back_recurse()

    def _found_previous_page(self):
        """Indicate that previous page has been found in backward navigation

        The previous page has been found when 1) the Page is not None and 2)
        it does not branch off to another Branch in the stack.
        """
        return (self.current_page is not None
                and self.current_page.next_branch not in self.branch_stack)

    """General navigation and debugging"""

    def _increment_head(self):
        self.current_branch = self.branch_stack[self.current_branch.index + 1]

    def _decrement_head(self):
        self.current_branch = self.branch_stack[self.current_branch.index - 1]

    def view_nav(self):
        """Print branch stack for debugging purposes"""
        self.branch_stack[0].view_nav()
class CatalogItem(Base):
    """A node of the catalog tree: a root, a group, or a layer leaf."""

    __tablename__ = 'catalog_item'

    id = db.Column(db.Integer, primary_key=True)
    parent_id = db.Column(db.Integer, db.ForeignKey('catalog_item.id'))
    item_type = db.Column(db.Enum('root', 'group', 'layer'), nullable=False)
    position = db.Column(db.Integer)
    display_name = db.Column(db.Unicode)
    description = db.Column(db.Unicode)
    layer_enabled = db.Column(db.Boolean)
    layer_wms_id = db.Column(db.ForeignKey(Resource.id))
    layer_wfs_id = db.Column(db.ForeignKey(Resource.id))
    layer_webmap_id = db.Column(db.ForeignKey(Resource.id))
    layer_resource_id = db.Column(db.ForeignKey(Resource.id), nullable=True)

    # Self-referential tree; ordering_list keeps ``position`` aligned with
    # each child's index in ``children``.
    parent = db.relationship(
        'CatalogItem', remote_side=id,
        backref=db.backref(
            'children', order_by=position,
            cascade='all, delete-orphan',
            collection_class=ordering_list('position')))

    def to_dict(self):
        """Serialize this item (and, for containers, its children)."""
        if self.item_type in ('root', 'group'):
            children = list(self.children)
            # BUG FIX: ``sorted(children, key=...)`` built a new list that
            # was discarded; sort in place so children are emitted by
            # position.
            children.sort(key=lambda c: c.position)
            if self.item_type == 'root':
                return dict(
                    item_type=self.item_type,
                    children=[i.to_dict() for i in children],
                )
            elif self.item_type == 'group':
                return dict(
                    item_type=self.item_type,
                    display_name=self.display_name,
                    description=self.description,
                    children=[i.to_dict() for i in children],
                )
        elif self.item_type == 'layer':
            return dict(
                item_type=self.item_type,
                display_name=self.display_name,
                description=self.description,
                layer_enabled=self.layer_enabled,
                layer_webmap_id=self.layer_webmap_id,
                layer_wms_id=self.layer_wms_id,
                layer_wfs_id=self.layer_wfs_id,
                layer_resource_id=self.layer_resource_id
            )

    def from_dict(self, data):
        """Populate this item (and its subtree) from a ``to_dict`` payload."""
        assert data['item_type'] == self.item_type
        if data['item_type'] in ('root', 'group') and 'children' in data:
            self.children = []
            for i in data['children']:
                # BUG FIX: passing ``parent=self`` already inserted the child
                # into ``self.children`` via the backref, and the explicit
                # append then inserted it a second time. Create detached and
                # append once; ordering_list assigns the position.
                child = CatalogItem(item_type=i['item_type'])
                child.from_dict(i)
                self.children.append(child)
        for a in ('display_name', 'description', 'layer_enabled',
                  'layer_webmap_id', 'layer_wms_id', 'layer_wfs_id',
                  'layer_resource_id'):
            if a in data:
                setattr(self, a, data[a])
def test_append_reorder(self):
    """With reorder_on_append=True, appends renumber positions 1..n even
    when an element arrives with a preset position."""
    self._setup(
        ordering_list("position", count_from=1, reorder_on_append=True))

    slide = Slide("Slide #1")
    self.assert_(not slide.bullets)
    self.assert_(len(slide.bullets) == 0)

    slide.bullets.append(Bullet("s1/b1"))
    self.assert_(slide.bullets)
    self.assert_(len(slide.bullets) == 1)
    self.assert_(slide.bullets[0].position == 1)

    slide.bullets.append(Bullet("s1/b2"))
    self.assert_(len(slide.bullets) == 2)
    for idx in range(2):
        self.assert_(slide.bullets[idx].position == idx + 1)

    # A preset position is overridden on append.
    preset = Bullet("s1/b100")
    preset.position = 100
    slide.bullets.append(preset)
    for idx in range(3):
        self.assert_(slide.bullets[idx].position == idx + 1)

    slide.bullets.append(Bullet("s1/b4"))
    for idx in range(4):
        self.assert_(slide.bullets[idx].position == idx + 1)

    # Explicit reorder is a no-op on an already-ordered list.
    slide.bullets._reorder()
    for idx in range(4):
        self.assert_(slide.bullets[idx].position == idx + 1)

    # _raw_append bypasses position assignment until the next reorder.
    slide.bullets._raw_append(Bullet("raw"))
    self.assert_(slide.bullets[4].position is None)
    slide.bullets._reorder()
    self.assert_(slide.bullets[4].position == 5)

    session = fixture_session()
    session.add(slide)
    session.flush()

    id_ = slide.id
    session.expunge_all()
    del slide

    loaded = session.query(Slide).get(id_)
    self.assert_(loaded.bullets)
    self.assert_(len(loaded.bullets) == 5)
    eq_(["s1/b1", "s1/b2", "s1/b100", "s1/b4", "raw"],
        [b.text for b in loaded.bullets])

    loaded.bullets._raw_append(Bullet("raw2"))
    loaded.bullets[-1].position = 6
    session.flush()

    session.expunge_all()
    loaded = session.query(Slide).get(id_)
    eq_(["s1/b1", "s1/b2", "s1/b100", "s1/b4", "raw", "raw2"],
        [b.text for b in loaded.bullets])
class Page(BranchingBase, CompileBase, db.Model):
    """A survey page: an ordered list of Questions plus navigation state."""

    id = db.Column(db.Integer, primary_key=True)

    @property
    def part(self):
        # Participant owning this page (via its branch), or None if detached.
        return self.branch.part if self.branch is not None else None

    _branch_id = db.Column(db.Integer, db.ForeignKey('branch.id'))
    _branch_head_id = db.Column(db.Integer, db.ForeignKey('branch.id'))
    index = db.Column(db.Integer)
    next_branch = db.relationship(
        'Branch', back_populates='origin_page', uselist=False,
        foreign_keys='Branch._origin_page_id')
    _back_to_id = db.Column(db.Integer, db.ForeignKey('page.id'))
    back_to = db.relationship(
        'Page', uselist=False, foreign_keys='Page._back_to_id')
    _forward_to_id = db.Column(db.Integer, db.ForeignKey('page.id'))
    forward_to = db.relationship(
        'Page', uselist=False, foreign_keys='Page._forward_to_id')
    _navbar_id = db.Column(db.Integer, db.ForeignKey('navbar.id'))
    # ordering_list keeps Question.index aligned with list position.
    questions = db.relationship(
        'Question', backref='page', order_by='Question.index',
        collection_class=ordering_list('index'),
        foreign_keys='Question._page_id')
    start_time = db.Column(db.DateTime)
    timer = db.relationship(
        'Question', uselist=False, foreign_keys='Question._page_timer_id')

    _back = db.Column(db.Boolean)
    back_button = db.Column(MarkupType)
    css = db.Column(MutableListType)
    _direction_from = db.Column(db.String(8))
    _direction_to = db.Column(db.String(8))
    _forward = db.Column(db.Boolean)
    forward_button = db.Column(MarkupType)
    js = db.Column(MutableListType)
    question_html = db.Column(MarkupType)
    survey_template = db.Column(db.Text)
    terminal = db.Column(db.Boolean)
    view_template = db.Column(db.Text)

    @property
    def back(self):
        # Back navigation is never available on the experiment's first page.
        return self._back and not self.first_page()

    @back.setter
    def back(self, back):
        self._back = back

    @property
    def direction_from(self):
        return self._direction_from

    @direction_from.setter
    def direction_from(self, value):
        assert value in DIRECTIONS, (
            'Direction must be one of: {}'.format(DIRECTIONS))
        self._direction_from = value

    @property
    def direction_to(self):
        return self._direction_to

    @direction_to.setter
    def direction_to(self, value):
        assert value in DIRECTIONS, (
            'Direction must be one of: {}'.format(DIRECTIONS))
        self._direction_to = value

    @property
    def forward(self):
        # Forward navigation is never available on a terminal page.
        return self._forward and not self.terminal

    @forward.setter
    def forward(self, forward):
        self._forward = forward

    compile = db.Column(FunctionType)
    debug = db.Column(FunctionType)
    navigate = db.Column(FunctionType)
    post = db.Column(FunctionType)

    def __init__(
            self, branch=None, index=None, back_to=None, forward_to=None,
            nav=None, questions=None, timer_var=None, all_rows=False,
            back=None, back_button=None, css=None, forward=True,
            forward_button=None, js=None, survey_template=None,
            terminal=False, view_template=None, compile=None, debug=None,
            navigate=None, post=None):
        # BUG FIX: ``questions=[]`` was a shared mutable default; use None
        # and substitute a fresh list per instance.
        self.set_branch(branch, index)
        self.back_to = back_to
        self.forward_to = forward_to
        self.nav = nav or current_app.nav
        self.questions = questions if questions is not None else []
        self.timer = Question(all_rows=all_rows, data=0, var=timer_var)
        self.back = back if back is not None else current_app.back
        self.back_button = back_button or current_app.back_button
        self.css = css or current_app.css
        self.forward = forward if forward is not None else current_app.forward
        self.forward_button = forward_button or current_app.forward_button
        self.js = js or current_app.js
        self.survey_template = survey_template or current_app.survey_template
        self.terminal = terminal
        self.view_template = view_template or current_app.view_template
        self.compile = compile or current_app.page_compile
        self.debug = debug or current_app.page_debug
        self.navigate = navigate
        self.post = post or current_app.page_post
        super().__init__()

    """API methods"""

    def set_branch(self, branch, index=None):
        self._set_parent(branch, index, 'branch', 'pages')

    def is_blank(self):
        """True when no question on this page has a response."""
        return all([q.response is None for q in self.questions])

    def reset_compile(self):
        # BUG FIX: previously assigned to a local variable named ``compile``,
        # leaving self.compile unchanged.
        self.compile = default_compile

    def reset_default(self):
        [q.reset_default() for q in self.questions]

    def reset_post(self):
        # BUG FIX: previously assigned to a local variable named ``post``,
        # leaving self.post unchanged.
        self.post = default_post

    def reset_timer(self):
        self.timer.data = 0

    def is_valid(self):
        """True when no question on this page has a validation error."""
        return all([q.error is None for q in self.questions])

    def first_page(self):
        """Indicate that this is the first Page in the experiment"""
        return (self.branch is not None and self.branch._isroot
                and self.index == 0)

    """Methods executed during study"""

    def compile_html(self, recompile=True):
        """Compile question html"""
        if self.question_html is None or recompile:
            self.compile(object=self)
            self.question_html = Markup(''.join(
                [q.compile_html() for q in self.questions]))
        self.start_time = datetime.utcnow()
        return self.render(render_template(self.survey_template, page=self))

    def _submit(self):
        """Operations executed on page submission

        1. Record responses
        2. If attempting to navigate backward, there is nothing more to do
        3. If attempting to navigate forward, check for valid responses
        4. If responses are invalid, return
        5. Record data
        6. Run post function
        """
        self._update_timer()
        self.direction_from = request.form['direction']
        [
            q.record_response(request.form.getlist(q.model_id))
            for q in self.questions
        ]
        if self.direction_from == 'back':
            return 'back'
        if not all([q.validate() for q in self.questions]):
            self.direction_from = 'invalid'
            return 'invalid'
        [q.record_data() for q in self.questions]
        self.post(object=self)
        # self.direction_from is 'forward' unless changed in post function
        return self.direction_from

    def _update_timer(self):
        # Accumulate seconds spent on this page into the timer question.
        if self.start_time is None:
            self.start_time = datetime.utcnow()
        delta = (datetime.utcnow() - self.start_time).total_seconds()
        self.timer.data += delta

    def view_nav(self, indent):
        """Print self and next branch for debugging purposes"""
        HEAD_PART = '<== head page of participant'
        HEAD_BRANCH = '<== head page of branch'
        head_part = HEAD_PART if self == self.part.current_page else ''
        head_branch = HEAD_BRANCH if self == self.branch.current_page else ''
        print(indent, self, head_branch, head_part)
        if self.next_branch in self.part.branch_stack:
            self.next_branch.view_nav()
class OptimizationProcedureORM(ProcedureMixin, BaseResultORM):
    """
    An Optimization procedure
    """

    __tablename__ = 'optimization_procedure'

    id = Column(Integer, ForeignKey('base_result.id', ondelete='cascade'),
                primary_key=True)

    def __init__(self, **kwargs):
        kwargs.setdefault("version", 1)
        self.procedure = "optimization"
        super().__init__(**kwargs)

    schema_version = Column(Integer, default=1)
    initial_molecule = Column(Integer, ForeignKey('molecule.id'))
    initial_molecule_obj = relationship(MoleculeORM, lazy='select',
                                        foreign_keys=initial_molecule)

    # # Results
    energies = Column(JSON)  # Column(ARRAY(Float))
    final_molecule = Column(Integer, ForeignKey('molecule.id'))
    final_molecule_obj = relationship(MoleculeORM, lazy='select',
                                      foreign_keys=final_molecule)

    # ids, calculated not stored in this table
    # NOTE: this won't work in SQLite since it returns ARRAYS,
    # aggregate_order_by
    trajectory = column_property(
        select([
            func.array_agg(
                aggregate_order_by(Trajectory.result_id, Trajectory.position))
        ]).where(Trajectory.opt_id == id))

    # array of objects (results) - Lazy - raise error of accessed
    trajectory_obj = relationship(
        Trajectory,
        cascade="all, delete-orphan",
        # backref="optimization_procedure",
        order_by=Trajectory.position,
        collection_class=ordering_list('position'))

    __mapper_args__ = {
        'polymorphic_identity': 'optimization_procedure',
        # to have separate select when querying BaseResultsORM
        'polymorphic_load': 'selectin',
    }

    __table_args__ = (
        Index('ix_optimization_program', 'program'),
        # todo: needed for procedures?
    )

    def update_relations(self, trajectory=None, **kwarg):
        # update optimization_results relations
        # self._update_many_to_many(opt_result_association, 'opt_id',
        #                           'result_id', self.id, trajectory,
        #                           self.trajectory)

        # Rebuild the ordered trajectory association from scratch;
        # ordering_list assigns each row's ``position`` on append.
        self.trajectory_obj = []
        trajectory = [] if not trajectory else trajectory
        for result_id in trajectory:
            traj = Trajectory(opt_id=int(self.id), result_id=int(result_id))
            self.trajectory_obj.append(traj)
class Project(Entity, ReferenceMixin, StatusMixin, DateRangeMixin, CodeMixin):
    """All the information about a Project in Stalker is hold in this class.

    Project is one of the main classes that will direct the others. A project
    in Stalker is a gathering point.

    It is mixed with :class:`.ReferenceMixin`, :class:`.StatusMixin`,
    :class:`.DateRangeMixin` and :class:`.CodeMixin` to give reference, status,
    schedule and code attribute. Please read the individual documentation of
    each of the mixins.

    **Project Users**

    The :attr:`.Project.users` attribute lists the users in this project. UIs
    like task creation for example will only list these users as available
    resources for this project.

    **TaskJuggler Integration**

    Stalker uses TaskJuggler for scheduling the project tasks. The
    :attr:`.Project.to_tjp` attribute generates a tjp compliant string which
    includes the project definition, the tasks of the project, the resources
    in the project including the vacation definitions and all the time logs
    recorded for the project.

    For custom attributes or directives that needs to be passed to TaskJuggler
    you can use the :attr:`.Project.custom_tjp` attribute which will be
    attached to the generated tjp file (inside the "project" directive).

    To manage all the studio projects at once (schedule them at once please
    use :class:`.Studio`).

    .. versionadded:: 0.2.13
       Multiple Repositories per Project

       Starting with v0.2.13 Project instances can have multiple Repositories,
       which allows the project files to be placed in more than one repository
       according to the need of the studio pipeline. One great advantage of
       having multiple repositories is to be able to place Published versions
       in to another repository which is placed on to a faster server.

       Also the :attr:`.repositories` attribute is not a read-only attribute
       anymore.

    .. versionadded:: 0.2.15
       Multiple Clients per Project

       It is now possible to attach multiple :class:`.Client` instances to one
       :class:`.Project` allowing to hold complex Projects to Client relations
       by using the :attr:`.ProjectClient.role` attribute of the
       :class:`.ProjectClient` class.

    :param clients: The clients which the project is affiliated with. Default
      value is an empty list.

    :type client: [:class:`.Client`]

    :param image_format: The output image format of the project. Default
      value is None.

    :type image_format: :class:`.ImageFormat`

    :param float fps: The FPS of the project, it should be a integer or float
      number, or a string literal which can be correctly converted to a float.
      Default value is 25.0.

    :param type: The type of the project. Default value is None.

    :type type: :class:`.Type`

    :param structure: The structure of the project. Default value is None

    :type structure: :class:`.Structure`

    :param repositories: A list of :class:`.Repository` instances that the
      project files are going to be stored in. You can not create a project
      without specifying the repositories argument and passing a
      :class:`.Repository` to it. Default value is None which raises a
      TypeError.

    :type repository: :class:`.Repository`.

    :param bool is_stereoscopic: a bool value, showing if the project is
      going to be a stereo 3D project, anything given as the argument will be
      converted to True or False. Default value is False.

    :param users: A list of :class:`.User`\ s holding the users in this
      project. This will create a reduced or grouped list of studio workers
      and will make it easier to define the resources for a Task related to
      this project. The default value is an empty list.
    """

    __auto_name__ = False
    __tablename__ = "Projects"
    project_id = Column("id", Integer, ForeignKey("Entities.id"),
                        primary_key=True)
    __mapper_args__ = {
        "polymorphic_identity": "Project",
        "inherit_condition": project_id == Entity.entity_id
    }

    active = Column(Boolean, default=True)

    # Clients are exposed through an association proxy over ProjectClient
    # rows.
    clients = association_proxy('client_role', 'client',
                                creator=lambda n: ProjectClient(client=n))

    client_role = relationship(
        'ProjectClient',
        back_populates='project',
        cascade='all, delete-orphan',
        cascade_backrefs=False,
        primaryjoin='Projects.c.id==Project_Clients.c.project_id')

    tasks = relationship('Task',
                         primaryjoin='Tasks.c.project_id==Projects.c.id',
                         uselist=True,
                         cascade="all, delete-orphan")

    users = association_proxy('user_role', 'user',
                              creator=lambda n: ProjectUser(user=n))

    user_role = relationship(
        'ProjectUser',
        back_populates='project',
        cascade='all, delete-orphan',
        cascade_backrefs=False,
        primaryjoin='Projects.c.id==Project_Users.c.project_id')

    # Ordered list of repository associations; ordering_list maintains
    # ProjectRepository.position.
    repositories_proxy = relationship(
        'ProjectRepository',
        back_populates='project',
        cascade='all, delete-orphan',
        cascade_backrefs=False,
        order_by='ProjectRepository.position',
        primaryjoin='Projects.c.id==Project_Repositories.c.project_id',
        collection_class=ordering_list('position'),
        doc="""The :class:`.Repository` that this project files should reside.

        Should be a list of :class:`.Repository`\ instances.
        """
    )

    repositories = association_proxy(
        'repositories_proxy',
        'repository',
        creator=lambda n: ProjectRepository(repository=n))

    structure_id = Column(Integer, ForeignKey("Structures.id"))
    structure = relationship(
        "Structure",
        primaryjoin="Project.structure_id==Structure.structure_id",
        doc="""The structure of the project. Should be an instance of
        :class:`.Structure` class""")

    image_format_id = Column(Integer, ForeignKey("ImageFormats.id"))
    image_format = relationship(
        "ImageFormat",
        primaryjoin="Projects.c.image_format_id==ImageFormats.c.id",
        doc="""The :class:`.ImageFormat` of this project.

        This value defines the output image format of the project, should be
        an instance of :class:`.ImageFormat`.
        """
    )

    fps = Column(
        Float(precision=3),
        doc="""The fps of the project.

        It is a float value, any other types will be converted to float. The
        default value is 25.0.
        """
    )

    is_stereoscopic = Column(
        Boolean,
        doc="""True if the project is a stereoscopic project"""
    )

    tickets = relationship('Ticket',
                           primaryjoin='Tickets.c.project_id==Projects.c.id',
                           uselist=True,
                           cascade="all, delete-orphan")

    def __init__(self,
                 name=None,
                 code=None,
                 clients=None,
                 repositories=None,
                 structure=None,
                 image_format=None,
                 fps=25.0,
                 is_stereoscopic=False,
                 users=None,
                 **kwargs):
        # a projects project should be self
        # initialize the project argument to self
        kwargs['project'] = self

        kwargs['name'] = name

        super(Project, self).__init__(**kwargs)
        # call the mixin __init__ methods
        ReferenceMixin.__init__(self, **kwargs)
        StatusMixin.__init__(self, **kwargs)
        DateRangeMixin.__init__(self, **kwargs)
        self.code = code

        if users is None:
            users = []
        self.users = users

        if repositories is None:
            repositories = []
        self.repositories = repositories

        self.structure = structure

        if clients is None:
            clients = []
        self.clients = clients

        self._sequences = []
        self._assets = []

        self.image_format = image_format
        self.fps = fps
        self.is_stereoscopic = bool(is_stereoscopic)
        self.active = True

    def __eq__(self, other):
        """the equality operator
        """
        return super(Project, self).__eq__(other) and \
            isinstance(other, Project)

    def __hash__(self):
        """the overridden __hash__ method
        """
        return super(Project, self).__hash__()

    @validates("fps")
    def _validate_fps(self, key, fps):
        """validates the given fps_in value
        """
        fps = float(fps)
        if fps <= 0:
            raise ValueError('%s.fps can not be 0 or a negative value' %
                             self.__class__.__name__)
        return float(fps)

    @validates("image_format")
    def _validate_image_format(self, key, image_format):
        """validates the given image format
        """
        from stalker.models.format import ImageFormat
        if image_format is not None and \
           not isinstance(image_format, ImageFormat):
            raise TypeError(
                "%s.image_format should be an instance of "
                "stalker.models.format.ImageFormat, not %s" %
                (self.__class__.__name__, image_format.__class__.__name__))
        return image_format

    @validates("structure")
    def _validate_structure(self, key, structure_in):
        """validates the given structure_in value
        """
        from stalker.models.structure import Structure
        if structure_in is not None:
            if not isinstance(structure_in, Structure):
                raise TypeError(
                    "%s.structure should be an instance of "
                    "stalker.models.structure.Structure, not %s" %
                    (self.__class__.__name__,
                     structure_in.__class__.__name__))
        return structure_in

    @validates('is_stereoscopic')
    def _validate_is_stereoscopic(self, key, is_stereoscopic_in):
        return bool(is_stereoscopic_in)

    @property
    def root_tasks(self):
        """returns a list of Tasks which have no parent
        """
        from stalker import db, Task
        with db.DBSession.no_autoflush:
            return Task.query \
                .filter(Task.project == self) \
                .filter(Task.parent == None) \
                .all()

    @property
    def assets(self):
        """returns the assets related to this project
        """
        # use joins over the session.query
        from stalker.models.asset import Asset
        return Asset.query \
            .filter(Asset.project == self) \
            .all()

    @property
    def sequences(self):
        """returns the sequences related to this project
        """
        # sequences are tasks, use self.tasks
        from stalker.models.sequence import Sequence
        return Sequence.query \
            .filter(Sequence.project == self) \
            .all()

    @property
    def shots(self):
        """returns the shots related to this project
        """
        # shots are tasks, use self.tasks
        from stalker.models.shot import Shot
        return Shot.query \
            .filter(Shot.project == self) \
            .all()

    @property
    def to_tjp(self):
        """returns a TaskJuggler compatible string representing this project
        """
        from jinja2 import Template
        temp = Template(defaults.tjp_project_template, trim_blocks=True,
                        lstrip_blocks=True)
        return temp.render({'project': self})

    @property
    def is_active(self):
        """predicate for Project.active attribute
        """
        return self.active

    @property
    def total_logged_seconds(self):
        """returns an integer representing the total TimeLog seconds recorded
        in child tasks.
        """
        total_logged_seconds = 0
        for task in self.root_tasks:
            if task.total_logged_seconds is None:
                task.update_schedule_info()
            total_logged_seconds += task.total_logged_seconds
        logger.debug('project.total_logged_seconds: %s' %
                     total_logged_seconds)
        return total_logged_seconds

    @property
    def schedule_seconds(self):
        """returns an integer showing the total amount of schedule timing of
        the in child tasks in seconds
        """
        schedule_seconds = 0
        for task in self.root_tasks:
            if task.schedule_seconds is None:
                task.update_schedule_info()
            schedule_seconds += task.schedule_seconds
        logger.debug('project.schedule_seconds: %s' % schedule_seconds)
        return schedule_seconds

    @property
    def percent_complete(self):
        """returns the percent_complete based on the total_logged_seconds and
        schedule_seconds of the root tasks.
        """
        total_logged_seconds = self.total_logged_seconds
        schedule_seconds = self.schedule_seconds
        if schedule_seconds > 0:
            return total_logged_seconds / schedule_seconds * 100
        else:
            return 0

    @property
    def open_tickets(self):
        """The list of open :class:`.Ticket`\ s in this project.

        returns a list of :class:`.Ticket` instances which has a status of
        `Open` and created in this project.
        """
        from stalker import Ticket, Status
        return Ticket.query \
            .join(Status, Ticket.status) \
            .filter(Ticket.project == self) \
            .filter(Status.code != 'CLS') \
            .all()

    @property
    def repository(self):
        """compatibility attribute for pre v0.2.13 systems.

        Returns the first repository instance in the project.repositories
        attribute if there is any or None
        """
        if self.repositories:
            return self.repositories[0]
        else:
            return None
class TorsionDriveProcedureORM(ProcedureMixin, BaseResultORM):
    """A torsion drive procedure result.

    Extends ``BaseResultORM`` with torsiondrive-specific inputs (initial
    molecules, optimization spec) and outputs (final energies, minimum
    positions, optimization history).
    """

    __tablename__ = 'torsiondrive_procedure'

    id = Column(Integer, ForeignKey('base_result.id', ondelete='cascade'), primary_key=True)

    def __init__(self, **kwargs):
        # Default to schema version 1 unless the caller pins one explicitly.
        kwargs.setdefault("version", 1)
        self.procedure = "torsiondrive"
        self.program = "torsiondrive"
        super().__init__(**kwargs)

    # input data (along with the mixin)

    # ids of the many to many relation
    initial_molecule = column_property(
        select([func.array_agg(torsion_init_mol_association.c.molecule_id)])
        .where(torsion_init_mol_association.c.torsion_id == id)
    )
    # actual objects relation M2M, never loaded here
    initial_molecule_obj = relationship(MoleculeORM,
                                        secondary=torsion_init_mol_association,
                                        uselist=True,
                                        lazy='noload')

    optimization_spec = Column(JSON)

    # Output data
    final_energy_dict = Column(JSON)
    minimum_positions = Column(JSON)

    optimization_history_obj = relationship(
        OptimizationHistory,
        cascade="all, delete-orphan",
        # backref="torsiondrive_procedure",
        order_by=OptimizationHistory.position,
        collection_class=ordering_list('position'),
        lazy='selectin')

    @hybrid_property
    def optimization_history(self):
        """calculated property when accessed, not saved in the DB
        A view of the many to many relation in the form of a dict

        :returns: dict mapping each history key to a list of
            optimization ids (as strings)
        """

        ret = {}
        try:
            for opt_history in self.optimization_history_obj:
                # Group opt ids by their key; setdefault avoids the
                # separate membership test of the original loop.
                ret.setdefault(opt_history.key, []).append(str(opt_history.opt_id))
        except Exception:
            # Deliberately best-effort: accessing the relation can raise on
            # first access (see original note); return what was collected.
            pass

        return ret

    @optimization_history.setter
    def optimization_history(self, dict_values):
        """A private copy of the opt history as a dict
        Key: list of optimization procedures"""
        return dict_values

    __table_args__ = (
        Index('ix_torsion_drive_program', 'program'),  # todo: needed for procedures?
    )

    __mapper_args__ = {
        'polymorphic_identity': 'torsiondrive_procedure',
        # to have separate select when querying BaseResultsORM
        'polymorphic_load': 'selectin',
    }

    def update_relations(self, initial_molecule=None, optimization_history=None, **kwarg):
        """Rebuild the relation rows from plain values.

        :param initial_molecule: iterable of molecule ids for the M2M table
        :param optimization_history: dict mapping key -> list of
            optimization ids; may be None (no history rows are created)
        """

        # update torsion molecule relation
        self._update_many_to_many(torsion_init_mol_association, 'torsion_id', 'molecule_id',
                                  self.id, initial_molecule, self.initial_molecule)

        self.optimization_history_obj = []
        # Bug fix: previously `optimization_history` was iterated
        # unconditionally, raising TypeError when the default (None) was
        # used; guard with `or {}` and iterate items() to avoid the
        # double dict lookup.
        for key, opt_ids in (optimization_history or {}).items():
            for opt_id in opt_ids:
                opt_history = OptimizationHistory(torsion_id=int(self.id),
                                                  opt_id=int(opt_id),
                                                  key=key)
                self.optimization_history_obj.append(opt_history)
class Task(Node):
    """
    Metadata for a task (estimation, invoice).

    Polymorphic base shared by the task documents. Holds status tracking,
    cached amounts (HT/TVA/TTC), document numbering and the relations to
    the owning company/project/customer and to the document content
    (line groups, discounts, payments, mentions).
    """
    __tablename__ = 'task'
    __table_args__ = default_table_args
    __mapper_args__ = {'polymorphic_identity': 'task'}
    # Services carrying the business logic kept out of the model.
    _autonomie_service = TaskService
    file_requirement_service = TaskFileRequirementService
    mention_service = TaskMentionService

    id = Column(
        Integer,
        ForeignKey('node.id'),
        info={'export': {'exclude': True}},
        primary_key=True,
    )
    phase_id = Column(
        ForeignKey('phase.id'),
        info={"export": {'exclude': True}},
    )
    # Workflow status code (e.g. 'draft'); history is kept in `statuses`.
    status = Column(
        String(10),
        info={
            'colanderalchemy': {'title': u"Statut"},
            'export': {'exclude': True}
        })
    status_comment = Column(
        Text,
        info={
            "colanderalchemy": {"title": u"Commentaires"},
            'export': {'exclude': True}
        },
        default="",
    )
    status_person_id = Column(
        ForeignKey('accounts.id'),
        info={
            'colanderalchemy': {
                "title": u"Dernier utilisateur à avoir modifié le document",
            },
            "export": {'exclude': True},
        },
    )
    status_date = Column(
        Date(),
        default=datetime.date.today,
        info={
            'colanderalchemy': {
                "title": u"Date du dernier changement de statut",
            },
            'export': {'exclude': True}
        })
    date = Column(
        Date(),
        info={"colanderalchemy": {"title": u"Date du document"}},
        default=datetime.date.today)
    owner_id = Column(
        ForeignKey('accounts.id'),
        info={
            "export": {'exclude': True},
        },
    )
    description = Column(
        Text,
        info={'colanderalchemy': {"title": u"Objet"}},
    )
    # Cached amounts, stored as integers (BigInteger); see
    # integer_to_amount in __json__ (5-digit precision).
    ht = Column(
        BigInteger(),
        info={
            'colanderalchemy': {"title": u"Montant HT (cache)"},
            'export': {'exclude': True},
        },
        default=0)
    tva = Column(
        BigInteger(),
        info={
            'colanderalchemy': {"title": u"Montant TVA (cache)"},
            'export': {'exclude': True},
        },
        default=0)
    ttc = Column(
        BigInteger(),
        info={
            'colanderalchemy': {"title": u"Montant TTC (cache)"},
            'export': {'exclude': True},
        },
        default=0)
    company_id = Column(
        Integer,
        ForeignKey('company.id'),
        info={
            'export': {'exclude': True},
        },
    )
    project_id = Column(
        Integer,
        ForeignKey('project.id'),
        info={
            'export': {'exclude': True},
        },
    )
    customer_id = Column(
        Integer,
        ForeignKey('customer.id'),
        info={
            'export': {'exclude': True},
        },
    )
    project_index = deferred(
        Column(
            Integer,
            info={
                'colanderalchemy': {
                    "title": u"Index dans le projet",
                },
                'export': {'exclude': True},
            },
        ),
        group='edit',
    )
    company_index = deferred(
        Column(
            Integer,
            info={
                'colanderalchemy': {
                    "title": u"Index du document à l'échelle de l'entreprise",
                },
                'export': {'exclude': True},
            },
        ),
        group='edit',
    )
    official_number = Column(
        String(255),
        info={
            'colanderalchemy': {
                "title": u"Identifiant du document (facture/avoir)",
            },
            'export': {'label': u"Numéro de facture"},
        },
        default=None,
    )
    legacy_number = Column(
        Boolean,
        default=False,
        nullable=False,
        info={
            'export': {'exclude': True},
        },
    )
    # CAE-wide identifier, built from _number_tmpl in set_numbers().
    internal_number = deferred(Column(
        String(255),
        default=None,
        info={
            'colanderalchemy': {
                "title": u"Identifiant du document dans la CAE",
            },
            'export': {'exclude': True},
        }), group='edit')
    display_units = deferred(Column(
        Integer,
        info={
            'colanderalchemy': {
                "title": u"Afficher le détail ?",
                "validator": colander.OneOf((0, 1))
            },
            'export': {'exclude': True},
        },
        default=0), group='edit')
    expenses_ht = deferred(
        Column(BigInteger(),
               info={
                   'colanderalchemy': {'title': u'Frais'},
                   'export': {'exclude': True},
               },
               default=0),
        group='edit',
    )
    address = deferred(
        Column(
            Text,
            default="",
            info={
                'colanderalchemy': {'title': u'Adresse'},
                'export': {'exclude': True},
            },
        ),
        group='edit',
    )
    workplace = deferred(
        Column(Text,
               default='',
               info={
                   'colanderalchemy': {
                       'title': u"Lieu d'éxécution des travaux"
                   },
               }))
    payment_conditions = deferred(
        Column(
            Text,
            info={
                'colanderalchemy': {
                    "title": u"Conditions de paiement",
                },
                'export': {'exclude': True},
            },
        ),
        group='edit',
    )
    notes = deferred(
        Column(
            Text,
            default="",
            info={
                'colanderalchemy': {'title': u'Notes complémentaires'},
                'export': {'exclude': True},
            },
        ),
        group='edit',
    )
    round_floor = deferred(
        Column(Boolean(),
               default=False,
               info={
                   'colanderalchemy': {
                       # NOTE(review): key is misspelled ('exlude' vs
                       # 'exclude') so colanderalchemy likely ignores it and
                       # the field stays visible — confirm before fixing, the
                       # current behavior may be relied upon.
                       'exlude': True,
                       'title': u"Méthode d'arrondi 'à l'ancienne' ? (floor)"
                   },
                   'export': {'exclude': True},
               }),
        group='edit',
    )
    business_type_id = Column(ForeignKey("business_type.id"))
    business_id = Column(ForeignKey("business.id"))
    pdf_file_id = Column(ForeignKey("file.id"),
                         info={'colanderalchemy': {'exclude': True}})

    # Organisationnal Relationships
    status_person = relationship(
        "User",
        primaryjoin="Task.status_person_id==User.id",
        backref=backref(
            "taskStatuses",
            info={
                'colanderalchemy': {'exclude': True},
                'export': {'exclude': True},
            },
        ),
        info={
            'colanderalchemy': {'exclude': True},
            'export': {'exclude': True},
        },
    )
    owner = relationship(
        "User",
        primaryjoin="Task.owner_id==User.id",
        backref=backref(
            "ownedTasks",
            info={
                'colanderalchemy': {'exclude': True},
                'export': {'exclude': True},
            },
        ),
        info={
            'colanderalchemy': {'exclude': True},
            'export': {'exclude': True},
        },
    )
    phase = relationship(
        "Phase",
        primaryjoin="Task.phase_id==Phase.id",
        backref=backref(
            "tasks",
            order_by='Task.date',
            info={
                'colanderalchemy': {'exclude': True},
                'export': {'exclude': True},
            },
        ),
        info={
            'colanderalchemy': {'exclude': True},
            'export': {'exclude': True},
        },
    )
    company = relationship(
        "Company",
        primaryjoin="Task.company_id==Company.id",
        info={
            'colanderalchemy': {'exclude': True},
            'export': {'related_key': "name", "label": "Entreprise"},
        },
    )
    project = relationship(
        "Project",
        primaryjoin="Task.project_id==Project.id",
        info={
            'colanderalchemy': {'exclude': True},
            'export': {'exclude': True},
        },
    )
    customer = relationship(
        "Customer",
        primaryjoin="Customer.id==Task.customer_id",
        backref=backref(
            'tasks',
            order_by='Task.date',
            info={
                'colanderalchemy': {'exclude': True},
                "export": {'exclude': True},
            },
        ),
        info={
            'colanderalchemy': {'exclude': True},
            'export': {'related_key': 'label', 'label': u"Client"},
        },
    )
    business_type = relationship("BusinessType",
                                 info={'colanderalchemy': {'exclude': True}})
    business = relationship("Business",
                            primaryjoin="Business.id==Task.business_id",
                            info={'colanderalchemy': {'exclude': True}})

    # Content relationships
    discounts = relationship(
        "DiscountLine",
        info={
            'colanderalchemy': {'title': u"Remises"},
            'export': {'exclude': True},
        },
        order_by='DiscountLine.tva',
        cascade="all, delete-orphan",
        back_populates='task',
    )
    payments = relationship(
        "Payment",
        primaryjoin="Task.id==Payment.task_id",
        info={
            'colanderalchemy': {'exclude': True},
            'export': {'exclude': True},
        },
        order_by='Payment.date',
        cascade="all, delete-orphan",
        back_populates='task',
    )
    mentions = relationship(
        "TaskMention",
        secondary=TASK_MENTION,
        order_by="TaskMention.order",
        info={'export': {'exclude': True}},
    )
    mandatory_mentions = relationship(
        "TaskMention",
        secondary=MANDATORY_TASK_MENTION,
        order_by="TaskMention.order",
        info={'export': {'exclude': True}},
    )
    # Ordered collection: TaskLineGroup.order is maintained by
    # ordering_list; at least one group is required (see colander info).
    line_groups = relationship(
        "TaskLineGroup",
        order_by='TaskLineGroup.order',
        cascade="all, delete-orphan",
        collection_class=ordering_list('order'),
        info={
            'colanderalchemy': {
                'title': u"Unités d'oeuvre",
                "validator": colander.Length(min=1,
                                             min_err=u"Une entrée est requise"),
                "missing": colander.required
            },
            'export': {'exclude': True},
        },
        primaryjoin="TaskLineGroup.task_id==Task.id",
        back_populates='task',
    )
    statuses = relationship(
        "TaskStatus",
        order_by="desc(TaskStatus.status_date), desc(TaskStatus.id)",
        cascade="all, delete-orphan",
        back_populates='task',
        info={
            'colanderalchemy': {'exclude': True},
            'export': {'exclude': True},
        })
    pdf_file = relationship("File",
                            primaryjoin="Task.pdf_file_id==File.id",
                            info={'colanderalchemy': {'exclude': True}})

    # Not used in latest invoices
    expenses = deferred(Column(BigInteger(),
                               info={
                                   'export': {'exclude': True},
                               },
                               default=0), group='edit')

    _name_tmpl = u"Task {}"
    # Internal-number template; filled with s=self in set_numbers().
    _number_tmpl = u"{s.project.code}_{s.customer.code}_T{s.project_index}\
_{s.date:%m%y}"

    state_manager = None

    def __init__(self, user, company, **kw):
        """Initialize a draft task owned by `user` for `company`.

        `kw` must contain 'project'; remaining keys are set via setattr.
        """
        project = kw['project']
        company_index = self._get_company_index(company)
        project_index = self._get_project_index(project)

        self.status = 'draft'
        self.company = company
        if 'customer' in kw:
            customer = kw['customer']
            # Default the document address to the customer's full address.
            self.address = customer.full_address
        self.owner = user
        self.status_person = user
        self.date = datetime.date.today()
        self.set_numbers(company_index, project_index)

        for key, value in kw.items():
            setattr(self, key, value)

        # We add a default task line group
        self.line_groups.append(TaskLineGroup(order=0))

    def initialize_business_datas(self, business=None):
        """
        Initialize the business datas related to this task

        :param obj business: instance of
        :class:`autonomie.models.project.business.Business`
        """
        if business is not None:
            self.business = business
        self.file_requirement_service.populate(self)
        self.mention_service.populate(self)

    def _get_project_index(self, project):
        """
        Return the index of the current object in the associated project

        :param obj project: A Project instance in which we will look to get
        the current doc index
        :returns: The next number
        :rtype: int
        """
        # NOTE(review): always returns -1 here; looks like a placeholder
        # overridden by subclasses — confirm.
        return -1

    def _get_company_index(self, company):
        """
        Return the index of the current object in the associated company

        :param obj company: A Company instance in which we will look to get
        the current doc index
        :returns: The next number
        :rtype: int
        """
        # NOTE(review): always returns -1 here; see _get_project_index.
        return -1

    def set_numbers(self, company_index, project_index):
        """
        Handle all attributes related to the given number

        :param int company_index: The index of the task in the company
        :param int project_index: The index of the task in its project
        """
        if company_index is None or project_index is None:
            raise Exception("Indexes should not be None")

        self.company_index = company_index
        self.project_index = project_index

        self.internal_number = self._number_tmpl.format(s=self)
        self.name = self._name_tmpl.format(project_index)

    @property
    def default_line_group(self):
        # The group appended in __init__; assumed to always exist.
        return self.line_groups[0]

    def __json__(self, request):
        """
        Return the datas used by the json renderer to represent this task
        """
        return dict(
            id=self.id,
            name=self.name,
            created_at=self.created_at.isoformat(),
            updated_at=self.updated_at.isoformat(),
            phase_id=self.phase_id,
            business_type_id=self.business_type_id,
            status=self.status,
            status_comment=self.status_comment,
            status_person_id=self.status_person_id,
            # status_date=self.status_date.isoformat(),
            date=self.date.isoformat(),
            owner_id=self.owner_id,
            description=self.description,
            ht=integer_to_amount(self.ht, 5),
            tva=integer_to_amount(self.tva, 5),
            ttc=integer_to_amount(self.ttc, 5),
            company_id=self.company_id,
            project_id=self.project_id,
            customer_id=self.customer_id,
            project_index=self.project_index,
            company_index=self.company_index,
            official_number=self.official_number,
            internal_number=self.internal_number,
            display_units=self.display_units,
            expenses_ht=integer_to_amount(self.expenses_ht, 5),
            address=self.address,
            workplace=self.workplace,
            payment_conditions=self.payment_conditions,
            notes=self.notes,
            status_history=[
                status.__json__(request) for status in self.statuses
            ],
            discounts=[
                discount.__json__(request) for discount in self.discounts
            ],
            payments=[payment.__json__(request) for payment in self.payments],
            mentions=[mention.id for mention in self.mentions],
            line_groups=[
                group.__json__(request) for group in self.line_groups
            ],
            attachments=[file_.__json__(request) for file_ in self.files],
            file_requirements=[
                file_req.__json__(request)
                for file_req in self.file_requirements
            ])

    def set_status(self, status, request, **kw):
        """
        set the status of a task through the state machine
        """
        return self.state_manager.process(status, self, request, **kw)

    def check_status_allowed(self, status, request, **kw):
        # Delegates the permission check to the state machine.
        return self.state_manager.check_allowed(status, self, request)

    @validates('status')
    def change_status(self, key, status):
        """
        fired on status change, stores a new taskstatus for each status change
        """
        logger.debug(u"# Task status change #")
        actual_status = self.status
        logger.debug(u" + was {0}, becomes {1}".format(actual_status, status))
        return status

    def get_company(self):
        """
        Return the company owning this task
        """
        return self.company

    def get_customer(self):
        """
        Return the customer of the current task
        """
        return self.customer

    def get_company_id(self):
        """
        Return the id of the company owning this task
        """
        return self.company.id

    def __repr__(self):
        return u"<Task status:{s.status} id:{s.id}>".format(s=self)

    def get_groups(self):
        # Only groups that actually contain lines.
        return [group for group in self.line_groups if group.lines]

    @property
    def all_lines(self):
        """
        Returns a list with all task lines of the current task
        """
        result = []
        for group in self.line_groups:
            result.extend(group.lines)
        return result

    def get_tva_objects(self):
        return self._autonomie_service.get_tva_objects(self)

    @classmethod
    def get_valid_tasks(cls, *args):
        return cls._autonomie_service.get_valid_tasks(cls, *args)

    @classmethod
    def get_waiting_estimations(cls, *args):
        return cls._autonomie_service.get_waiting_estimations(*args)

    @classmethod
    def get_waiting_invoices(cls, *args):
        return cls._autonomie_service.get_waiting_invoices(cls, *args)

    def gen_business(self):
        """
        Generate a business based on this Task

        :returns: A new business instance
        :rtype: :class:`autonomie.models.project.business.Business`
        """
        business = Business(
            name=self.name,
            project_id=self.project_id,
            business_type_id=self.business_type_id,
        )
        DBSESSION().add(business)
        # Flush so the business gets an id before populating indicators.
        DBSESSION().flush()
        business.populate_indicators()
        logger.debug(u"Business has id {}".format(business.id))
        business.file_requirement_service.populate(business)
        self.business_id = business.id
        DBSESSION().merge(self)
        return business

    def is_training(self):
        return self.business_type and self.business_type.name == 'training'

    def persist_pdf(self, filename, pdf_buffer):
        """
        Persist the pdf output of this task to the database

        :param obj pdf_buffer: A buffer (file, StringIO)
        :param str filename: The name of the pdf file
        """
        from autonomie.models.files import File
        # Rewind before reading: the buffer may have been written to.
        pdf_buffer.seek(0)
        self.pdf_file = File(
            name=filename,
            mimetype="application/pdf",
        )
        self.pdf_file.data = pdf_buffer.read()
        DBSESSION().merge(self)
class Task(Base):
    """Class to store a task. Not to be used directly (import it from
    SQLAlchemyAll).

    """
    __tablename__ = 'tasks'
    __table_args__ = (
        UniqueConstraint('contest_id', 'num',
                         name='cst_task_contest_id_num'),
        UniqueConstraint('contest_id', 'name',
                         name='cst_task_contest_id_name'),
        CheckConstraint("token_initial <= token_max"),
        )

    # Auto increment primary key.
    id = Column(Integer, primary_key=True)

    # Number of the task for sorting.
    num = Column(Integer, nullable=False)

    # Contest (id and object) owning the task.
    contest_id = Column(Integer,
                        ForeignKey(Contest.id,
                                   onupdate="CASCADE",
                                   ondelete="CASCADE"),
                        nullable=False,
                        index=True)
    # `num` is maintained automatically by ordering_list on the
    # contest's `tasks` collection.
    contest = relationship(
        Contest,
        backref=backref('tasks',
                        collection_class=ordering_list('num'),
                        order_by=[num],
                        cascade="all, delete-orphan",
                        passive_deletes=True))

    # Short name and long human readable title of the task.
    name = Column(String, nullable=False)
    title = Column(String, nullable=False)

    # A JSON-encoded list of strings: the language codes of the
    # statements that will be highlighted to all users for this task.
    primary_statements = Column(String, nullable=False)

    # Time and memory limits for every testcase.
    time_limit = Column(Float, nullable=True)
    memory_limit = Column(Integer, nullable=True)

    # Name of the TaskType child class suited for the task.
    task_type = Column(String, nullable=False)

    # Parameters for the task type class, JSON encoded.
    task_type_parameters = Column(String, nullable=False)

    # Name of the ScoreType child class suited for the task.
    score_type = Column(String, nullable=False)

    # Parameters for the scorer class, JSON encoded.
    score_parameters = Column(String, nullable=False)

    # Parameter to define the token behaviour. See Contest.py for
    # details. The only change is that these parameters influence the
    # contest in a task-per-task behaviour. To play a token on a given
    # task, a user must satisfy the condition of the contest and the
    # one of the task.
    token_initial = Column(Integer,
                           CheckConstraint("token_initial >= 0"),
                           nullable=True)
    token_max = Column(Integer,
                       CheckConstraint("token_max > 0"),
                       nullable=True)
    token_total = Column(Integer,
                         CheckConstraint("token_total > 0"),
                         nullable=True)
    token_min_interval = Column(
        Interval,
        CheckConstraint("token_min_interval >= '0 seconds'"),
        nullable=False)
    token_gen_time = Column(Interval,
                            CheckConstraint("token_gen_time >= '0 seconds'"),
                            nullable=False)
    token_gen_number = Column(Integer,
                              CheckConstraint("token_gen_number >= 0"),
                              nullable=False)

    # Maximum number of submissions or user_tests allowed for each user
    # on this task during the whole contest or None to not enforce
    # this limitation.
    max_submission_number = Column(
        Integer,
        CheckConstraint("max_submission_number > 0"),
        nullable=True)
    max_user_test_number = Column(Integer,
                                  CheckConstraint("max_user_test_number > 0"),
                                  nullable=True)

    # Minimum interval between two submissions or user_tests for this
    # task, or None to not enforce this limitation.
    min_submission_interval = Column(
        Interval,
        CheckConstraint("min_submission_interval > '0 seconds'"),
        nullable=True)
    min_user_test_interval = Column(
        Interval,
        CheckConstraint("min_user_test_interval > '0 seconds'"),
        nullable=True)

    # Follows the description of the fields automatically added by
    # SQLAlchemy.
    # submission_format (list of SubmissionFormatElement objects)
    # testcases (list of Testcase objects)
    # attachments (dict of Attachment objects indexed by filename)
    # managers (dict of Manager objects indexed by filename)
    # statements (dict of Statement objects indexed by language code)
    # submissions (list of Submission objects)
    # user_tests (list of UserTest objects)

    # This object (independent from SQLAlchemy) is the instance of the
    # ScoreType class with the given parameters, taking care of
    # building the scores of the submissions.
    scorer = None

    def __init__(self, name, title, statements, attachments,
                 time_limit, memory_limit, primary_statements,
                 task_type, task_type_parameters, submission_format,
                 managers, score_type, score_parameters, testcases,
                 token_initial=None, token_max=None, token_total=None,
                 token_min_interval=timedelta(),
                 token_gen_time=timedelta(), token_gen_number=0,
                 max_submission_number=None, max_user_test_number=None,
                 min_submission_interval=None, min_user_test_interval=None,
                 contest=None, num=0):
        # Build a new task; statements/attachments/managers are dicts
        # keyed by language/filename — propagate the key back onto each
        # object before storing them.
        for filename, attachment in attachments.iteritems():
            attachment.filename = filename
        for filename, manager in managers.iteritems():
            manager.filename = filename
        for language, statement in statements.iteritems():
            statement.language = language

        self.num = num
        self.name = name
        self.title = title
        self.statements = statements
        self.attachments = attachments
        self.time_limit = time_limit
        self.memory_limit = memory_limit
        # Default to an empty JSON list when no primary statements given.
        self.primary_statements = primary_statements \
            if primary_statements is not None else "[]"
        self.task_type = task_type
        self.task_type_parameters = task_type_parameters
        self.submission_format = submission_format
        self.managers = managers
        self.score_type = score_type
        self.score_parameters = score_parameters
        self.testcases = testcases
        self.token_initial = token_initial
        self.token_max = token_max
        self.token_total = token_total
        self.token_min_interval = token_min_interval
        self.token_gen_time = token_gen_time
        self.token_gen_number = token_gen_number
        self.max_submission_number = max_submission_number
        self.max_user_test_number = max_user_test_number
        self.min_submission_interval = min_submission_interval
        self.min_user_test_interval = min_user_test_interval
        self.contest = contest

    def export_to_dict(self):
        """Return object data as a dictionary.

        """
        return {
            'name': self.name,
            'title': self.title,
            'num': self.num,
            'statements': [
                statement.export_to_dict()
                for statement in self.statements.itervalues()
            ],
            'attachments': [
                attachment.export_to_dict()
                for attachment in self.attachments.itervalues()
            ],
            'time_limit': self.time_limit,
            'memory_limit': self.memory_limit,
            'primary_statements': self.primary_statements,
            'task_type': self.task_type,
            'task_type_parameters': self.task_type_parameters,
            'submission_format': [element.export_to_dict()
                                  for element in self.submission_format],
            'managers': [
                manager.export_to_dict()
                for manager in self.managers.itervalues()
            ],
            'score_type': self.score_type,
            'score_parameters': self.score_parameters,
            'testcases': [testcase.export_to_dict()
                          for testcase in self.testcases],
            'token_initial': self.token_initial,
            'token_max': self.token_max,
            'token_total': self.token_total,
            # Intervals are serialized as plain seconds.
            'token_min_interval': self.token_min_interval.total_seconds(),
            'token_gen_time': self.token_gen_time.total_seconds(),
            'token_gen_number': self.token_gen_number,
            'max_submission_number': self.max_submission_number,
            'max_user_test_number': self.max_user_test_number,
            'min_submission_interval':
                self.min_submission_interval.total_seconds()
                if self.min_submission_interval is not None else None,
            'min_user_test_interval':
                self.min_user_test_interval.total_seconds()
                if self.min_user_test_interval is not None else None,
        }

    @classmethod
    def import_from_dict(cls, data):
        """Build the object using data from a dictionary.

        """
        # NOTE(review): mutates the caller's `data` dict in place while
        # converting nested structures — callers should not reuse it.
        data['attachments'] = [
            Attachment.import_from_dict(attch_data)
            for attch_data in data['attachments']
        ]
        data['attachments'] = dict([(attachment.filename, attachment)
                                    for attachment in data['attachments']])
        data['submission_format'] = [
            SubmissionFormatElement.import_from_dict(sfe_data)
            for sfe_data in data['submission_format']
        ]
        data['managers'] = [
            Manager.import_from_dict(manager_data)
            for manager_data in data['managers']
        ]
        data['managers'] = dict([(manager.filename, manager)
                                 for manager in data['managers']])
        data['testcases'] = [
            Testcase.import_from_dict(testcase_data)
            for testcase_data in data['testcases']
        ]
        data['statements'] = [
            Statement.import_from_dict(statement_data)
            for statement_data in data['statements']
        ]
        data['statements'] = dict([(statement.language, statement)
                                   for statement in data['statements']])
        # Seconds back to timedelta, mirroring export_to_dict.
        if 'token_min_interval' in data:
            data['token_min_interval'] = \
                timedelta(seconds=data['token_min_interval'])
        if 'token_gen_time' in data:
            data['token_gen_time'] = timedelta(seconds=data['token_gen_time'])
        if 'min_submission_interval' in data and \
                data['min_submission_interval'] is not None:
            data['min_submission_interval'] = \
                timedelta(seconds=data['min_submission_interval'])
        if 'min_user_test_interval' in data and \
                data['min_user_test_interval'] is not None:
            data['min_user_test_interval'] = \
                timedelta(seconds=data['min_user_test_interval'])
        return cls(**data)
class Listener(model_base.BASEV2, model_base.HasId, model_base.HasProject):
    """Represents a v2 neutron listener."""

    NAME = 'listener'

    __tablename__ = "lbaas_listeners"

    # A load balancer may expose each port at most once.
    __table_args__ = (
        sa.schema.UniqueConstraint('loadbalancer_id', 'protocol_port',
                                   name='uniq_loadbalancer_listener_port'),
    )

    name = sa.Column(sa.String(255))
    description = sa.Column(sa.String(255))
    default_pool_id = sa.Column(sa.String(36),
                                sa.ForeignKey("lbaas_pools.id"),
                                nullable=True)
    loadbalancer_id = sa.Column(sa.String(36),
                                sa.ForeignKey("lbaas_loadbalancers.id"))
    protocol = sa.Column(sa.Enum(*lb_const.LISTENER_SUPPORTED_PROTOCOLS,
                                 name="listener_protocolsv2"),
                         nullable=False)
    # Reference to the TLS container holding the default certificate.
    default_tls_container_id = sa.Column(sa.String(128),
                                         default=None,
                                         nullable=True)
    # SNI certificates, kept ordered via SNI.position (ordering_list).
    sni_containers = orm.relationship(
        SNI,
        backref=orm.backref("listener", uselist=False),
        uselist=True,
        lazy="joined",
        primaryjoin="Listener.id==SNI.listener_id",
        order_by='SNI.position',
        collection_class=orderinglist.ordering_list('position'),
        foreign_keys=[SNI.listener_id],
        cascade="all, delete-orphan")
    protocol_port = sa.Column(sa.Integer, nullable=False)
    connection_limit = sa.Column(sa.Integer)
    admin_state_up = sa.Column(sa.Boolean(), nullable=False)
    provisioning_status = sa.Column(sa.String(16), nullable=False)
    operating_status = sa.Column(sa.String(16), nullable=False)
    default_pool = orm.relationship(PoolV2,
                                    backref=orm.backref("listeners"),
                                    lazy='joined')
    loadbalancer = orm.relationship(LoadBalancer,
                                    backref=orm.backref("listeners",
                                                        uselist=True),
                                    lazy='joined')
    # L7 policies evaluated in position order; positions start at 1.
    l7_policies = orm.relationship(
        L7Policy,
        uselist=True,
        lazy="joined",
        primaryjoin="Listener.id==L7Policy.listener_id",
        order_by="L7Policy.position",
        collection_class=orderinglist.ordering_list('position', count_from=1),
        foreign_keys=[L7Policy.listener_id],
        cascade="all, delete-orphan",
        backref=orm.backref("listener"))

    @property
    def root_loadbalancer(self):
        # The load balancer this listener ultimately belongs to.
        return self.loadbalancer
"Add a field model." self.fieldModels.append(field) s = object_session(self) if s: s.flush() def addCardModel(self, card): "Add a card model." self.cardModels.append(card) s = object_session(self) if s: s.flush() mapper(Model, modelsTable, properties={ 'fieldModels': relation(FieldModel, backref='model', collection_class=ordering_list('ordinal'), order_by=[fieldModelsTable.c.ordinal], cascade="all, delete-orphan"), 'cardModels': relation(CardModel, backref='model', collection_class=ordering_list('ordinal'), order_by=[cardModelsTable.c.ordinal], cascade="all, delete-orphan"), }) # Model deletions ########################################################################## modelsDeletedTable = Table( 'modelsDeleted', metadata, Column('modelId', Integer, ForeignKey("models.id"), nullable=False),
primaryjoin=ArtworkArtifact.artifact_id == Artifact.id, backref='artwork_artifacts') ArtworkArtifact.artwork = association_proxy('artwork_version', 'artwork') ArtworkAuthor.author = relationship(User, primaryjoin=ArtworkAuthor.author_id == User.id, backref='artwork_authors') ArtworkAuthor.description = relationship(Post, primaryjoin=ArtworkAuthor.description_id == Post.id) ArtworkAuthor.artwork = relationship(Artwork, primaryjoin=ArtworkAuthor.artwork_id == Artwork.id, backref=backref( 'artwork_authors', cascade="all, delete-orphan", order_by=ArtworkAuthor.order, collection_class=ordering_list('order', reorder_on_append=True))) Post.poster = relationship(User, primaryjoin=Post.poster_id == User.id) Post.active_text = relationship(PostText, primaryjoin=Post.active_text_id == PostText.id) PostText.post = relationship(Post, primaryjoin=PostText.post_id == Post.id, backref=backref( "texts", order_by=PostText.posted_at)) PostText.poster = relationship(User, primaryjoin=PostText.poster_id == User.id) ArtworkComment.artwork = relationship(Artwork,
class RasterMosaicItem(Base):
    # One raster file belonging to a RasterMosaic resource; stores the
    # file object, its WGS84 footprint and its position in the mosaic.
    __tablename__ = '%s_item' % COMP_ID

    id = db.Column(db.Integer, primary_key=True)
    resource_id = db.Column(db.ForeignKey(RasterMosaic.id), nullable=False)
    display_name = db.Column(db.Unicode, nullable=True)
    fileobj_id = db.Column(db.ForeignKey(FileObj.id), nullable=True)
    # Extent of the raster in EPSG:4326, filled by load_file().
    footprint = db.Column(ga.Geometry('POLYGON', srid=4326), nullable=True)
    position = db.Column(db.Integer, nullable=True)

    fileobj = db.relationship(FileObj, lazy='joined')
    resource = db.relationship(
        RasterMosaic,
        backref=db.backref(
            'items',
            cascade='all, delete-orphan',
            order_by=position,
            collection_class=ordering_list('position'),
        ),
    )

    def load_file(self, filename, env):
        """Validate the raster, compute its footprint and store a GTiff
        copy (reprojected to the resource SRS when needed) with overviews.

        :raises ValidationError: on unreadable / unsupported input
        """
        ds = gdal.Open(filename, gdal.GA_ReadOnly)
        if not ds:
            raise ValidationError(
                _("GDAL library was unable to open the file."))

        if ds.RasterCount not in (3, 4):
            raise ValidationError(
                _("Only RGB and RGBA rasters are supported."))

        dsdriver = ds.GetDriver()
        dsproj = ds.GetProjection()
        dsgtran = ds.GetGeoTransform()

        if dsdriver.ShortName not in SUPPORTED_DRIVERS:
            raise ValidationError(
                _("Raster has format '%(format)s', however only following formats are supported: %(all_formats)s.")  # NOQA: E501
                % dict(format=dsdriver.ShortName,
                       all_formats=", ".join(SUPPORTED_DRIVERS)))

        if not dsproj or not dsgtran:
            raise ValidationError(
                _("Raster files without projection info are not supported."))

        data_type = None
        alpha_band = None
        has_nodata = None
        for bidx in range(1, ds.RasterCount + 1):
            band = ds.GetRasterBand(bidx)

            # All bands must share one data type.
            if data_type is None:
                data_type = band.DataType
            elif data_type != band.DataType:
                raise ValidationError(
                    _("Complex data types are not supported."))

            if band.GetRasterColorInterpretation() == gdal.GCI_AlphaBand:
                assert alpha_band is None, "Multiple alpha bands found!"
                alpha_band = bidx
            else:
                # True only if every non-alpha band defines a nodata value.
                has_nodata = (has_nodata is None or has_nodata) and (
                    band.GetNoDataValue() is not None)

        src_osr = osr.SpatialReference()
        src_osr.ImportFromWkt(dsproj)
        dst_osr = osr.SpatialReference()
        dst_osr.ImportFromEPSG(int(self.resource.srs.id))

        reproject = not src_osr.IsSame(dst_osr)

        # Footprint is taken from gdalinfo's wgs84Extent (EPSG:4326).
        info = gdal.Info(filename, format='json')
        geom = geom_from_geojson(info['wgs84Extent'])
        self.footprint = ga.elements.WKBElement(bytearray(geom.wkb),
                                                srid=4326)
        self.fileobj = env.file_storage.fileobj(component='raster_mosaic')

        dst_file = env.raster_mosaic.workdir_filename(self.fileobj,
                                                      makedirs=True)
        co = ['COMPRESS=DEFLATE', 'TILED=YES', 'BIGTIFF=YES']
        if reproject:
            gdal.Warp(
                dst_file, filename,
                options=gdal.WarpOptions(
                    format='GTiff',
                    dstSRS='EPSG:%d' % self.resource.srs.id,
                    # Add an alpha band unless transparency is already
                    # expressed via nodata or an existing alpha band.
                    dstAlpha=not has_nodata and alpha_band is None,
                    creationOptions=co,
                ),
            )
        else:
            gdal.Translate(dst_file, filename,
                           options=gdal.TranslateOptions(format='GTiff',
                                                         creationOptions=co))

        self.build_overview()

    def build_overview(self, missing_only=False):
        """(Re)build raster overviews for the stored GTiff.

        :param missing_only: skip the rebuild when an .ovr already exists
        """
        fn = env.raster_mosaic.workdir_filename(self.fileobj)
        if missing_only and os.path.isfile(fn + '.ovr'):
            return

        # cleaning overviews
        ds = gdal.Open(fn, gdal.GA_Update)
        ds.BuildOverviews(overviewlist=[])
        ds = None

        # building overviews
        # NOTE(review): byte-string keys/values (b'...') look like a
        # Python 2 holdover — confirm GDAL accepts them on Python 3.
        options = {
            b'COMPRESS_OVERVIEW': b'DEFLATE',
            b'INTERLEAVE_OVERVIEW': b'PIXEL',
            b'BIGTIFF_OVERVIEW': b'YES',
        }
        for key, val in options.items():
            gdal.SetConfigOption(key, val)
        try:
            ds = gdal.Open(fn, gdal.GA_ReadOnly)
            ds.BuildOverviews(b'CUBIC',
                              overviewlist=calc_overviews_levels(ds))
            ds = None
        finally:
            # Always reset the config options, even on failure.
            for key, val in options.items():
                gdal.SetConfigOption(key, None)

    def to_dict(self):
        # Minimal serialization used by the mosaic resource API.
        return dict(id=self.id, display_name=self.display_name)
class Collection(db.Model):
    """Represent a Collection record."""

    def __repr__(self):
        # NOTE(review): ``{0.query}`` — the mapped column is ``dbquery``;
        # this repr likely raises AttributeError. Confirm before relying
        # on it.
        return 'Collection <id: {0.id}, name: {0.name}, dbquery: {0.query}, ' \
            'nbrecs: {0.nbrecs}>'.format(self)

    def __unicode__(self):
        suffix = ' ({0})'.format(_('default')) if self.id == 1 else ''
        return u"{0.id}. {0.name}{1}".format(self, suffix)

    def __str__(self):
        return unicode(self).encode('utf-8')

    __tablename__ = 'collection'

    id = db.Column(db.MediumInteger(9, unsigned=True), primary_key=True)
    name = db.Column(db.String(255), unique=True, index=True, nullable=False)
    dbquery = db.Column(db.Text(20), nullable=True, index=True)
    nbrecs = db.Column(db.Integer(10, unsigned=True), server_default='0')

    # FIXME read only!!!
    reclist = db.Column(
        db.PickleType(pickler=IntbitsetPickle(), comparator=IntbitsetCmp))

    # Localized names, keyed by (language, type) via 'ln_type'.
    _names = db.relationship(
        lambda: Collectionname,
        backref='collection',
        collection_class=attribute_mapped_collection('ln_type'),
        cascade="all, delete, delete-orphan")
    names = association_proxy(
        '_names', 'value',
        creator=lambda k, v: Collectionname(ln_type=k, value=v))

    # Localized box labels, same keying scheme as _names.
    _boxes = db.relationship(
        lambda: Collectionboxname,
        backref='collection',
        collection_class=attribute_mapped_collection('ln_type'),
        cascade="all, delete, delete-orphan")
    boxes = association_proxy(
        '_boxes', 'value',
        creator=lambda k, v: Collectionboxname(ln_type=k, value=v))

    _formatoptions = association_proxy('formats', 'format')

    # @cache.memoize(make_name=lambda fname: fname + '::' + g.ln)
    def formatoptions(self):
        """Return output format options, falling back to HTML brief."""
        if len(self._formatoptions):
            return [dict(f) for f in self._formatoptions]
        else:
            return [{'code': u'hb',
                     'name': _("HTML %(format)s", format=_("brief")),
                     'content_type': u'text/html',
                     'visibility': 1}]
    formatoptions = property(formatoptions)

    _examples_example = association_proxy('_examples', 'example')

    @property
    # @cache.memoize(make_name=lambda fname: fname + '::' + g.ln)
    def examples(self):
        """Return example queries attached to this collection."""
        return list(self._examples_example)

    @property
    def name_ln(self):
        """Return the collection name localized to the current language."""
        from invenio.legacy.search_engine import get_coll_i18nname
        return get_coll_i18nname(self.name, g.ln)
        # Another possible implementation with cache memoize
        # @cache.memoize
        # try:
        #     return db.object_session(self).query(Collectionname).\
        #         with_parent(self).filter(db.and_(Collectionname.ln==g.ln,
        #             Collectionname.type=='ln')).first().value
        # except:
        #     return self.name

    @property
    # @cache.memoize(make_name=lambda fname: fname + '::' + g.ln)
    def portalboxes_ln(self):
        """Return portal boxes for the current language, by score desc."""
        return db.object_session(self).query(CollectionPortalbox).\
            with_parent(self).\
            options(db.joinedload_all(CollectionPortalbox.portalbox)).\
            filter(CollectionPortalbox.ln == g.ln).\
            order_by(db.desc(CollectionPortalbox.score)).all()

    @property
    def most_specific_dad(self):
        """Return the parent collection with the fewest records."""
        return db.object_session(self).query(Collection).\
            join(Collection.sons).\
            filter(CollectionCollection.id_son == self.id).\
            order_by(db.asc(Collection.nbrecs)).\
            first()

    @property
    # @cache.memoize(make_name=lambda fname: fname + '::' + g.ln)
    def is_restricted(self):
        """Return whether the collection is restricted."""
        from invenio.legacy.search_engine import collection_restricted_p
        return collection_restricted_p(self.name)

    @property
    def type(self):
        """Return 'r' (regular) if dbquery looks like 'NNN:...', else 'v'."""
        # NOTE(review): non-raw pattern "\d+:.*" works but emits an
        # invalid-escape warning on modern Pythons; kept byte-identical here.
        p = re.compile("\d+:.*")
        if self.dbquery is not None and \
                p.match(self.dbquery.lower()):
            return 'r'
        else:
            return 'v'

    # Child links (this collection as dad), ordered by score.
    _collection_children = db.relationship(
        lambda: CollectionCollection,
        collection_class=ordering_list('score'),
        primaryjoin=lambda: Collection.id == CollectionCollection.id_dad,
        foreign_keys=lambda: CollectionCollection.id_dad,
        order_by=lambda: db.asc(CollectionCollection.score))
    # Regular ('r') children only.
    _collection_children_r = db.relationship(
        lambda: CollectionCollection,
        collection_class=ordering_list('score'),
        primaryjoin=lambda: db.and_(
            Collection.id == CollectionCollection.id_dad,
            CollectionCollection.type == 'r'),
        foreign_keys=lambda: CollectionCollection.id_dad,
        order_by=lambda: db.asc(CollectionCollection.score))
    # Virtual ('v') children only.
    _collection_children_v = db.relationship(
        lambda: CollectionCollection,
        collection_class=ordering_list('score'),
        primaryjoin=lambda: db.and_(
            Collection.id == CollectionCollection.id_dad,
            CollectionCollection.type == 'v'),
        foreign_keys=lambda: CollectionCollection.id_dad,
        order_by=lambda: db.asc(CollectionCollection.score))
    # Parent links (this collection as son).
    collection_parents = db.relationship(
        lambda: CollectionCollection,
        collection_class=ordering_list('score'),
        primaryjoin=lambda: Collection.id == CollectionCollection.id_son,
        foreign_keys=lambda: CollectionCollection.id_son,
        order_by=lambda: db.asc(CollectionCollection.score))

    collection_children = association_proxy('_collection_children', 'son')
    collection_children_r = association_proxy(
        '_collection_children_r', 'son',
        creator=lambda son: CollectionCollection(id_son=son.id, type='r'))
    collection_children_v = association_proxy(
        '_collection_children_v', 'son',
        creator=lambda son: CollectionCollection(id_son=son.id, type='v'))

    _externalcollections = db.relationship(
        lambda: CollectionExternalcollection,
        cascade="all, delete, delete-orphan")

    def _externalcollections_type(type):
        # Factory for the three typed external-collection proxies below.
        return association_proxy(
            '_externalcollections_' + str(type), 'externalcollection',
            creator=lambda ext: CollectionExternalcollection(
                externalcollection=ext, type=type))

    externalcollections_0 = _externalcollections_type(0)
    externalcollections_1 = _externalcollections_type(1)
    externalcollections_2 = _externalcollections_type(2)

    externalcollections = db.relationship(
        lambda: CollectionExternalcollection,
        collection_class=external_collection_mapper,
        cascade="all, delete, delete-orphan")

    # Search options
    _make_field_fieldvalue = lambda type: db.relationship(
        lambda: CollectionFieldFieldvalue,
        primaryjoin=lambda: db.and_(
            Collection.id == CollectionFieldFieldvalue.id_collection,
            CollectionFieldFieldvalue.type == type),
        order_by=lambda: CollectionFieldFieldvalue.score)

    _search_within = _make_field_fieldvalue('sew')
    _search_options = _make_field_fieldvalue('seo')

    @property
    # @cache.memoize(make_name=lambda fname: fname + '::' + g.ln)
    def search_within(self):
        """Collect search within options."""
        default = [('', g._('any field'))]
        found = [(o.field.code, o.field.name_ln) for o in self._search_within]
        if not found:
            # No per-collection options configured: fall back to the
            # site-wide list of searchable fields.
            found = [(f.name.replace(' ', ''), f.name_ln)
                     for f in Field.query.filter(Field.name.in_(
                         cfg['CFG_WEBSEARCH_SEARCH_WITHIN'])).all()]
        return default + sorted(found, key=itemgetter(1))

    @property
    # @cache.memoize(make_name=lambda fname: fname + '::' + g.ln)
    def search_options(self):
        """Return configured search options ('seo' field values)."""
        return self._search_options

    @property
    # @cache.memoize(make_name=lambda fname: fname + '::' + g.ln)
    def ancestors_ids(self):
        """Get list of parent collection ids."""
        output = intbitset([self.id])
        for c in self.dads:
            ancestors = c.dad.ancestors_ids
            if self.id in ancestors:
                # NOTE(review): bare ``raise`` with no active exception is a
                # RuntimeError — presumably a crude cycle guard; confirm.
                raise
            output |= ancestors
        return output

    @property
    # @cache.memoize(make_name=lambda fname: fname + '::' + g.ln)
    def descendants_ids(self):
        """Get list of child collection ids."""
        output = intbitset([self.id])
        for c in self.sons:
            descendants = c.son.descendants_ids
            if self.id in descendants:
                # NOTE(review): same cycle-guard pattern as ancestors_ids.
                raise
            output |= descendants
        return output

    # Gets the list of localized names as an array
    collection_names = db.relationship(
        lambda: Collectionname,
        primaryjoin=lambda: Collection.id == Collectionname.id_collection,
        foreign_keys=lambda: Collectionname.id_collection)

    def translation(self, lang):
        """Get the translation according to the language code."""
        # NOTE(review): bare except hides all errors (including missing
        # translation -> AttributeError on .value); returns "" either way.
        try:
            return db.object_session(self).query(Collectionname).\
                with_parent(self).filter(db.and_(
                    Collectionname.ln == lang,
                    Collectionname.type == 'ln'
                )).first().value
        except:
            return ""

    @property
    def sort_methods(self):
        """Get sort methods for collection.

        If no sort methods are defined for a collection, the root
        collection's sort methods are returned. If no methods are defined
        for the root collection, all possible sort methods are returned.

        Note: Both sorting methods and ranking methods are now defined via
        the sorter.
        """
        from invenio.modules.sorter.models import BsrMETHOD, \
            Collection_bsrMETHOD

        get_method = lambda obj: obj.bsrMETHOD
        # Try this collection first, then the root collection (id 1).
        for coll_id in (self.id, 1):
            methods = Collection_bsrMETHOD.query.filter_by(
                id_collection=coll_id).order_by(
                    Collection_bsrMETHOD.score).options(
                        db.joinedload(Collection_bsrMETHOD.bsrMETHOD)).all()

            if len(methods) > 0:
                return map(get_method, methods)

        return BsrMETHOD.query.order_by(BsrMETHOD.name).all()

    def get_collectionbox_name(self, ln=None, box_type="r"):
        """Return collection-specific labelling subtrees.

        - 'Focus on': regular collection
        - 'Narrow by': virtual collection
        - 'Latest addition': boxes

        If translation for given language does not exist, use label for
        CFG_SITE_LANG. If no custom label is defined for CFG_SITE_LANG,
        return default label for the box.

        :param ln: the language of the label
        :param box_type: can be 'r' (=Narrow by), 'v' (=Focus on),
            'l' (=Latest additions)
        """
        if ln is None:
            ln = g.ln
        collectionboxnamequery = db.object_session(self).query(
            Collectionboxname).with_parent(self)
        try:
            collectionboxname = collectionboxnamequery.filter(db.and_(
                Collectionboxname.ln == ln,
                Collectionboxname.type == box_type,
            )).one()
        except:
            # NOTE(review): this fallback query is byte-identical to the one
            # above — per the docstring it was presumably meant to query
            # CFG_SITE_LANG instead of ``ln``. Confirm and fix.
            try:
                collectionboxname = collectionboxnamequery.filter(db.and_(
                    Collectionboxname.ln == ln,
                    Collectionboxname.type == box_type,
                )).one()
            except:
                collectionboxname = None

        if collectionboxname is None:
            # load the right message language
            _ = gettext_set_language(ln)
            return _(Collectionboxname.TYPES.get(box_type, ''))
        else:
            return collectionboxname.value

    portal_boxes_ln = db.relationship(
        lambda: CollectionPortalbox,
        collection_class=ordering_list('score'),
        primaryjoin=lambda:
        Collection.id == CollectionPortalbox.id_collection,
        foreign_keys=lambda: CollectionPortalbox.id_collection,
        order_by=lambda: db.asc(CollectionPortalbox.score))

    def breadcrumbs(self, builder=None, ln=None):
        """Return breadcrumbs for collection."""
        ln = cfg.get('CFG_SITE_LANG') if ln is None else ln
        breadcrumbs = []
        # Get breadcrumbs for most specific dad if it exists.
        if self.most_specific_dad is not None:
            breadcrumbs = self.most_specific_dad.breadcrumbs(builder=builder,
                                                             ln=ln)
        if builder is not None:
            crumb = builder(self)
        else:
            crumb = dict(
                text=self.name_ln,
                url=url_for('search.collection', name=self.name))
        breadcrumbs.append(crumb)
        return breadcrumbs
def test_append_reorder(self):
    """With reorder_on_append=True, append() renumbers the whole list."""
    self._setup(ordering_list('position', count_from=1,
                              reorder_on_append=True))

    s1 = Slide('Slide #1')

    self.assert_(not s1.bullets)
    self.assert_(len(s1.bullets) == 0)

    s1.bullets.append(Bullet('s1/b1'))

    self.assert_(s1.bullets)
    self.assert_(len(s1.bullets) == 1)
    self.assert_(s1.bullets[0].position == 1)

    s1.bullets.append(Bullet('s1/b2'))

    self.assert_(len(s1.bullets) == 2)
    self.assert_(s1.bullets[0].position == 1)
    self.assert_(s1.bullets[1].position == 2)

    # A preset position is overwritten on append because reorder_on_append
    # renumbers every element.
    bul = Bullet('s1/b100')
    bul.position = 100
    s1.bullets.append(bul)

    self.assert_(s1.bullets[0].position == 1)
    self.assert_(s1.bullets[1].position == 2)
    self.assert_(s1.bullets[2].position == 3)

    s1.bullets.append(Bullet('s1/b4'))
    self.assert_(s1.bullets[0].position == 1)
    self.assert_(s1.bullets[1].position == 2)
    self.assert_(s1.bullets[2].position == 3)
    self.assert_(s1.bullets[3].position == 4)

    # Explicit reorder is a no-op when positions are already contiguous.
    s1.bullets._reorder()
    self.assert_(s1.bullets[0].position == 1)
    self.assert_(s1.bullets[1].position == 2)
    self.assert_(s1.bullets[2].position == 3)
    self.assert_(s1.bullets[3].position == 4)

    # _raw_append bypasses position assignment entirely.
    s1.bullets._raw_append(Bullet('raw'))
    self.assert_(s1.bullets[4].position is None)

    s1.bullets._reorder()
    self.assert_(s1.bullets[4].position == 5)

    session = create_session()
    session.add(s1)
    session.flush()

    id = s1.id
    session.expunge_all()
    del s1

    # Reload and verify persisted order matches insertion order.
    srt = session.query(Slide).get(id)

    self.assert_(srt.bullets)
    self.assert_(len(srt.bullets) == 5)

    titles = ['s1/b1', 's1/b2', 's1/b100', 's1/b4', 'raw']
    found = [b.text for b in srt.bullets]
    eq_(titles, found)

    # A raw append with a manually assigned position persists too.
    srt.bullets._raw_append(Bullet('raw2'))
    srt.bullets[-1].position = 6
    session.flush()
    session.expunge_all()

    srt = session.query(Slide).get(id)
    titles = ['s1/b1', 's1/b2', 's1/b100', 's1/b4', 'raw', 'raw2']
    found = [b.text for b in srt.bullets]
    eq_(titles, found)
class UserFeatures(Base):
    """ORM model for the 'userfeatures' table (one row per user).

    Many legacy columns store "absent" as the empty string; the hybrid
    properties at the bottom expose those as None and the enable*/commented
    integer flags as booleans.
    """

    __tablename__ = 'userfeatures'
    __table_args__ = (
        PrimaryKeyConstraint('id'),
        ForeignKeyConstraint(('voicemailid',),
                             ('voicemail.uniqueid',)),
        ForeignKeyConstraint(('tenant_uuid',),
                             ('tenant.uuid',),
                             ondelete='CASCADE'),
        UniqueConstraint('func_key_private_template_id'),
        UniqueConstraint('uuid', name='userfeatures_uuid'),
        UniqueConstraint('email', name='userfeatures_email'),
        Index('userfeatures__idx__agentid', 'agentid'),
        Index('userfeatures__idx__firstname', 'firstname'),
        Index('userfeatures__idx__lastname', 'lastname'),
        Index('userfeatures__idx__loginclient', 'loginclient'),
        Index('userfeatures__idx__musiconhold', 'musiconhold'),
        Index('userfeatures__idx__uuid', 'uuid'),
        Index('userfeatures__idx__voicemailid', 'voicemailid'),
    )

    id = Column(Integer, nullable=False)
    uuid = Column(String(38), nullable=False, default=new_uuid)
    firstname = Column(String(128), nullable=False, server_default='')
    # column_property so that email comparisons use EmailComparator.
    email = column_property(Column(String(254)),
                            comparator_factory=EmailComparator)
    voicemailid = Column(Integer)
    agentid = Column(Integer)
    pictureid = Column(Integer)
    tenant_uuid = Column(String(36), nullable=False)
    callerid = Column(String(160))
    ringseconds = Column(Integer, nullable=False, server_default='30')
    simultcalls = Column(Integer, nullable=False, server_default='5')
    enableclient = Column(Integer, nullable=False, server_default='0')
    loginclient = Column(String(254), nullable=False, server_default='')
    passwdclient = Column(String(64), nullable=False, server_default='')
    enablehint = Column(Integer, nullable=False, server_default='1')
    enablevoicemail = Column(Integer, nullable=False, server_default='0')
    enablexfer = Column(Integer, nullable=False, server_default='0')
    dtmf_hangup = Column(Integer, nullable=False, server_default='0')
    enableonlinerec = Column(Integer, nullable=False, server_default='0')
    call_record_outgoing_external_enabled = Column(
        Boolean, nullable=False, server_default='false')
    call_record_outgoing_internal_enabled = Column(
        Boolean, nullable=False, server_default='false')
    call_record_incoming_external_enabled = Column(
        Boolean, nullable=False, server_default='false')
    call_record_incoming_internal_enabled = Column(
        Boolean, nullable=False, server_default='false')
    incallfilter = Column(Integer, nullable=False, server_default='0')
    enablednd = Column(Integer, nullable=False, server_default='0')
    enableunc = Column(Integer, nullable=False, server_default='0')
    destunc = Column(String(128), nullable=False, server_default='')
    enablerna = Column(Integer, nullable=False, server_default='0')
    destrna = Column(String(128), nullable=False, server_default='')
    enablebusy = Column(Integer, nullable=False, server_default='0')
    destbusy = Column(String(128), nullable=False, server_default='')
    musiconhold = Column(String(128), nullable=False, server_default='')
    outcallerid = Column(String(80), nullable=False, server_default='')
    mobilephonenumber = Column(String(128), nullable=False,
                               server_default='')
    bsfilter = Column(enum.generic_bsfilter, nullable=False,
                      server_default='no')
    preprocess_subroutine = Column(String(39))
    timezone = Column(String(128))
    language = Column(String(20))
    ringintern = Column(String(64))
    ringextern = Column(String(64))
    ringgroup = Column(String(64))
    ringforward = Column(String(64))
    rightcallcode = Column(String(16))
    # 0 = enabled, non-zero = disabled (see `enabled` hybrid below).
    commented = Column(Integer, nullable=False, server_default='0')
    func_key_template_id = Column(
        Integer, ForeignKey('func_key_template.id', ondelete="SET NULL"))
    func_key_private_template_id = Column(
        Integer, ForeignKey('func_key_template.id'), nullable=False)
    subscription_type = Column(Integer, nullable=False, server_default='0')
    created_at = Column(DateTime,
                        default=datetime.datetime.utcnow,
                        server_default=text("(now() at time zone 'utc')"))
    # "webi_" attributes map DB columns whose names clash with the hybrid
    # properties exposing them (lastname/userfield/description).
    webi_lastname = Column('lastname', String(128),
                           nullable=False, server_default='')
    webi_userfield = Column('userfield', String(128),
                            nullable=False, server_default='')
    webi_description = Column('description', Text,
                              nullable=False, default='')

    func_key_template = relationship(FuncKeyTemplate,
                                     foreign_keys=func_key_template_id)
    func_key_template_private = relationship(
        FuncKeyTemplate, foreign_keys=func_key_private_template_id)

    main_line_rel = relationship(
        "UserLine",
        primaryjoin="""and_(
            UserFeatures.id == UserLine.user_id,
            UserLine.main_line == True
        )""",
    )
    agent = relationship(
        "AgentFeatures",
        primaryjoin="AgentFeatures.id == UserFeatures.agentid",
        foreign_keys='UserFeatures.agentid',
        viewonly=True,
    )
    voicemail = relationship("Voicemail", back_populates="users")

    user_lines = relationship(
        'UserLine',
        order_by='desc(UserLine.main_line)',
        collection_class=ordering_list('main_line',
                                       ordering_func=ordering_main_line),
        cascade='all, delete-orphan',
        back_populates='user',
    )
    lines = association_proxy(
        'user_lines', 'line',
        creator=lambda _line: UserLine(line=_line, main_user=False),
    )

    incall_dialactions = relationship(
        'Dialaction',
        primaryjoin="""and_(
            Dialaction.category == 'incall',
            Dialaction.action == 'user',
            Dialaction.actionarg1 == cast(UserFeatures.id, String)
        )""",
        foreign_keys='Dialaction.actionarg1',
        viewonly=True,
    )
    incalls = association_proxy('incall_dialactions', 'incall')

    # Per-event dialactions (keyed by event name) — see `fallbacks` below.
    user_dialactions = relationship(
        'Dialaction',
        primaryjoin="""and_(
            Dialaction.category == 'user',
            Dialaction.categoryval == cast(UserFeatures.id, String)
        )""",
        cascade='all, delete-orphan',
        collection_class=attribute_mapped_collection('event'),
        foreign_keys='Dialaction.categoryval',
    )

    group_members = relationship(
        'QueueMember',
        primaryjoin="""and_(
            QueueMember.category == 'group',
            QueueMember.usertype == 'user',
            QueueMember.userid == UserFeatures.id
        )""",
        foreign_keys='QueueMember.userid',
        cascade='all, delete-orphan',
    )
    groups = association_proxy(
        'group_members', 'group',
        creator=lambda _group: QueueMember(category='group',
                                           usertype='user',
                                           group=_group),
    )

    queue_members = relationship(
        'QueueMember',
        primaryjoin="""and_(
            QueueMember.category == 'queue',
            QueueMember.usertype == 'user',
            QueueMember.userid == UserFeatures.id
        )""",
        foreign_keys='QueueMember.userid',
        cascade='all, delete-orphan',
    )
    queues = association_proxy('queue_members', 'queue')

    paging_users = relationship('PagingUser', cascade='all, delete-orphan')

    switchboard_member_users = relationship('SwitchboardMemberUser',
                                            cascade='all, delete-orphan')
    switchboards = association_proxy('switchboard_member_users',
                                     'switchboard')

    _dialaction_actions = relationship(
        'Dialaction',
        primaryjoin="""and_(
            Dialaction.action == 'user',
            Dialaction.actionarg1 == cast(UserFeatures.id, String),
        )""",
        foreign_keys='Dialaction.actionarg1',
        cascade='all, delete-orphan',
    )

    schedule_paths = relationship(
        'SchedulePath',
        primaryjoin="""and_(
            SchedulePath.path == 'user',
            SchedulePath.pathid == UserFeatures.id
        )""",
        foreign_keys='SchedulePath.pathid',
        cascade='all, delete-orphan',
    )
    schedules = association_proxy(
        'schedule_paths', 'schedule',
        creator=lambda _schedule: SchedulePath(path='user',
                                               schedule_id=_schedule.id,
                                               schedule=_schedule),
    )

    call_filter_recipients = relationship(
        'Callfiltermember',
        primaryjoin="""and_(
            Callfiltermember.type == 'user',
            Callfiltermember.bstype == 'boss',
            Callfiltermember.typeval == cast(UserFeatures.id, String)
        )""",
        foreign_keys='Callfiltermember.typeval',
        cascade='delete, delete-orphan',
    )
    call_filter_surrogates = relationship(
        'Callfiltermember',
        primaryjoin="""and_(
            Callfiltermember.type == 'user',
            Callfiltermember.bstype == 'secretary',
            Callfiltermember.typeval == cast(UserFeatures.id, String)
        )""",
        foreign_keys='Callfiltermember.typeval',
        cascade='delete, delete-orphan',
    )

    call_pickup_interceptors = relationship(
        'PickupMember',
        primaryjoin="""and_(
            PickupMember.category == 'member',
            PickupMember.membertype == 'user',
            PickupMember.memberid == UserFeatures.id
        )""",
        foreign_keys='PickupMember.memberid',
        cascade='delete, delete-orphan',
    )
    call_pickup_targets = relationship(
        'PickupMember',
        primaryjoin="""and_(
            PickupMember.category == 'pickup',
            PickupMember.membertype == 'user',
            PickupMember.memberid == UserFeatures.id
        )""",
        foreign_keys='PickupMember.memberid',
        cascade='delete, delete-orphan',
    )

    rightcall_members = relationship(
        'RightCallMember',
        primaryjoin="""and_(
            RightCallMember.type == 'user',
            RightCallMember.typeval == cast(UserFeatures.id, String)
        )""",
        foreign_keys='RightCallMember.typeval',
        cascade='all, delete-orphan',
    )
    call_permissions = association_proxy('rightcall_members', 'rightcall')

    call_pickup_interceptor_pickups = relationship(
        'Pickup',
        primaryjoin="""and_(
            PickupMember.category == 'member',
            PickupMember.membertype == 'user',
            PickupMember.memberid == UserFeatures.id
        )""",
        secondary="join(PickupMember, Pickup, "
                  "Pickup.id == PickupMember.pickupid)",
        secondaryjoin="Pickup.id == PickupMember.pickupid",
        foreign_keys='PickupMember.pickupid,PickupMember.memberid',
        viewonly=True,
    )
    users_from_call_pickup_user_targets = association_proxy(
        'call_pickup_interceptor_pickups', 'user_targets')
    users_from_call_pickup_group_targets = association_proxy(
        'call_pickup_interceptor_pickups', 'users_from_group_targets')
    users_from_call_pickup_group_interceptors_user_targets = association_proxy(
        'group_members',
        'users_from_call_pickup_group_interceptor_user_targets')
    users_from_call_pickup_group_interceptors_group_targets = association_proxy(
        'group_members',
        'users_from_call_pickup_group_interceptor_group_targets')

    func_keys = relationship('FuncKeyDestUser', cascade='all, delete-orphan')

    def extrapolate_caller_id(self, extension=None):
        """Split callerid into (name, number), defaulting the number to the
        extension's exten when the callerid has none."""
        default_num = extension.exten if extension else None
        user_match = caller_id_regex.match(self.callerid)
        name = user_match.group('name')
        num = user_match.group('num')
        return name, (num or default_num)

    def fill_caller_id(self):
        """Default the caller id to the quoted full name when unset."""
        if self.caller_id is None:
            self.caller_id = '"{}"'.format(self.fullname)

    @property
    def fallbacks(self):
        return self.user_dialactions

    @fallbacks.setter
    def fallbacks(self, dialactions):
        # Drop events absent from the new mapping.
        for event in list(self.user_dialactions.keys()):
            if event not in dialactions:
                self.user_dialactions.pop(event, None)

        # Upsert the remaining events; a None value removes the event.
        for event, dialaction in six.iteritems(dialactions):
            if dialaction is None:
                self.user_dialactions.pop(event, None)
                continue

            if event not in self.user_dialactions:
                dialaction.category = 'user'
                dialaction.event = event
                self.user_dialactions[event] = dialaction

            self.user_dialactions[event].action = dialaction.action
            self.user_dialactions[event].actionarg1 = dialaction.actionarg1
            self.user_dialactions[event].actionarg2 = dialaction.actionarg2

    @hybrid_property
    def fullname(self):
        name = self.firstname
        if self.lastname:
            name += " {}".format(self.lastname)
        return name

    @fullname.expression
    def fullname(cls):
        return func.trim(cls.firstname + " " + cls.webi_lastname)

    # The hybrids below all follow one pattern: '' in the DB means "unset"
    # and is exposed as None (func.nullif on the SQL side).
    @hybrid_property
    def username(self):
        if self.loginclient == '':
            return None
        return self.loginclient

    @username.expression
    def username(cls):
        return func.nullif(cls.loginclient, '')

    @username.setter
    def username(self, value):
        if value is None:
            self.loginclient = ''
        else:
            self.loginclient = value

    @hybrid_property
    def password(self):
        if self.passwdclient == '':
            return None
        return self.passwdclient

    @password.expression
    def password(cls):
        return func.nullif(cls.passwdclient, '')

    @password.setter
    def password(self, value):
        if value is None:
            self.passwdclient = ''
        else:
            self.passwdclient = value

    @hybrid_property
    def agent_id(self):
        return self.agentid

    @agent_id.setter
    def agent_id(self, value):
        self.agentid = value

    @hybrid_property
    def caller_id(self):
        if self.callerid == '':
            return None
        return self.callerid

    @caller_id.expression
    def caller_id(cls):
        return func.nullif(cls.callerid, '')

    @caller_id.setter
    def caller_id(self, value):
        if value is None:
            self.callerid = ''
        else:
            self.callerid = value

    @hybrid_property
    def outgoing_caller_id(self):
        if self.outcallerid == '':
            return None
        return self.outcallerid

    @outgoing_caller_id.expression
    def outgoing_caller_id(cls):
        return func.nullif(cls.outcallerid, '')

    @outgoing_caller_id.setter
    def outgoing_caller_id(self, value):
        if value is None:
            self.outcallerid = ''
        else:
            self.outcallerid = value

    @hybrid_property
    def music_on_hold(self):
        if self.musiconhold == '':
            return None
        return self.musiconhold

    @music_on_hold.expression
    def music_on_hold(cls):
        return func.nullif(cls.musiconhold, '')

    @music_on_hold.setter
    def music_on_hold(self, value):
        if value is None:
            self.musiconhold = ''
        else:
            self.musiconhold = value

    @hybrid_property
    def mobile_phone_number(self):
        if self.mobilephonenumber == '':
            return None
        return self.mobilephonenumber

    @mobile_phone_number.expression
    def mobile_phone_number(cls):
        return func.nullif(cls.mobilephonenumber, '')

    @mobile_phone_number.setter
    def mobile_phone_number(self, value):
        if value is None:
            self.mobilephonenumber = ''
        else:
            self.mobilephonenumber = value

    @hybrid_property
    def voicemail_id(self):
        return self.voicemailid

    @voicemail_id.setter
    def voicemail_id(self, value):
        self.voicemailid = value

    @hybrid_property
    def userfield(self):
        if self.webi_userfield == '':
            return None
        return self.webi_userfield

    @userfield.expression
    def userfield(cls):
        return func.nullif(cls.webi_userfield, '')

    @userfield.setter
    def userfield(self, value):
        if value is None:
            self.webi_userfield = ''
        else:
            self.webi_userfield = value

    @hybrid_property
    def lastname(self):
        if self.webi_lastname == '':
            return None
        return self.webi_lastname

    @lastname.expression
    def lastname(cls):
        return func.nullif(cls.webi_lastname, '')

    @lastname.setter
    def lastname(self, value):
        if value is None:
            self.webi_lastname = ''
        else:
            self.webi_lastname = value

    @hybrid_property
    def description(self):
        if self.webi_description == '':
            return None
        return self.webi_description

    @description.expression
    def description(cls):
        return func.nullif(cls.webi_description, '')

    @description.setter
    def description(self, value):
        if value is None:
            self.webi_description = ''
        else:
            self.webi_description = value

    @hybrid_property
    def template_id(self):
        return self.func_key_template_id

    @template_id.setter
    def template_id(self, value):
        self.func_key_template_id = value

    @hybrid_property
    def private_template_id(self):
        return self.func_key_private_template_id

    @private_template_id.setter
    def private_template_id(self, value):
        self.func_key_private_template_id = value

    # Integer flag columns exposed as booleans (None passes through).
    @hybrid_property
    def incallfilter_enabled(self):
        return self.incallfilter == 1

    @incallfilter_enabled.setter
    def incallfilter_enabled(self, value):
        self.incallfilter = int(value == 1) if value is not None else None

    @hybrid_property
    def dnd_enabled(self):
        return self.enablednd == 1

    @dnd_enabled.setter
    def dnd_enabled(self, value):
        self.enablednd = int(value == 1) if value is not None else None

    @hybrid_property
    def supervision_enabled(self):
        if self.enablehint is None:
            return None
        return self.enablehint == 1

    @supervision_enabled.setter
    def supervision_enabled(self, value):
        self.enablehint = int(value == 1) if value is not None else None

    @hybrid_property
    def call_transfer_enabled(self):
        if self.enablexfer is None:
            return None
        return self.enablexfer == 1

    @call_transfer_enabled.setter
    def call_transfer_enabled(self, value):
        self.enablexfer = int(value == 1) if value is not None else None

    @hybrid_property
    def dtmf_hangup_enabled(self):
        if self.dtmf_hangup is None:
            return None
        return self.dtmf_hangup == 1

    @dtmf_hangup_enabled.setter
    def dtmf_hangup_enabled(self, value):
        self.dtmf_hangup = int(value == 1) if value is not None else None

    @hybrid_property
    def online_call_record_enabled(self):
        if self.enableonlinerec is None:
            return None
        return self.enableonlinerec == 1

    @online_call_record_enabled.setter
    def online_call_record_enabled(self, value):
        self.enableonlinerec = int(value == 1) if value is not None else None

    @hybrid_property
    def ring_seconds(self):
        return self.ringseconds

    @ring_seconds.setter
    def ring_seconds(self, value):
        self.ringseconds = value

    @hybrid_property
    def simultaneous_calls(self):
        return self.simultcalls

    @simultaneous_calls.setter
    def simultaneous_calls(self, value):
        self.simultcalls = value

    @hybrid_property
    def cti_enabled(self):
        if self.enableclient is None:
            return None
        return self.enableclient == 1

    @cti_enabled.setter
    def cti_enabled(self, value):
        self.enableclient = int(value == 1) if value is not None else None

    @hybrid_property
    def busy_enabled(self):
        if self.enablebusy is None:
            return None
        return self.enablebusy == 1

    @busy_enabled.setter
    def busy_enabled(self, value):
        self.enablebusy = int(value == 1) if value is not None else None

    @hybrid_property
    def busy_destination(self):
        if self.destbusy == '':
            return None
        return self.destbusy

    @busy_destination.expression
    def busy_destination(cls):
        return func.nullif(cls.destbusy, '')

    @busy_destination.setter
    def busy_destination(self, value):
        if value is None:
            self.destbusy = ''
        else:
            self.destbusy = value

    @hybrid_property
    def noanswer_enabled(self):
        if self.enablerna is None:
            return None
        return self.enablerna == 1

    @noanswer_enabled.setter
    def noanswer_enabled(self, value):
        self.enablerna = int(value == 1) if value is not None else None

    @hybrid_property
    def noanswer_destination(self):
        if self.destrna == '':
            return None
        return self.destrna

    @noanswer_destination.expression
    def noanswer_destination(cls):
        return func.nullif(cls.destrna, '')

    @noanswer_destination.setter
    def noanswer_destination(self, value):
        if value is None:
            self.destrna = ''
        else:
            self.destrna = value

    @hybrid_property
    def unconditional_enabled(self):
        if self.enableunc is None:
            return None
        return self.enableunc == 1

    @unconditional_enabled.setter
    def unconditional_enabled(self, value):
        self.enableunc = int(value == 1) if value is not None else None

    @hybrid_property
    def unconditional_destination(self):
        if self.destunc == '':
            return None
        return self.destunc

    @unconditional_destination.expression
    def unconditional_destination(cls):
        return func.nullif(cls.destunc, '')

    @unconditional_destination.setter
    def unconditional_destination(self, value):
        if value is None:
            self.destunc = ''
        else:
            self.destunc = value

    # `commented` is inverted: 0 means the user is enabled.
    @hybrid_property
    def enabled(self):
        if self.commented is None:
            return None
        return self.commented == 0

    @enabled.expression
    def enabled(cls):
        return not_(cast(cls.commented, Boolean))

    @enabled.setter
    def enabled(self, value):
        self.commented = int(value is False) if value is not None else None

    @hybrid_property
    def call_permission_password(self):
        if self.rightcallcode == '':
            return None
        return self.rightcallcode

    @call_permission_password.expression
    def call_permission_password(cls):
        return func.nullif(cls.rightcallcode, '')

    @call_permission_password.setter
    def call_permission_password(self, value):
        if value == '':
            self.rightcallcode = None
        else:
            self.rightcallcode = value

    @property
    def forwards(self):
        # The user object itself carries the forward columns.
        return self

    @property
    def services(self):
        # The user object itself carries the service columns.
        return self
def test_append_reorder(self):
    """Verify eager renumbering when the list is built with
    reorder_on_append=True, and the behaviour of _raw_append/_reorder."""
    self._setup(
        ordering_list('position', count_from=1, reorder_on_append=True))

    slide = Slide('Slide #1')
    self.assert_(not slide.bullets)
    self.assert_(len(slide.bullets) == 0)

    slide.bullets.append(Bullet('s1/b1'))
    self.assert_(slide.bullets)
    self.assert_(len(slide.bullets) == 1)
    self.assert_(slide.bullets[0].position == 1)

    slide.bullets.append(Bullet('s1/b2'))
    self.assert_(len(slide.bullets) == 2)
    for idx in (0, 1):
        self.assert_(slide.bullets[idx].position == idx + 1)

    # A preset position is clobbered: append always renumbers.
    preset = Bullet('s1/b100')
    preset.position = 100
    slide.bullets.append(preset)
    for idx in (0, 1, 2):
        self.assert_(slide.bullets[idx].position == idx + 1)

    slide.bullets.append(Bullet('s1/b4'))
    for idx in (0, 1, 2, 3):
        self.assert_(slide.bullets[idx].position == idx + 1)

    # Explicit reorder changes nothing when positions are contiguous.
    slide.bullets._reorder()
    for idx in (0, 1, 2, 3):
        self.assert_(slide.bullets[idx].position == idx + 1)

    # _raw_append skips position assignment until the next reorder.
    slide.bullets._raw_append(Bullet('raw'))
    self.assert_(slide.bullets[4].position is None)
    slide.bullets._reorder()
    self.assert_(slide.bullets[4].position == 5)

    session = create_session()
    session.add(slide)
    session.flush()

    slide_id = slide.id
    session.expunge_all()
    del slide

    reloaded = session.query(Slide).get(slide_id)
    self.assert_(reloaded.bullets)
    self.assert_(len(reloaded.bullets) == 5)

    expected = ['s1/b1', 's1/b2', 's1/b100', 's1/b4', 'raw']
    eq_(expected, [b.text for b in reloaded.bullets])

    # A raw append with a hand-assigned position round-trips as well.
    reloaded.bullets._raw_append(Bullet('raw2'))
    reloaded.bullets[-1].position = 6
    session.flush()
    session.expunge_all()

    reloaded = session.query(Slide).get(slide_id)
    expected = ['s1/b1', 's1/b2', 's1/b100', 's1/b4', 'raw', 'raw2']
    eq_(expected, [b.text for b in reloaded.bullets])
class Task(CreationTimeMixin, db.Model): """ A task can have one of three types: single event, master task, slave task. A master task contains the recurrence pattern (either as a procedure or a data field) and creates many slave tasks as long as it is not completed yet. Deleting and completing a slave task whose master task is not hard-scheduled forces a new slave task to be created immediately. Disabling hard-scheduling for a master task creates a slave task if there is no non-completed one. Creating a non-hard-scheduled master task creates a slave task. """ id = db.Column(db.Integer, primary_key=True) position = db.Column(db.Integer, nullable=False) summary = db.Column(db.String(255), nullable=False) description = db.Column(db.Text(), nullable=False) visible_from = db.Column(db.Date) due = db.Column(db.Date) notify = db.Column(db.Boolean, nullable=False) completed = db.Column(db.Boolean, nullable=False) context_id = db.Column(db.Integer, db.ForeignKey("context.id"), nullable=False) context = db.relationship( "Context", backref=db.backref("tasks", collection_class=ordering_list("position"), order_by=[position])) master_task_id = db.Column(db.Integer, db.ForeignKey("task.id")) master_task = db.relationship("Task", backref=db.backref("slaves", remote_side=[id], uselist=True), remote_side=[master_task_id], uselist=False) tags = db.relationship('Tag', secondary=tasks2tags, backref=db.backref('tasks'), order_by=[Tag.position]) completion_time = db.Column(db.DateTime) recur_data = db.Column(db.String(1024)) recur_procedure = db.Column(db.String(256)) recur_last = db.Column(db.Date) recur_hardschedule = db.Column(db.Boolean) recur_fields = [ "interval", "setpos", "bymonth", "bymonthday", "byyearday", "byweekno", "byweekday", "byeaster" ] recur_last_arg_field = 0 recur_reschedule_this = False def __init__(self, summary, description, context=None, visible_from=None, due=None, notify=True, completed=False, master_task=None, tags=None): self.summary = summary 
self.description = description if context is not None: # set via context.tasks self.context = context self.visible_from = visible_from self.due = due self.notify = notify self.completed = completed self.master_task = master_task if tags is None: self.tags = [] else: self.tags = tags def create_slave_task(self, newduedate): if self.visible_from is None: visfrom = None else: visfrom = newduedate - (self.due - self.visible_from) task = Task(self.summary, self.description, None, visfrom, newduedate, self.notify, self.completed, self, self.tags) self.context.tasks.insert(0, task) return task def compute_next_date(self): due_set = self.due is not None if (self.recur_last is None and not due_set) or not self.recur_hardschedule: self.recur_last = date.today() elif due_set and self.recur_last is None: return self.due rrule = self.rrule if not rrule: return None next, next2 = rrule[:2] next = next.date() next2 = next2.date() if next == self.recur_last: next = next2 return next def reschedule(self, flash=True): from dtg.webapp import flash next = self.compute_next_date() assert next, "Can only schedule master tasks" self.recur_last = next self.create_slave_task(next) if flash: flash(_("Rescheduled task")) def delete(self): if self.master_task and not self.master_task.recur_hardschedule: self.master_task.reschedule() db.session.delete(self) @property def recur_next(self): if not (self.recur_data or self.recur_procedure): return return self.compute_next_date().isoformat() def get_default_rrule_args(self): return {"dtstart": self.recur_last} @property def due_marker(self): if not self.due: return days = (self.due - date.today()).days if days < 0: days_text = _("Overdue for %i days", (-days, )) klass = "overdue" elif days == 0: days_text = _("Due today") klass = "duetoday" elif days > 0: if days == 1: days_text = _("Due tomorrow") else: days_text = _("Due in %i days", (days, )) if days > 7: klass = "duefuture" else: klass = "duesoon" return days, unicode(days_text), klass 
@classmethod def generate_recur_data(cls, type, **data): retval = [type] for i, kwargname in enumerate(cls.recur_fields): value = data[kwargname] if i <= self.recur_last_arg_field: if value is None: retval.append("") else: retval.append(str(value)) elif value is not None: retval.append("%s=%s" % (kwargname, value)) return ";".join(retval) @property def rrule(self): _ = lambda x: x kwargs = {} if self.recur_data: values = self.recur_data.split(";") if values[0] not in ("Y", "M", "W", "D"): raise RecurInfoException( (_("Invalid recurrence type"), {})) for i, value in enumerate(values[1:]): if i <= self.recur_last_arg_field: field_name = self.recur_fields[i] kwargs[field_name] = int(value) else: try: kwargname, value = value.split("=", 1) except ValueError: raise RecurInfoException((_( "Invalid token '%(field)s', expected parameter name" ), { "field": value })) try: value = json.loads(value) except ValueError, e: raise RecurInfoException( (_("Invalid data in field '%(field)s'"), { "field": kwargname })) kwargs[kwargname] = value kwargs.update(self.get_default_rrule_args()) freq = { "Y": YEARLY, "M": MONTHLY, "W": WEEKLY, "D": DAILY }[values[0]] return rrule(freq, **kwargs) elif self.recur_procedure: try: freq, args = get_rrule_args(localeEnglish, self.recur_procedure) except ValueError, e: raise RecurInfoException( (_("Invalid recurrence procedure, see examples"), ()))